From 86ed8585a57278ce06c68d08c45b6e5e845b963d Mon Sep 17 00:00:00 2001
From: Yuwen Xiong
Date: Mon, 12 Sep 2016 20:25:25 +0800
Subject: [PATCH] init

---
 .gitignore | 79 +
 ImageSets/Main/test.txt | 4952 +++++
 ImageSets/Main/train.txt | 8218 ++++++++
 ImageSets/Main/trainval.txt | 16551 ++++++++++++++++
 ImageSets/Main/val.txt | 8333 ++++++++
 ImageSets/README.md | 1 +
 LICENSE | 21 +
 README.md | 150 +
 data/.gitignore | 6 +
 data/README.md | 69 +
 data/demo/000456.jpg | Bin 0 -> 105302 bytes
 data/demo/000542.jpg | Bin 0 -> 115536 bytes
 data/demo/001150.jpg | Bin 0 -> 88635 bytes
 data/demo/001763.jpg | Bin 0 -> 73424 bytes
 data/demo/004545.jpg | Bin 0 -> 123072 bytes
 data/pylintrc | 3 +
 data/scripts/fetch_faster_rcnn_models.sh | 34 +
 data/scripts/fetch_imagenet_models.sh | 34 +
 data/scripts/fetch_selective_search_data.sh | 34 +
 experiments/README.md | 5 +
 experiments/cfgs/faster_rcnn_alt_opt.yml | 5 +
 experiments/cfgs/faster_rcnn_end2end.yml | 11 +
 experiments/cfgs/rfcn_end2end.yml | 15 +
 experiments/cfgs/rfcn_end2end_ohem.yml | 16 +
 experiments/logs/.gitignore | 1 +
 experiments/scripts/fast_rcnn.sh | 63 +
 experiments/scripts/faster_rcnn_alt_opt.sh | 62 +
 experiments/scripts/faster_rcnn_end2end.sh | 68 +
 experiments/scripts/rfcn_end2end.sh | 68 +
 experiments/scripts/rfcn_end2end_ohem.sh | 70 +
 .../scripts/rfcn_end2end_ohem_warmup.sh | 84 +
 lib/Makefile | 3 +
 .../VOCdevkit-matlab-wrapper/get_voc_opts.m | 14 +
 .../VOCdevkit-matlab-wrapper/voc_eval.m | 56 +
 .../VOCdevkit-matlab-wrapper/xVOCap.m | 10 +
 lib/datasets/__init__.py | 6 +
 lib/datasets/coco.py | 394 +
 lib/datasets/ds_utils.py | 41 +
 lib/datasets/factory.py | 43 +
 lib/datasets/imdb.py | 253 +
 lib/datasets/pascal_voc.py | 344 +
 lib/datasets/tools/mcg_munge.py | 38 +
 lib/datasets/voc_eval.py | 200 +
 lib/fast_rcnn/__init__.py | 6 +
 lib/fast_rcnn/bbox_transform.py | 75 +
 lib/fast_rcnn/config.py | 290 +
 lib/fast_rcnn/nms_wrapper.py | 20 +
 lib/fast_rcnn/test.py | 298 +
 lib/fast_rcnn/train.py | 186 +
 lib/nms/.gitignore | 3 +
 lib/nms/__init__.py | 0
 lib/nms/cpu_nms.pyx | 68 +
 lib/nms/gpu_nms.hpp | 2 +
 lib/nms/gpu_nms.pyx | 31 +
 lib/nms/nms_kernel.cu | 144 +
 lib/nms/py_cpu_nms.py | 38 +
 lib/pycocotools/UPSTREAM_REV | 1 +
 lib/pycocotools/__init__.py | 1 +
 lib/pycocotools/_mask.pyx | 291 +
 lib/pycocotools/coco.py | 351 +
 lib/pycocotools/cocoeval.py | 444 +
 lib/pycocotools/license.txt | 26 +
 lib/pycocotools/mask.py | 82 +
 lib/pycocotools/maskApi.c | 208 +
 lib/pycocotools/maskApi.h | 55 +
 lib/roi_data_layer/__init__.py | 6 +
 lib/roi_data_layer/layer.py | 196 +
 lib/roi_data_layer/minibatch.py | 199 +
 lib/roi_data_layer/roidb.py | 133 +
 lib/rpn/README.md | 23 +
 lib/rpn/__init__.py | 6 +
 lib/rpn/anchor_target_layer.py | 281 +
 lib/rpn/generate.py | 117 +
 lib/rpn/generate_anchors.py | 105 +
 lib/rpn/proposal_layer.py | 177 +
 lib/rpn/proposal_target_layer.py | 216 +
 lib/setup.py | 156 +
 lib/transform/__init__.py | 0
 lib/transform/torch_image_transform_layer.py | 64 +
 lib/utils/.gitignore | 2 +
 lib/utils/__init__.py | 6 +
 lib/utils/bbox.pyx | 55 +
 lib/utils/blob.py | 45 +
 lib/utils/timer.py | 32 +
 models/coco/VGG16/fast_rcnn/solver.prototxt | 16 +
 models/coco/VGG16/fast_rcnn/test.prototxt | 499 +
 models/coco/VGG16/fast_rcnn/train.prototxt | 485 +
 .../VGG16/faster_rcnn_end2end/solver.prototxt | 15 +
 .../VGG16/faster_rcnn_end2end/test.prototxt | 590 +
 .../VGG16/faster_rcnn_end2end/train.prototxt | 642 +
 .../VGG_CNN_M_1024/fast_rcnn/solver.prototxt | 15 +
 .../VGG_CNN_M_1024/fast_rcnn/test.prototxt | 299 +
 .../VGG_CNN_M_1024/fast_rcnn/train.prototxt | 292 +
 .../faster_rcnn_end2end/solver.prototxt | 14 +
 .../faster_rcnn_end2end/test.prototxt | 432 +
 .../faster_rcnn_end2end/train.prototxt | 453 +
 .../rfcn_end2end/class-aware/test.prototxt | 7186 +++++++
 .../class-aware/train_ohem.prototxt | 7343 +++++++
 .../ResNet-101/rfcn_end2end/solver.prototxt | 16 +
 .../rfcn_end2end/solver_ohem.prototxt | 16 +
 .../solver_ohem_continue.prototxt~ | 16 +
 .../rfcn_end2end/solver_warmup.prototxt | 14 +
 .../solver_warmup_continue.prototxt | 16 +
 .../rfcn_end2end/test_agonistic.prototxt | 7186 +++++++
 .../rfcn_end2end/train_agonistic.prototxt | 7274 +++++++
 .../train_agonistic_ohem.prototxt | 7344 +++++++
 .../rfcn_end2end/class-aware/test.prototxt | 3787 ++++
 .../class-aware/train_ohem.prototxt | 3946 ++++
 .../ResNet-50/rfcn_end2end/solver.prototxt | 16 +
 .../rfcn_end2end/solver_ohem.prototxt | 16 +
 .../rfcn_end2end/solver_warmup.prototxt | 14 +
 .../solver_warmup_continue.prototxt | 16 +
 .../rfcn_end2end/test_agonistic.prototxt | 3787 ++++
 .../rfcn_end2end/train_agonistic.prototxt | 3877 ++++
 .../train_agonistic_ohem.prototxt | 3946 ++++
 .../VGG16/fast_rcnn/solver.prototxt | 16 +
 .../pascal_voc/VGG16/fast_rcnn/test.prototxt | 517 +
 .../pascal_voc/VGG16/fast_rcnn/train.prototxt | 503 +
 .../faster_rcnn_alt_opt/faster_rcnn_test.pt | 409 +
 .../VGG16/faster_rcnn_alt_opt/rpn_test.pt | 341 +
 .../stage1_fast_rcnn_solver30k40k.pt | 16 +
 .../stage1_fast_rcnn_train.pt | 542 +
 .../stage1_rpn_solver60k80k.pt | 16 +
 .../faster_rcnn_alt_opt/stage1_rpn_train.pt | 525 +
 .../stage2_fast_rcnn_solver30k40k.pt | 16 +
 .../stage2_fast_rcnn_train.pt | 490 +
 .../stage2_rpn_solver60k80k.pt | 16 +
 .../faster_rcnn_alt_opt/stage2_rpn_train.pt | 465 +
 .../VGG16/faster_rcnn_end2end/solver.prototxt | 16 +
 .../VGG16/faster_rcnn_end2end/test.prototxt | 608 +
 .../VGG16/faster_rcnn_end2end/train.prototxt | 673 +
 .../VGG_CNN_M_1024/fast_rcnn/solver.prototxt | 15 +
 .../VGG_CNN_M_1024/fast_rcnn/test.prototxt | 317 +
 .../VGG_CNN_M_1024/fast_rcnn/train.prototxt | 310 +
 .../faster_rcnn_alt_opt/faster_rcnn_test.pt | 289 +
 .../faster_rcnn_alt_opt/rpn_test.pt | 221 +
 .../stage1_fast_rcnn_solver30k40k.pt | 16 +
 .../stage1_fast_rcnn_train.pt | 337 +
 .../stage1_rpn_solver60k80k.pt | 16 +
 .../faster_rcnn_alt_opt/stage1_rpn_train.pt | 286 +
 .../stage2_fast_rcnn_solver30k40k.pt | 16 +
 .../stage2_fast_rcnn_train.pt | 337 +
 .../stage2_rpn_solver60k80k.pt | 16 +
 .../faster_rcnn_alt_opt/stage2_rpn_train.pt | 286 +
 .../faster_rcnn_end2end/solver.prototxt | 14 +
 .../faster_rcnn_end2end/test.prototxt | 450 +
 .../faster_rcnn_end2end/train.prototxt | 484 +
 .../pascal_voc/ZF/fast_rcnn/solver.prototxt | 18 +
 models/pascal_voc/ZF/fast_rcnn/test.prototxt | 251 +
 models/pascal_voc/ZF/fast_rcnn/train.prototxt | 300 +
 .../faster_rcnn_alt_opt/faster_rcnn_test.pt | 327 +
 .../ZF/faster_rcnn_alt_opt/rpn_test.pt | 233 +
 .../stage1_fast_rcnn_solver30k40k.pt | 16 +
 .../stage1_fast_rcnn_train.pt | 362 +
 .../stage1_rpn_solver60k80k.pt | 16 +
 .../faster_rcnn_alt_opt/stage1_rpn_train.pt | 312 +
 .../stage2_fast_rcnn_solver30k40k.pt | 16 +
 .../stage2_fast_rcnn_train.pt | 362 +
 .../stage2_rpn_solver60k80k.pt | 16 +
 .../faster_rcnn_alt_opt/stage2_rpn_train.pt | 312 +
 .../ZF/faster_rcnn_end2end/solver.prototxt | 25 +
 .../ZF/faster_rcnn_end2end/test.prototxt | 373 +
 .../ZF/faster_rcnn_end2end/train.prototxt | 497 +
 tools/README.md | 1 +
 tools/_init_paths.py | 25 +
 tools/compress_net.py | 125 +
 tools/demo.py | 151 +
 tools/eval_recall.py | 70 +
 tools/reval.py | 66 +
 tools/rpn_generate.py | 91 +
 tools/test_net.py | 90 +
 tools/train_faster_rcnn_alt_opt.py | 334 +
 tools/train_net.py | 112 +
 tools/train_svms.py | 353 +
 174 files changed, 117795 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 ImageSets/Main/test.txt
 create mode 100644 ImageSets/Main/train.txt
 create mode 100644 ImageSets/Main/trainval.txt
 create mode 100644 ImageSets/Main/val.txt
 create mode 100644 ImageSets/README.md
 create mode 100644 LICENSE
 create mode 100644 README.md
 create mode 100644 data/.gitignore
 create mode 100644 data/README.md
 create mode 100644 data/demo/000456.jpg
 create mode 100644 data/demo/000542.jpg
 create mode 100644 data/demo/001150.jpg
 create mode 100644 data/demo/001763.jpg
 create mode 100644 data/demo/004545.jpg
 create mode 100644 data/pylintrc
 create mode 100755 data/scripts/fetch_faster_rcnn_models.sh
 create mode 100755 data/scripts/fetch_imagenet_models.sh
 create mode 100755 data/scripts/fetch_selective_search_data.sh
 create mode 100644 experiments/README.md
 create mode 100644 experiments/cfgs/faster_rcnn_alt_opt.yml
 create mode 100644 experiments/cfgs/faster_rcnn_end2end.yml
 create mode 100644 experiments/cfgs/rfcn_end2end.yml
 create mode 100644 experiments/cfgs/rfcn_end2end_ohem.yml
 create mode 100644 experiments/logs/.gitignore
 create mode 100755 experiments/scripts/fast_rcnn.sh
 create mode 100755 experiments/scripts/faster_rcnn_alt_opt.sh
 create mode 100755 experiments/scripts/faster_rcnn_end2end.sh
 create mode 100755 experiments/scripts/rfcn_end2end.sh
 create mode 100755 experiments/scripts/rfcn_end2end_ohem.sh
 create mode 100755 experiments/scripts/rfcn_end2end_ohem_warmup.sh
 create mode 100644 lib/Makefile
 create mode 100644 lib/datasets/VOCdevkit-matlab-wrapper/get_voc_opts.m
 create mode 100644 lib/datasets/VOCdevkit-matlab-wrapper/voc_eval.m
 create mode 100644 lib/datasets/VOCdevkit-matlab-wrapper/xVOCap.m
 create mode 100644 lib/datasets/__init__.py
 create mode 100644 lib/datasets/coco.py
 create mode 100644 lib/datasets/ds_utils.py
 create mode 100644 lib/datasets/factory.py
 create mode 100644 lib/datasets/imdb.py
 create mode 100644 lib/datasets/pascal_voc.py
 create mode 100644 lib/datasets/tools/mcg_munge.py
 create mode 100644 lib/datasets/voc_eval.py
 create mode 100644 lib/fast_rcnn/__init__.py
 create mode 100644 lib/fast_rcnn/bbox_transform.py
 create mode 100644 lib/fast_rcnn/config.py
 create mode 100644 lib/fast_rcnn/nms_wrapper.py
 create mode 100644 lib/fast_rcnn/test.py
 create mode 100644 lib/fast_rcnn/train.py
 create mode 100644 lib/nms/.gitignore
 create mode 100644 lib/nms/__init__.py
 create mode 100644 lib/nms/cpu_nms.pyx
 create mode 100644 lib/nms/gpu_nms.hpp
 create mode 100644 lib/nms/gpu_nms.pyx
 create mode 100644 lib/nms/nms_kernel.cu
 create mode 100644 lib/nms/py_cpu_nms.py
 create mode 100644 lib/pycocotools/UPSTREAM_REV
 create mode 100644 lib/pycocotools/__init__.py
 create mode 100644 lib/pycocotools/_mask.pyx
 create mode 100644 lib/pycocotools/coco.py
 create mode 100644 lib/pycocotools/cocoeval.py
 create mode 100644 lib/pycocotools/license.txt
 create mode 100644 lib/pycocotools/mask.py
 create mode 100644 lib/pycocotools/maskApi.c
 create mode 100644 lib/pycocotools/maskApi.h
 create mode 100644 lib/roi_data_layer/__init__.py
 create mode 100644 lib/roi_data_layer/layer.py
 create mode 100644 lib/roi_data_layer/minibatch.py
 create mode 100644 lib/roi_data_layer/roidb.py
 create mode 100644 lib/rpn/README.md
 create mode 100644 lib/rpn/__init__.py
 create mode 100644 lib/rpn/anchor_target_layer.py
 create mode 100644 lib/rpn/generate.py
 create mode 100644 lib/rpn/generate_anchors.py
 create mode 100644 lib/rpn/proposal_layer.py
 create mode 100644 lib/rpn/proposal_target_layer.py
 create mode 100644 lib/setup.py
 create mode 100644 lib/transform/__init__.py
 create mode 100644 lib/transform/torch_image_transform_layer.py
 create mode 100644 lib/utils/.gitignore
 create mode 100644 lib/utils/__init__.py
 create mode 100644 lib/utils/bbox.pyx
 create mode 100644 lib/utils/blob.py
 create mode 100644 lib/utils/timer.py
 create mode 100644 models/coco/VGG16/fast_rcnn/solver.prototxt
 create mode 100644 models/coco/VGG16/fast_rcnn/test.prototxt
 create mode 100644 models/coco/VGG16/fast_rcnn/train.prototxt
 create mode 100644 models/coco/VGG16/faster_rcnn_end2end/solver.prototxt
 create mode 100644 models/coco/VGG16/faster_rcnn_end2end/test.prototxt
 create mode 100644 models/coco/VGG16/faster_rcnn_end2end/train.prototxt
 create mode 100644 models/coco/VGG_CNN_M_1024/fast_rcnn/solver.prototxt
 create mode 100644 models/coco/VGG_CNN_M_1024/fast_rcnn/test.prototxt
 create mode 100644 models/coco/VGG_CNN_M_1024/fast_rcnn/train.prototxt
 create mode 100644 models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt
 create mode 100644 models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt
 create mode 100644 models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt
 create mode 100644 models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/test.prototxt
 create mode 100644 models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/train_ohem.prototxt
 create mode 100644 models/pascal_voc/ResNet-101/rfcn_end2end/solver.prototxt
 create mode 100644 models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem.prototxt
 create mode 100644 models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem_continue.prototxt~
 create mode 100644 models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup.prototxt
 create mode 100644 models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup_continue.prototxt
 create mode 100644 models/pascal_voc/ResNet-101/rfcn_end2end/test_agonistic.prototxt
 create mode 100644 models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic.prototxt
 create mode 100644 models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt
 create mode 100644 models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/test.prototxt
 create mode 100644 models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/train_ohem.prototxt
 create mode 100644 models/pascal_voc/ResNet-50/rfcn_end2end/solver.prototxt
 create mode 100644 models/pascal_voc/ResNet-50/rfcn_end2end/solver_ohem.prototxt
 create mode 100644 models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup.prototxt
 create mode 100644 models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup_continue.prototxt
 create mode 100644 models/pascal_voc/ResNet-50/rfcn_end2end/test_agonistic.prototxt
 create mode 100644 models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic.prototxt
 create mode 100644 models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt
 create mode 100644 models/pascal_voc/VGG16/fast_rcnn/solver.prototxt
 create mode 100644 models/pascal_voc/VGG16/fast_rcnn/test.prototxt
 create mode 100644 models/pascal_voc/VGG16/fast_rcnn/train.prototxt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_alt_opt/faster_rcnn_test.pt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_alt_opt/rpn_test.pt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_train.pt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_train.pt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_end2end/solver.prototxt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_end2end/test.prototxt
 create mode 100644 models/pascal_voc/VGG16/faster_rcnn_end2end/train.prototxt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/solver.prototxt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/test.prototxt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/train.prototxt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/faster_rcnn_test.pt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/rpn_test.pt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_train.pt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_train.pt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt
 create mode 100644 models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt
 create mode 100644 models/pascal_voc/ZF/fast_rcnn/solver.prototxt
 create mode 100644 models/pascal_voc/ZF/fast_rcnn/test.prototxt
 create mode 100644 models/pascal_voc/ZF/fast_rcnn/train.prototxt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_alt_opt/faster_rcnn_test.pt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_alt_opt/rpn_test.pt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_train.pt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_train.pt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_end2end/solver.prototxt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_end2end/test.prototxt
 create mode 100644 models/pascal_voc/ZF/faster_rcnn_end2end/train.prototxt
 create mode 100644 tools/README.md
 create mode 100644 tools/_init_paths.py
 create mode
100755 tools/compress_net.py create mode 100755 tools/demo.py create mode 100755 tools/eval_recall.py create mode 100755 tools/reval.py create mode 100755 tools/rpn_generate.py create mode 100755 tools/test_net.py create mode 100755 tools/train_faster_rcnn_alt_opt.py create mode 100755 tools/train_net.py create mode 100755 tools/train_svms.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..51ef049 --- /dev/null +++ b/.gitignore @@ -0,0 +1,79 @@ +.ipynb_checkpoints +lib/build +lib/pycocotools/_mask.c +lib/pycocotools/_mask.so +caffe +output + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# IPython Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# dotenv +.env + +# virtualenv +venv/ +ENV/ + +# Spyder project settings +.spyderproject + +# Rope project settings +.ropeproject diff --git a/ImageSets/Main/test.txt b/ImageSets/Main/test.txt new file mode 100644 index 0000000..30177c1 --- /dev/null +++ b/ImageSets/Main/test.txt @@ -0,0 +1,4952 @@ +000001 +000002 +000003 +000004 +000006 +000008 +000010 +000011 +000013 +000014 +000015 +000018 +000022 +000025 +000027 +000028 +000029 +000031 +000037 +000038 +000040 +000043 +000045 +000049 +000053 +000054 +000055 +000056 +000057 +000058 +000059 +000062 +000067 +000068 +000069 +000070 +000071 +000074 +000075 +000076 +000079 +000080 +000082 +000084 +000085 +000086 +000087 +000088 +000090 +000092 +000094 +000096 +000097 +000098 +000100 +000103 +000105 +000106 +000108 +000111 +000114 +000115 +000116 +000119 +000124 +000126 +000127 +000128 +000135 +000136 +000137 +000139 +000144 +000145 +000148 +000149 +000151 +000152 +000155 +000157 +000160 +000166 +000167 +000168 +000172 +000175 +000176 +000178 +000179 +000181 +000182 +000183 +000185 +000186 +000188 +000191 +000195 +000196 +000197 +000199 +000201 +000202 +000204 +000205 +000206 +000212 +000213 +000216 +000217 +000223 +000226 +000227 +000230 +000231 +000234 +000237 +000238 +000239 +000240 +000243 +000247 +000248 +000252 +000253 +000254 +000255 +000258 +000260 +000261 +000264 +000265 +000267 +000271 +000272 +000273 +000274 +000277 +000279 +000280 +000281 +000283 +000284 +000286 +000287 +000290 +000291 +000292 +000293 +000295 +000297 +000299 +000300 +000301 +000309 +000310 +000313 +000314 +000315 +000316 +000319 +000324 +000326 +000327 +000330 +000333 +000335 +000339 +000341 +000342 +000345 +000346 +000348 +000350 +000351 +000353 +000356 +000357 +000358 +000360 +000361 +000362 +000364 +000365 +000366 +000368 +000369 +000371 +000375 +000376 +000377 +000378 +000383 +000384 +000385 +000386 +000388 +000389 +000390 +000392 +000393 +000397 +000398 +000399 +000401 +000402 +000405 +000409 +000410 +000412 +000413 +000414 +000415 +000418 +000421 +000422 +000423 +000425 +000426 +000429 +000432 +000434 +000436 +000437 +000440 +000441 +000442 +000444 +000445 +000447 +000449 +000451 
+000452 +000453 +000455 +000456 +000457 +000458 +000465 +000466 +000467 +000471 +000472 +000473 +000475 +000478 +000479 +000481 +000485 +000487 +000488 +000490 +000493 +000495 +000497 +000502 +000504 +000505 +000506 +000507 +000510 +000511 +000512 +000517 +000521 +000527 +000529 +000532 +000533 +000534 +000536 +000538 +000539 +000542 +000546 +000547 +000548 +000551 +000553 +000556 +000557 +000558 +000560 +000561 +000562 +000566 +000567 +000568 +000569 +000570 +000571 +000572 +000573 +000574 +000575 +000576 +000578 +000580 +000584 +000585 +000586 +000587 +000593 +000594 +000595 +000596 +000600 +000602 +000603 +000604 +000606 +000607 +000611 +000614 +000615 +000616 +000617 +000618 +000621 +000623 +000624 +000627 +000629 +000630 +000631 +000634 +000636 +000638 +000639 +000640 +000641 +000642 +000643 +000644 +000646 +000649 +000650 +000651 +000652 +000655 +000658 +000659 +000662 +000664 +000665 +000666 +000668 +000669 +000670 +000673 +000674 +000678 +000679 +000681 +000683 +000687 +000691 +000692 +000693 +000696 +000697 +000698 +000701 +000703 +000704 +000706 +000708 +000715 +000716 +000718 +000719 +000721 +000722 +000723 +000724 +000725 +000727 +000732 +000734 +000735 +000736 +000737 +000741 +000743 +000744 +000745 +000747 +000749 +000751 +000757 +000758 +000759 +000762 +000765 +000766 +000769 +000773 +000775 +000778 +000779 +000781 +000783 +000784 +000785 +000788 +000789 +000790 +000792 +000795 +000798 +000801 +000803 +000807 +000809 +000811 +000813 +000817 +000819 +000821 +000824 +000825 +000833 +000835 +000836 +000837 +000838 +000839 +000840 +000841 +000844 +000846 +000852 +000853 +000856 +000858 +000861 +000864 +000866 +000869 +000870 +000873 +000875 +000877 +000881 +000883 +000884 +000886 +000890 +000891 +000893 +000894 +000897 +000901 +000905 +000907 +000909 +000910 +000913 +000914 +000916 +000922 +000924 +000925 +000927 +000928 +000930 +000932 +000933 +000938 +000939 +000940 +000941 +000942 +000944 +000945 +000952 +000953 +000955 +000956 +000957 +000959 +000960 +000961 +000963 +000968 +000969 +000970 +000974 +000975 +000976 +000978 +000979 +000981 +000983 +000984 +000985 +000986 +000988 +000990 +000992 +000994 +000995 +000998 +001000 +001003 +001005 +001006 +001007 +001013 +001016 +001019 +001020 +001021 +001022 +001023 +001025 +001026 +001029 +001030 +001031 +001032 +001033 +001034 +001035 +001037 +001038 +001039 +001040 +001044 +001046 +001047 +001048 +001049 +001051 +001054 +001055 +001058 +001059 +001063 +001065 +001067 +001070 +001075 +001076 +001080 +001081 +001085 +001086 +001087 +001088 +001089 +001090 +001094 +001095 +001096 +001098 +001099 +001100 +001103 +001105 +001108 +001111 +001114 +001115 +001116 +001117 +001118 +001120 +001122 +001123 +001126 +001128 +001131 +001132 +001133 +001134 +001135 +001138 +001139 +001141 +001146 +001150 +001153 +001155 +001157 +001159 +001162 +001163 +001165 +001167 +001169 +001173 +001177 +001178 +001179 +001180 +001181 +001183 +001188 +001189 +001190 +001193 +001195 +001196 +001197 +001198 +001202 +001208 +001210 +001213 +001216 +001217 +001218 +001219 +001220 +001222 +001223 +001227 +001228 +001232 +001235 +001238 +001242 +001243 +001244 +001245 +001246 +001249 +001251 +001252 +001253 +001255 +001256 +001257 +001261 +001262 +001264 +001267 +001271 +001275 +001276 +001278 +001280 +001282 +001283 +001285 +001291 +001295 +001296 +001297 +001300 +001301 +001302 +001303 +001305 +001306 +001307 +001308 +001313 +001317 +001318 +001319 +001320 +001321 +001322 +001328 +001329 +001331 +001335 +001336 +001338 +001339 +001340 +001342 +001344 +001347 
+001349 +001351 +001353 +001354 +001355 +001356 +001357 +001358 +001359 +001363 +001366 +001367 +001368 +001369 +001370 +001372 +001373 +001374 +001376 +001377 +001379 +001380 +001381 +001382 +001389 +001391 +001392 +001394 +001396 +001398 +001399 +001401 +001403 +001407 +001410 +001411 +001412 +001415 +001416 +001417 +001419 +001422 +001423 +001424 +001425 +001428 +001429 +001431 +001433 +001435 +001437 +001438 +001440 +001446 +001447 +001448 +001449 +001452 +001454 +001456 +001458 +001459 +001461 +001462 +001469 +001471 +001473 +001474 +001476 +001477 +001478 +001482 +001487 +001489 +001491 +001495 +001496 +001500 +001502 +001503 +001505 +001506 +001507 +001508 +001511 +001513 +001516 +001518 +001519 +001520 +001525 +001527 +001530 +001533 +001534 +001535 +001538 +001540 +001542 +001546 +001547 +001549 +001550 +001551 +001552 +001558 +001560 +001562 +001564 +001566 +001567 +001568 +001569 +001570 +001572 +001573 +001574 +001575 +001578 +001581 +001583 +001584 +001585 +001587 +001589 +001591 +001592 +001596 +001599 +001600 +001601 +001602 +001605 +001606 +001609 +001613 +001615 +001616 +001619 +001620 +001621 +001623 +001624 +001625 +001626 +001629 +001631 +001634 +001635 +001637 +001639 +001641 +001644 +001645 +001646 +001648 +001652 +001655 +001656 +001657 +001658 +001659 +001660 +001663 +001664 +001665 +001666 +001667 +001668 +001670 +001671 +001672 +001674 +001679 +001681 +001687 +001692 +001694 +001695 +001696 +001697 +001698 +001700 +001701 +001702 +001703 +001704 +001705 +001706 +001709 +001710 +001712 +001715 +001716 +001719 +001720 +001722 +001728 +001731 +001735 +001736 +001737 +001740 +001742 +001743 +001744 +001745 +001748 +001751 +001753 +001757 +001760 +001762 +001763 +001764 +001767 +001769 +001770 +001773 +001774 +001776 +001779 +001781 +001783 +001786 +001788 +001790 +001791 +001792 +001794 +001796 +001798 +001802 +001803 +001804 +001805 +001808 +001811 +001812 +001813 +001814 +001815 +001817 +001819 +001820 +001822 +001823 +001824 +001826 +001829 +001831 +001835 +001838 +001839 +001844 +001846 +001848 +001850 +001851 +001852 +001856 +001857 +001859 +001863 +001865 +001866 +001867 +001868 +001869 +001871 +001873 +001874 +001876 +001879 +001880 +001883 +001884 +001885 +001886 +001889 +001890 +001891 +001893 +001895 +001897 +001900 +001905 +001908 +001909 +001910 +001912 +001913 +001914 +001916 +001917 +001919 +001921 +001923 +001924 +001925 +001926 +001929 +001935 +001939 +001942 +001943 +001946 +001947 +001949 +001951 +001953 +001955 +001956 +001957 +001959 +001961 +001965 +001966 +001967 +001968 +001969 +001973 +001974 +001975 +001979 +001983 +001984 +001986 +001987 +001988 +001990 +001991 +001992 +001993 +001994 +001996 +001997 +001998 +002003 +002005 +002007 +002008 +002009 +002010 +002013 +002014 +002016 +002017 +002018 +002026 +002028 +002029 +002031 +002032 +002033 +002035 +002038 +002040 +002041 +002044 +002046 +002048 +002050 +002052 +002053 +002057 +002059 +002060 +002062 +002065 +002066 +002071 +002072 +002073 +002074 +002075 +002076 +002077 +002078 +002079 +002080 +002081 +002084 +002085 +002087 +002089 +002092 +002093 +002097 +002100 +002103 +002105 +002106 +002107 +002110 +002111 +002113 +002115 +002118 +002119 +002121 +002122 +002123 +002127 +002128 +002130 +002131 +002133 +002137 +002138 +002141 +002143 +002144 +002147 +002148 +002149 +002150 +002154 +002157 +002159 +002160 +002161 +002162 +002164 +002167 +002168 +002173 +002175 +002177 +002185 +002188 +002189 +002195 +002198 +002200 +002203 +002204 +002205 +002206 +002207 +002210 +002211 +002216 +002217 
+002222 +002223 +002225 +002227 +002229 +002230 +002231 +002232 +002235 +002236 +002239 +002240 +002242 +002243 +002245 +002246 +002250 +002252 +002254 +002258 +002262 +002264 +002269 +002271 +002274 +002275 +002282 +002283 +002286 +002289 +002292 +002294 +002295 +002296 +002297 +002298 +002299 +002301 +002303 +002304 +002309 +002312 +002313 +002314 +002316 +002317 +002319 +002322 +002325 +002326 +002327 +002331 +002336 +002338 +002339 +002341 +002344 +002346 +002349 +002351 +002353 +002356 +002357 +002358 +002360 +002363 +002365 +002370 +002379 +002380 +002381 +002383 +002386 +002388 +002389 +002390 +002394 +002395 +002396 +002397 +002398 +002399 +002400 +002402 +002406 +002408 +002409 +002412 +002414 +002416 +002418 +002421 +002422 +002424 +002426 +002428 +002429 +002430 +002431 +002432 +002434 +002438 +002440 +002446 +002447 +002449 +002451 +002453 +002455 +002457 +002463 +002464 +002467 +002469 +002473 +002474 +002475 +002482 +002484 +002485 +002486 +002487 +002488 +002489 +002495 +002498 +002499 +002503 +002506 +002507 +002509 +002510 +002511 +002515 +002516 +002517 +002521 +002522 +002526 +002527 +002528 +002530 +002531 +002532 +002535 +002536 +002538 +002541 +002543 +002548 +002550 +002551 +002552 +002553 +002556 +002557 +002560 +002562 +002568 +002570 +002573 +002574 +002575 +002576 +002577 +002580 +002581 +002582 +002583 +002587 +002588 +002591 +002592 +002596 +002597 +002601 +002602 +002604 +002607 +002608 +002610 +002612 +002614 +002616 +002617 +002619 +002620 +002622 +002623 +002624 +002626 +002628 +002629 +002630 +002631 +002638 +002639 +002640 +002642 +002644 +002650 +002651 +002652 +002654 +002655 +002656 +002660 +002661 +002663 +002665 +002671 +002672 +002673 +002674 +002676 +002679 +002681 +002685 +002686 +002687 +002688 +002692 +002694 +002698 +002700 +002701 +002703 +002705 +002707 +002708 +002711 +002712 +002716 +002719 +002720 +002724 +002725 +002726 +002728 +002729 +002731 +002733 +002736 +002739 +002740 +002742 +002743 +002746 +002748 +002750 +002752 +002753 +002754 +002756 +002758 +002761 +002764 +002768 +002769 +002770 +002771 +002773 +002777 +002780 +002781 +002787 +002788 +002789 +002790 +002792 +002793 +002797 +002799 +002802 +002805 +002806 +002808 +002809 +002811 +002813 +002814 +002818 +002819 +002821 +002822 +002823 +002824 +002825 +002828 +002829 +002830 +002831 +002832 +002837 +002839 +002840 +002843 +002846 +002849 +002850 +002851 +002852 +002853 +002856 +002857 +002860 +002861 +002862 +002863 +002865 +002871 +002872 +002874 +002876 +002877 +002878 +002882 +002883 +002885 +002887 +002888 +002890 +002892 +002894 +002895 +002897 +002898 +002900 +002902 +002903 +002904 +002905 +002907 +002908 +002909 +002911 +002918 +002920 +002921 +002922 +002923 +002925 +002926 +002927 +002928 +002929 +002930 +002936 +002945 +002948 +002949 +002950 +002951 +002955 +002959 +002961 +002964 +002968 +002970 +002971 +002972 +002973 +002974 +002979 +002980 +002981 +002982 +002983 +002985 +002991 +002993 +002996 +002997 +002998 +002999 +003001 +003006 +003010 +003012 +003014 +003016 +003018 +003019 +003020 +003022 +003025 +003026 +003029 +003030 +003033 +003035 +003036 +003037 +003040 +003041 +003043 +003046 +003048 +003049 +003050 +003052 +003055 +003059 +003060 +003062 +003067 +003068 +003069 +003070 +003071 +003073 +003075 +003076 +003079 +003080 +003081 +003084 +003087 +003091 +003095 +003096 +003097 +003099 +003101 +003104 +003109 +003111 +003113 +003114 +003115 +003119 +003123 +003125 +003128 +003130 +003131 +003132 +003136 +003139 +003141 +003143 +003144 +003148 +003151 
+003152 +003153 +003156 +003158 +003160 +003166 +003167 +003168 +003171 +003172 +003173 +003174 +003179 +003180 +003182 +003187 +003190 +003191 +003192 +003193 +003196 +003197 +003198 +003201 +003203 +003206 +003208 +003209 +003212 +003215 +003217 +003220 +003221 +003222 +003224 +003225 +003226 +003227 +003230 +003232 +003234 +003235 +003237 +003238 +003241 +003245 +003246 +003248 +003249 +003251 +003252 +003257 +003263 +003264 +003265 +003266 +003267 +003268 +003275 +003276 +003277 +003278 +003281 +003283 +003286 +003287 +003288 +003289 +003291 +003295 +003297 +003298 +003302 +003304 +003305 +003306 +003309 +003310 +003312 +003314 +003315 +003317 +003318 +003319 +003321 +003322 +003323 +003324 +003326 +003328 +003329 +003332 +003333 +003334 +003340 +003341 +003342 +003345 +003346 +003347 +003348 +003352 +003353 +003357 +003358 +003361 +003364 +003366 +003368 +003371 +003372 +003375 +003378 +003381 +003383 +003384 +003385 +003387 +003388 +003389 +003393 +003394 +003399 +003400 +003402 +003405 +003409 +003411 +003414 +003418 +003423 +003426 +003427 +003428 +003431 +003432 +003434 +003437 +003438 +003440 +003442 +003445 +003446 +003447 +003448 +003454 +003456 +003457 +003459 +003460 +003463 +003467 +003471 +003472 +003473 +003474 +003475 +003476 +003478 +003479 +003480 +003481 +003482 +003483 +003485 +003486 +003488 +003490 +003494 +003495 +003498 +003501 +003502 +003503 +003504 +003505 +003507 +003512 +003513 +003514 +003515 +003517 +003520 +003523 +003526 +003527 +003531 +003532 +003533 +003534 +003535 +003538 +003540 +003541 +003542 +003543 +003544 +003545 +003547 +003552 +003553 +003557 +003558 +003559 +003560 +003561 +003562 +003563 +003568 +003569 +003570 +003571 +003572 +003573 +003574 +003578 +003579 +003581 +003582 +003583 +003584 +003590 +003591 +003592 +003595 +003598 +003600 +003601 +003602 +003607 +003610 +003612 +003613 +003615 +003616 +003617 +003619 +003624 +003626 +003630 +003631 +003633 +003637 +003641 +003643 +003647 +003649 +003650 +003652 +003653 +003659 +003661 +003665 +003666 +003668 +003670 +003672 +003676 +003677 +003680 +003682 +003683 +003686 +003687 +003689 +003692 +003693 +003697 +003701 +003702 +003707 +003710 +003712 +003715 +003716 +003718 +003719 +003720 +003723 +003724 +003725 +003726 +003728 +003730 +003731 +003733 +003734 +003736 +003737 +003738 +003739 +003741 +003742 +003744 +003745 +003746 +003747 +003755 +003756 +003757 +003761 +003762 +003764 +003765 +003766 +003768 +003769 +003770 +003771 +003775 +003776 +003777 +003778 +003782 +003785 +003787 +003789 +003794 +003795 +003799 +003800 +003801 +003802 +003804 +003805 +003810 +003812 +003813 +003815 +003816 +003819 +003822 +003823 +003825 +003829 +003831 +003832 +003833 +003836 +003839 +003840 +003841 +003842 +003843 +003850 +003851 +003852 +003853 +003854 +003858 +003862 +003864 +003867 +003870 +003873 +003875 +003878 +003880 +003881 +003882 +003883 +003884 +003888 +003892 +003893 +003894 +003896 +003897 +003900 +003901 +003902 +003903 +003904 +003906 +003908 +003909 +003910 +003914 +003916 +003917 +003920 +003922 +003925 +003927 +003928 +003929 +003930 +003931 +003933 +003934 +003938 +003940 +003942 +003943 +003944 +003950 +003951 +003952 +003955 +003958 +003959 +003962 +003964 +003967 +003968 +003972 +003975 +003976 +003977 +003978 +003980 +003981 +003982 +003985 +003989 +003995 +003999 +004000 +004001 +004002 +004004 +004006 +004007 +004018 +004021 +004022 +004024 +004026 +004027 +004029 +004030 +004032 +004036 +004038 +004040 +004041 +004042 +004043 +004044 +004045 +004048 +004049 +004050 +004053 
+004054 +004055 +004056 +004059 +004061 +004062 +004063 +004064 +004065 +004068 +004070 +004071 +004072 +004074 +004078 +004079 +004080 +004081 +004083 +004084 +004086 +004088 +004090 +004094 +004096 +004097 +004098 +004099 +004101 +004103 +004104 +004107 +004109 +004112 +004114 +004115 +004116 +004118 +004119 +004123 +004124 +004125 +004126 +004127 +004128 +004130 +004132 +004134 +004139 +004144 +004147 +004151 +004153 +004154 +004155 +004156 +004157 +004159 +004160 +004161 +004162 +004165 +004166 +004167 +004172 +004173 +004175 +004176 +004177 +004179 +004180 +004181 +004182 +004183 +004184 +004187 +004188 +004197 +004198 +004199 +004202 +004206 +004207 +004208 +004210 +004211 +004213 +004214 +004216 +004217 +004218 +004219 +004222 +004225 +004226 +004227 +004233 +004234 +004235 +004236 +004238 +004240 +004243 +004245 +004248 +004249 +004250 +004251 +004252 +004254 +004260 +004261 +004262 +004266 +004267 +004268 +004276 +004277 +004278 +004282 +004285 +004288 +004289 +004290 +004294 +004297 +004299 +004301 +004302 +004305 +004306 +004308 +004309 +004311 +004313 +004314 +004316 +004317 +004319 +004320 +004324 +004328 +004330 +004332 +004334 +004335 +004336 +004337 +004340 +004342 +004343 +004344 +004348 +004350 +004353 +004355 +004357 +004358 +004362 +004363 +004366 +004373 +004374 +004375 +004377 +004378 +004381 +004382 +004383 +004385 +004388 +004393 +004394 +004395 +004398 +004399 +004400 +004401 +004402 +004403 +004406 +004407 +004408 +004410 +004412 +004413 +004414 +004415 +004416 +004417 +004418 +004419 +004420 +004422 +004425 +004426 +004427 +004428 +004431 +004435 +004440 +004442 +004443 +004444 +004445 +004447 +004448 +004449 +004451 +004453 +004454 +004456 +004458 +004460 +004461 +004462 +004465 +004467 +004469 +004472 +004473 +004475 +004476 +004477 +004478 +004480 +004482 +004483 +004485 +004486 +004489 +004491 +004492 +004497 +004501 +004503 +004504 +004505 +004506 +004511 +004513 +004515 +004516 +004521 +004522 +004523 +004525 +004529 +004531 +004533 +004534 +004536 +004538 +004541 +004543 +004545 +004546 +004547 +004550 +004554 +004556 +004557 +004559 +004560 +004561 +004564 +004567 +004568 +004569 +004572 +004573 +004575 +004577 +004578 +004580 +004582 +004583 +004586 +004589 +004590 +004593 +004594 +004596 +004598 +004599 +004602 +004603 +004608 +004610 +004613 +004614 +004615 +004616 +004617 +004619 +004620 +004621 +004624 +004629 +004633 +004635 +004637 +004638 +004639 +004640 +004641 +004642 +004645 +004646 +004650 +004657 +004658 +004659 +004661 +004663 +004664 +004665 +004666 +004667 +004668 +004669 +004670 +004677 +004678 +004680 +004681 +004684 +004688 +004690 +004695 +004696 +004697 +004698 +004700 +004703 +004704 +004709 +004711 +004712 +004713 +004716 +004717 +004720 +004721 +004724 +004725 +004726 +004728 +004729 +004730 +004731 +004733 +004734 +004736 +004738 +004739 +004740 +004741 +004744 +004745 +004749 +004751 +004752 +004755 +004756 +004757 +004758 +004759 +004762 +004763 +004764 +004765 +004766 +004767 +004769 +004771 +004772 +004774 +004775 +004778 +004780 +004781 +004784 +004787 +004791 +004795 +004798 +004800 +004802 +004803 +004804 +004806 +004807 +004809 +004810 +004811 +004813 +004817 +004819 +004820 +004821 +004822 +004824 +004827 +004829 +004833 +004835 +004838 +004843 +004844 +004845 +004847 +004851 +004853 +004854 +004855 +004858 +004860 +004861 +004862 +004864 +004865 +004870 +004871 +004874 +004875 +004877 +004880 +004881 +004883 +004884 +004887 +004888 +004889 +004891 +004892 +004893 +004894 +004899 +004900 +004901 +004904 +004906 +004908 
+004909 +004914 +004915 +004917 +004918 +004919 +004920 +004921 +004922 +004923 +004924 +004925 +004927 +004930 +004932 +004933 +004934 +004937 +004940 +004941 +004942 +004944 +004945 +004947 +004949 +004952 +004957 +004959 +004964 +004965 +004969 +004970 +004971 +004975 +004978 +004979 +004980 +004981 +004988 +004989 +004993 +004996 +005000 +005002 +005005 +005008 +005009 +005010 +005011 +005012 +005013 +005015 +005017 +005019 +005021 +005022 +005025 +005030 +005031 +005034 +005035 +005038 +005040 +005041 +005043 +005044 +005046 +005048 +005049 +005050 +005051 +005053 +005059 +005060 +005066 +005069 +005070 +005074 +005075 +005076 +005080 +005082 +005083 +005087 +005088 +005089 +005091 +005092 +005095 +005096 +005098 +005099 +005100 +005103 +005105 +005106 +005109 +005112 +005113 +005115 +005117 +005118 +005119 +005120 +005123 +005125 +005126 +005127 +005132 +005133 +005137 +005139 +005140 +005141 +005142 +005147 +005148 +005149 +005151 +005152 +005154 +005155 +005157 +005158 +005162 +005163 +005164 +005165 +005166 +005167 +005170 +005172 +005174 +005178 +005180 +005182 +005184 +005187 +005188 +005192 +005193 +005194 +005196 +005197 +005198 +005200 +005201 +005204 +005205 +005206 +005207 +005211 +005213 +005216 +005218 +005221 +005225 +005226 +005227 +005228 +005232 +005233 +005234 +005235 +005237 +005238 +005240 +005241 +005243 +005247 +005249 +005250 +005251 +005252 +005255 +005256 +005261 +005265 +005266 +005271 +005272 +005275 +005276 +005277 +005279 +005280 +005282 +005284 +005286 +005287 +005289 +005291 +005294 +005295 +005296 +005299 +005300 +005301 +005302 +005308 +005309 +005313 +005316 +005317 +005321 +005322 +005323 +005324 +005329 +005330 +005332 +005333 +005334 +005335 +005339 +005341 +005342 +005347 +005353 +005354 +005356 +005357 +005359 +005361 +005362 +005364 +005366 +005372 +005375 +005376 +005377 +005381 +005382 +005386 +005390 +005392 +005394 +005399 +005400 +005401 +005402 +005403 +005409 +005411 +005412 +005415 +005422 +005425 +005426 +005427 +005428 +005432 +005435 +005437 +005442 +005443 +005444 +005446 +005447 +005449 +005452 +005456 +005458 +005459 +005460 +005462 +005463 +005464 +005466 +005468 +005472 +005473 +005474 +005476 +005477 +005479 +005480 +005482 +005484 +005488 +005490 +005491 +005492 +005493 +005494 +005495 +005498 +005500 +005501 +005502 +005503 +005504 +005505 +005506 +005512 +005513 +005516 +005520 +005523 +005525 +005528 +005529 +005532 +005533 +005534 +005537 +005538 +005540 +005543 +005545 +005546 +005548 +005551 +005553 +005555 +005556 +005557 +005558 +005560 +005561 +005562 +005564 +005565 +005567 +005569 +005570 +005571 +005572 +005575 +005578 +005580 +005581 +005587 +005589 +005594 +005595 +005596 +005597 +005598 +005602 +005604 +005607 +005610 +005612 +005616 +005617 +005619 +005621 +005622 +005623 +005626 +005627 +005628 +005632 +005633 +005634 +005635 +005638 +005642 +005643 +005646 +005649 +005650 +005651 +005656 +005659 +005661 +005663 +005665 +005666 +005667 +005670 +005671 +005673 +005675 +005677 +005678 +005681 +005683 +005684 +005688 +005689 +005690 +005691 +005692 +005694 +005698 +005703 +005706 +005707 +005708 +005709 +005711 +005712 +005717 +005720 +005721 +005722 +005724 +005725 +005726 +005727 +005733 +005734 +005737 +005739 +005744 +005745 +005746 +005748 +005750 +005751 +005753 +005754 +005758 +005759 +005763 +005766 +005767 +005770 +005771 +005772 +005774 +005775 +005776 +005777 +005778 +005785 +005787 +005792 +005793 +005795 +005797 +005798 +005800 +005801 +005802 +005804 +005807 +005808 +005809 +005810 +005816 +005820 
+005822 +005823 +005827 +005832 +005833 +005834 +005835 +005837 +005842 +005844 +005846 +005847 +005848 +005849 +005855 +005857 +005858 +005862 +005865 +005866 +005869 +005870 +005871 +005872 +005876 +005880 +005882 +005883 +005886 +005887 +005890 +005891 +005892 +005896 +005898 +005900 +005902 +005904 +005907 +005913 +005915 +005916 +005921 +005922 +005924 +005925 +005926 +005927 +005929 +005931 +005932 +005933 +005934 +005935 +005936 +005937 +005939 +005941 +005942 +005943 +005944 +005945 +005946 +005949 +005950 +005953 +005955 +005957 +005958 +005959 +005962 +005965 +005966 +005967 +005969 +005972 +005973 +005974 +005976 +005977 +005978 +005982 +005986 +005987 +005993 +005994 +005997 +005999 +006002 +006003 +006006 +006007 +006008 +006010 +006013 +006014 +006015 +006016 +006017 +006019 +006021 +006022 +006024 +006031 +006032 +006034 +006036 +006037 +006039 +006040 +006044 +006047 +006048 +006049 +006050 +006051 +006052 +006053 +006054 +006056 +006057 +006059 +006060 +006063 +006064 +006068 +006072 +006075 +006076 +006077 +006080 +006081 +006082 +006083 +006085 +006086 +006087 +006090 +006092 +006093 +006094 +006099 +006101 +006102 +006106 +006109 +006110 +006112 +006113 +006114 +006115 +006116 +006118 +006119 +006121 +006122 +006126 +006127 +006132 +006137 +006138 +006142 +006143 +006144 +006145 +006147 +006149 +006152 +006154 +006155 +006157 +006160 +006164 +006165 +006167 +006168 +006169 +006173 +006178 +006182 +006186 +006191 +006192 +006193 +006194 +006195 +006197 +006199 +006200 +006204 +006205 +006207 +006211 +006213 +006217 +006226 +006227 +006228 +006231 +006232 +006237 +006239 +006242 +006244 +006245 +006246 +006248 +006253 +006255 +006256 +006257 +006263 +006265 +006266 +006268 +006271 +006273 +006274 +006278 +006280 +006283 +006287 +006288 +006292 +006293 +006294 +006297 +006298 +006302 +006303 +006307 +006308 +006310 +006311 +006312 +006313 +006315 +006316 +006317 +006322 +006324 +006326 +006327 +006328 +006331 +006332 +006333 +006334 +006336 +006340 +006342 +006343 +006345 +006347 +006354 +006356 +006358 +006359 +006360 +006361 +006364 +006365 +006368 +006370 +006372 +006373 +006376 +006378 +006379 +006380 +006383 +006384 +006386 +006388 +006389 +006390 +006393 +006394 +006397 +006399 +006401 +006402 +006403 +006405 +006406 +006407 +006408 +006410 +006412 +006413 +006414 +006415 +006416 +006420 +006422 +006423 +006426 +006431 +006432 +006435 +006439 +006441 +006446 +006451 +006452 +006453 +006454 +006457 +006460 +006461 +006464 +006467 +006469 +006471 +006477 +006478 +006479 +006481 +006485 +006487 +006489 +006490 +006491 +006493 +006494 +006496 +006498 +006500 +006502 +006504 +006505 +006508 +006510 +006511 +006513 +006514 +006516 +006517 +006518 +006521 +006522 +006525 +006526 +006527 +006528 +006531 +006533 +006535 +006537 +006539 +006540 +006541 +006544 +006545 +006546 +006552 +006554 +006555 +006557 +006558 +006559 +006561 +006563 +006566 +006567 +006568 +006571 +006573 +006574 +006577 +006579 +006580 +006581 +006582 +006586 +006589 +006590 +006591 +006592 +006594 +006596 +006598 +006600 +006601 +006604 +006607 +006608 +006613 +006614 +006615 +006616 +006620 +006623 +006624 +006629 +006630 +006633 +006634 +006639 +006640 +006641 +006642 +006644 +006646 +006649 +006650 +006651 +006653 +006655 +006656 +006659 +006662 +006663 +006665 +006669 +006672 +006675 +006676 +006680 +006683 +006685 +006686 +006688 +006691 +006692 +006693 +006700 +006701 +006705 +006710 +006711 +006712 +006713 +006715 +006716 +006717 +006720 +006721 +006723 +006724 +006728 +006729 +006732 +006733 
+006737 +006741 +006742 +006743 +006744 +006745 +006746 +006749 +006750 +006752 +006754 +006756 +006757 +006758 +006763 +006764 +006767 +006770 +006771 +006774 +006775 +006776 +006778 +006779 +006780 +006785 +006787 +006788 +006790 +006791 +006792 +006793 +006795 +006796 +006798 +006801 +006804 +006807 +006809 +006811 +006812 +006815 +006816 +006817 +006818 +006820 +006823 +006826 +006830 +006831 +006832 +006834 +006837 +006843 +006846 +006851 +006853 +006854 +006856 +006857 +006861 +006863 +006870 +006871 +006872 +006873 +006875 +006877 +006879 +006881 +006882 +006885 +006888 +006889 +006890 +006891 +006894 +006895 +006897 +006898 +006901 +006902 +006904 +006905 +006906 +006907 +006913 +006915 +006920 +006921 +006923 +006925 +006926 +006927 +006928 +006929 +006936 +006937 +006938 +006941 +006942 +006946 +006951 +006954 +006955 +006957 +006960 +006961 +006964 +006967 +006969 +006970 +006973 +006974 +006975 +006977 +006978 +006979 +006980 +006982 +006984 +006985 +006986 +006991 +006992 +006993 +006996 +006997 +006998 +006999 +007000 +007001 +007005 +007010 +007012 +007013 +007014 +007015 +007017 +007019 +007024 +007026 +007027 +007028 +007030 +007032 +007034 +007037 +007041 +007043 +007044 +007047 +007051 +007053 +007055 +007057 +007060 +007061 +007063 +007066 +007067 +007069 +007076 +007081 +007082 +007083 +007085 +007087 +007091 +007094 +007096 +007098 +007099 +007102 +007103 +007106 +007107 +007110 +007111 +007112 +007115 +007116 +007118 +007119 +007120 +007124 +007126 +007127 +007131 +007134 +007135 +007136 +007137 +007142 +007143 +007145 +007151 +007155 +007156 +007157 +007158 +007160 +007161 +007164 +007169 +007170 +007171 +007173 +007175 +007176 +007178 +007179 +007181 +007183 +007186 +007188 +007190 +007192 +007195 +007196 +007198 +007199 +007201 +007202 +007203 +007206 +007207 +007209 +007218 +007220 +007221 +007225 +007226 +007228 +007229 +007231 +007232 +007233 +007235 +007237 +007238 +007239 +007240 +007242 +007246 +007248 +007251 +007252 +007253 +007254 +007255 +007257 +007262 +007264 +007265 +007267 +007268 +007269 +007272 +007273 +007277 +007278 +007281 +007282 +007286 +007287 +007288 +007290 +007291 +007293 +007301 +007303 +007304 +007306 +007307 +007309 +007310 +007312 +007313 +007315 +007316 +007317 +007319 +007320 +007321 +007324 +007326 +007328 +007331 +007332 +007333 +007335 +007337 +007338 +007339 +007340 +007341 +007342 +007345 +007347 +007348 +007349 +007352 +007353 +007354 +007355 +007357 +007358 +007360 +007362 +007364 +007366 +007367 +007368 +007371 +007377 +007378 +007379 +007380 +007382 +007384 +007386 +007387 +007391 +007392 +007393 +007395 +007397 +007399 +007400 +007401 +007402 +007403 +007404 +007405 +007406 +007407 +007409 +007412 +007415 +007418 +007420 +007423 +007426 +007428 +007429 +007430 +007434 +007440 +007441 +007442 +007444 +007447 +007450 +007452 +007453 +007455 +007456 +007459 +007462 +007463 +007464 +007469 +007471 +007472 +007473 +007476 +007478 +007485 +007487 +007488 +007492 +007494 +007495 +007496 +007499 +007500 +007501 +007502 +007504 +007505 +007507 +007508 +007509 +007510 +007512 +007514 +007515 +007516 +007518 +007520 +007522 +007529 +007531 +007532 +007534 +007539 +007541 +007542 +007545 +007548 +007549 +007550 +007552 +007553 +007554 +007556 +007557 +007560 +007561 +007562 +007564 +007567 +007569 +007573 +007574 +007577 +007580 +007581 +007582 +007583 +007584 +007587 +007588 +007589 +007591 +007593 +007595 +007596 +007597 +007598 +007599 +007602 +007604 +007607 +007608 +007609 +007610 +007613 +007616 +007617 +007620 +007623 +007625 
+007627 +007628 +007630 +007632 +007634 +007635 +007636 +007638 +007641 +007643 +007644 +007645 +007646 +007648 +007651 +007652 +007658 +007659 +007660 +007661 +007665 +007669 +007674 +007676 +007681 +007684 +007686 +007689 +007690 +007693 +007695 +007698 +007700 +007701 +007703 +007706 +007707 +007708 +007710 +007711 +007714 +007716 +007717 +007719 +007722 +007725 +007726 +007728 +007730 +007733 +007734 +007737 +007738 +007739 +007741 +007744 +007747 +007750 +007752 +007755 +007756 +007757 +007759 +007761 +007764 +007766 +007769 +007770 +007771 +007774 +007778 +007780 +007782 +007783 +007784 +007785 +007787 +007788 +007789 +007792 +007794 +007796 +007797 +007800 +007801 +007802 +007804 +007805 +007806 +007807 +007808 +007811 +007816 +007817 +007818 +007822 +007823 +007825 +007827 +007828 +007829 +007830 +007832 +007835 +007837 +007839 +007842 +007844 +007846 +007848 +007849 +007850 +007851 +007852 +007858 +007860 +007861 +007862 +007866 +007867 +007870 +007871 +007874 +007875 +007879 +007880 +007881 +007882 +007887 +007888 +007891 +007892 +007893 +007894 +007895 +007896 +007903 +007904 +007906 +007907 +007912 +007913 +007917 +007918 +007922 +007927 +007929 +007930 +007934 +007936 +007937 +007938 +007941 +007942 +007944 +007945 +007948 +007949 +007951 +007952 +007955 +007957 +007960 +007961 +007962 +007965 +007966 +007967 +007969 +007972 +007973 +007975 +007977 +007978 +007981 +007982 +007983 +007985 +007986 +007988 +007989 +007990 +007992 +007993 +007994 +007995 +008000 +008003 +008006 +008007 +008010 +008011 +008013 +008014 +008015 +008016 +008018 +008020 +008021 +008022 +008025 +008027 +008028 +008030 +008034 +008035 +008038 +008039 +008041 +008045 +008046 +008047 +008050 +008052 +008054 +008055 +008056 +008058 +008059 +008065 +008066 +008070 +008071 +008073 +008074 +008077 +008078 +008080 +008081 +008088 +008089 +008090 +008092 +008094 +008097 +008099 +008102 +008104 +008109 +008110 +008111 +008113 +008114 +008118 +008119 +008120 +008123 +008124 +008126 +008128 +008129 +008131 +008133 +008134 +008135 +008136 +008143 +008145 +008146 +008147 +008148 +008149 +008152 +008153 +008154 +008155 +008156 +008157 +008158 +008161 +008162 +008165 +008167 +008170 +008172 +008176 +008178 +008179 +008181 +008182 +008183 +008184 +008185 +008187 +008192 +008193 +008194 +008195 +008196 +008198 +008201 +008205 +008206 +008207 +008210 +008212 +008214 +008215 +008217 +008219 +008221 +008227 +008228 +008230 +008231 +008233 +008234 +008237 +008238 +008239 +008240 +008242 +008243 +008245 +008246 +008247 +008249 +008255 +008256 +008257 +008259 +008264 +008265 +008266 +008267 +008270 +008271 +008273 +008274 +008276 +008277 +008278 +008283 +008286 +008287 +008288 +008289 +008290 +008291 +008298 +008303 +008304 +008305 +008308 +008309 +008314 +008321 +008324 +008325 +008328 +008330 +008331 +008333 +008334 +008337 +008339 +008340 +008343 +008344 +008347 +008348 +008350 +008352 +008353 +008354 +008356 +008357 +008358 +008361 +008362 +008363 +008366 +008367 +008369 +008371 +008373 +008375 +008377 +008378 +008379 +008380 +008382 +008383 +008389 +008392 +008393 +008394 +008395 +008396 +008399 +008400 +008401 +008402 +008404 +008405 +008406 +008407 +008408 +008411 +008412 +008414 +008417 +008418 +008419 +008420 +008421 +008428 +008431 +008432 +008435 +008436 +008439 +008440 +008441 +008446 +008447 +008448 +008451 +008455 +008457 +008458 +008459 +008460 +008463 +008464 +008469 +008471 +008473 +008474 +008476 +008479 +008480 +008481 +008486 +008487 +008488 +008489 +008490 +008491 +008493 +008496 +008497 +008500 +008501 
+008504 +008505 +008507 +008508 +008510 +008511 +008515 +008516 +008520 +008525 +008527 +008528 +008531 +008532 +008537 +008538 +008539 +008540 +008543 +008544 +008545 +008546 +008547 +008548 +008551 +008552 +008554 +008555 +008560 +008561 +008563 +008565 +008566 +008567 +008569 +008570 +008571 +008574 +008575 +008577 +008578 +008579 +008580 +008583 +008589 +008590 +008591 +008593 +008594 +008597 +008598 +008599 +008600 +008603 +008605 +008609 +008611 +008613 +008614 +008616 +008619 +008622 +008623 +008625 +008626 +008627 +008629 +008630 +008631 +008632 +008634 +008637 +008640 +008641 +008642 +008643 +008646 +008648 +008649 +008650 +008651 +008652 +008656 +008657 +008658 +008659 +008660 +008661 +008662 +008664 +008666 +008668 +008669 +008671 +008672 +008673 +008674 +008675 +008677 +008678 +008679 +008681 +008682 +008684 +008685 +008686 +008689 +008693 +008694 +008696 +008697 +008700 +008703 +008704 +008705 +008707 +008708 +008711 +008712 +008714 +008715 +008719 +008721 +008724 +008726 +008729 +008734 +008735 +008736 +008737 +008740 +008743 +008745 +008746 +008751 +008754 +008758 +008761 +008762 +008763 +008765 +008767 +008774 +008777 +008778 +008779 +008780 +008781 +008782 +008785 +008786 +008787 +008788 +008789 +008791 +008792 +008795 +008797 +008798 +008800 +008802 +008803 +008804 +008807 +008808 +008812 +008816 +008818 +008820 +008821 +008824 +008825 +008827 +008828 +008829 +008830 +008832 +008834 +008839 +008842 +008844 +008845 +008846 +008850 +008851 +008852 +008853 +008855 +008857 +008860 +008861 +008863 +008864 +008866 +008868 +008869 +008870 +008875 +008877 +008881 +008882 +008887 +008889 +008893 +008894 +008895 +008896 +008897 +008898 +008899 +008901 +008902 +008903 +008904 +008906 +008907 +008908 +008910 +008912 +008915 +008916 +008918 +008922 +008924 +008925 +008928 +008934 +008935 +008937 +008938 +008941 +008945 +008946 +008947 +008949 +008950 +008952 +008954 +008956 +008957 +008959 +008963 +008964 +008972 +008974 +008977 +008981 +008984 +008986 +008990 +008991 +008992 +008993 +008994 +008996 +008998 +009001 +009003 +009008 +009009 +009010 +009011 +009012 +009013 +009014 +009017 +009021 +009023 +009025 +009026 +009028 +009030 +009031 +009033 +009038 +009040 +009041 +009043 +009044 +009046 +009047 +009050 +009052 +009054 +009055 +009056 +009057 +009061 +009062 +009065 +009067 +009069 +009070 +009071 +009074 +009075 +009076 +009077 +009081 +009082 +009083 +009084 +009088 +009090 +009092 +009093 +009095 +009096 +009097 +009101 +009102 +009103 +009104 +009107 +009109 +009110 +009111 +009115 +009118 +009119 +009120 +009122 +009124 +009125 +009127 +009130 +009132 +009134 +009135 +009137 +009139 +009140 +009142 +009143 +009145 +009146 +009149 +009152 +009154 +009156 +009158 +009164 +009165 +009167 +009169 +009170 +009171 +009172 +009176 +009182 +009183 +009188 +009190 +009198 +009199 +009201 +009203 +009204 +009206 +009207 +009210 +009211 +009216 +009217 +009219 +009220 +009222 +009223 +009225 +009226 +009228 +009229 +009231 +009232 +009233 +009234 +009235 +009237 +009240 +009241 +009243 +009248 +009253 +009256 +009257 +009258 +009260 +009261 +009262 +009263 +009264 +009265 +009266 +009267 +009274 +009275 +009276 +009277 +009280 +009284 +009292 +009293 +009294 +009297 +009298 +009300 +009301 +009302 +009304 +009305 +009310 +009311 +009313 +009314 +009317 +009319 +009320 +009321 +009322 +009328 +009329 +009332 +009335 +009338 +009340 +009341 +009344 +009345 +009346 +009352 +009353 +009355 +009356 +009357 +009360 +009361 +009363 +009364 +009366 +009367 +009369 +009370 +009372 +009376 
+009379 +009380 +009381 +009383 +009384 +009385 +009387 +009390 +009391 +009395 +009396 +009397 +009399 +009400 +009402 +009403 +009404 +009415 +009416 +009423 +009425 +009426 +009427 +009428 +009430 +009431 +009435 +009436 +009441 +009442 +009444 +009447 +009449 +009450 +009451 +009452 +009453 +009462 +009467 +009471 +009473 +009474 +009475 +009478 +009482 +009483 +009485 +009486 +009487 +009489 +009492 +009493 +009495 +009498 +009501 +009503 +009505 +009506 +009509 +009510 +009511 +009513 +009514 +009521 +009522 +009525 +009529 +009530 +009534 +009535 +009536 +009538 +009539 +009544 +009547 +009548 +009552 +009553 +009554 +009555 +009556 +009559 +009561 +009563 +009564 +009569 +009570 +009572 +009574 +009575 +009578 +009581 +009582 +009583 +009589 +009590 +009592 +009593 +009594 +009595 +009599 +009601 +009602 +009604 +009606 +009607 +009608 +009610 +009612 +009616 +009622 +009624 +009625 +009626 +009628 +009630 +009631 +009632 +009633 +009635 +009639 +009640 +009642 +009643 +009645 +009646 +009648 +009651 +009652 +009653 +009657 +009660 +009661 +009662 +009663 +009665 +009669 +009672 +009673 +009674 +009675 +009677 +009680 +009682 +009683 +009688 +009689 +009690 +009694 +009696 +009697 +009701 +009704 +009705 +009708 +009714 +009715 +009716 +009720 +009722 +009723 +009725 +009727 +009728 +009730 +009731 +009736 +009739 +009740 +009741 +009742 +009744 +009750 +009751 +009752 +009753 +009757 +009759 +009760 +009765 +009766 +009768 +009769 +009770 +009771 +009775 +009777 +009779 +009782 +009783 +009784 +009786 +009787 +009788 +009791 +009793 +009795 +009798 +009799 +009802 +009803 +009804 +009806 +009811 +009812 +009814 +009815 +009817 +009818 +009820 +009821 +009824 +009826 +009827 +009829 +009835 +009837 +009838 +009840 +009843 +009844 +009846 +009847 +009849 +009850 +009853 +009854 +009856 +009857 +009861 +009864 +009866 +009871 +009873 +009875 +009876 +009883 +009885 +009888 +009889 +009890 +009891 +009892 +009893 +009895 +009899 +009901 +009903 +009906 +009907 +009909 +009910 +009912 +009914 +009915 +009916 +009919 +009921 +009922 +009924 +009925 +009927 +009928 +009929 +009930 +009931 +009933 +009934 +009936 +009937 +009941 +009943 +009948 +009951 +009952 +009953 +009956 +009957 +009960 +009962 +009963 diff --git a/ImageSets/Main/train.txt b/ImageSets/Main/train.txt new file mode 100644 index 0000000..4647eec --- /dev/null +++ b/ImageSets/Main/train.txt @@ -0,0 +1,8218 @@ +000012 +000017 +000023 +000026 +000032 +000033 +000034 +000035 +000036 +000042 +000044 +000047 +000048 +000061 +000064 +000066 +000073 +000077 +000078 +000083 +000089 +000091 +000104 +000112 +000122 +000129 +000133 +000134 +000138 +000140 +000141 +000147 +000153 +000154 +000159 +000161 +000162 +000163 +000164 +000171 +000173 +000174 +000187 +000189 +000192 +000193 +000194 +000198 +000200 +000207 +000209 +000219 +000220 +000222 +000225 +000228 +000235 +000242 +000250 +000256 +000259 +000262 +000263 +000276 +000278 +000282 +000288 +000294 +000296 +000306 +000307 +000311 +000312 +000317 +000320 +000325 +000331 +000334 +000337 +000344 +000347 +000349 +000355 +000359 +000367 +000370 +000372 +000379 +000382 +000387 +000391 +000394 +000395 +000400 +000404 +000406 +000407 +000411 +000416 +000430 +000431 +000438 +000446 +000450 +000454 +000463 +000468 +000469 +000470 +000474 +000476 +000477 +000484 +000489 +000496 +000503 +000508 +000516 +000518 +000519 +000522 +000524 +000525 +000526 +000528 +000535 +000537 +000541 +000544 +000549 +000550 +000552 +000554 +000555 +000559 +000565 +000577 +000583 +000589 +000590 +000592 
+000597 +000605 +000609 +000612 +000620 +000622 +000625 +000632 +000633 +000635 +000648 +000654 +000657 +000671 +000672 +000680 +000685 +000688 +000689 +000695 +000699 +000700 +000709 +000710 +000711 +000726 +000729 +000731 +000733 +000739 +000740 +000753 +000754 +000761 +000764 +000767 +000768 +000770 +000774 +000793 +000796 +000804 +000805 +000810 +000812 +000818 +000820 +000822 +000823 +000827 +000828 +000829 +000830 +000845 +000849 +000850 +000851 +000859 +000860 +000865 +000867 +000871 +000887 +000888 +000889 +000892 +000898 +000899 +000900 +000902 +000904 +000906 +000908 +000912 +000915 +000919 +000929 +000936 +000943 +000950 +000951 +000954 +000958 +000962 +000964 +000965 +000966 +000967 +000977 +000980 +000987 +000989 +000991 +000993 +000996 +000997 +000999 +001001 +001002 +001008 +001010 +001011 +001014 +001015 +001024 +001036 +001043 +001050 +001057 +001060 +001064 +001068 +001071 +001073 +001077 +001078 +001079 +001082 +001101 +001106 +001112 +001113 +001119 +001127 +001129 +001130 +001140 +001147 +001151 +001152 +001156 +001158 +001168 +001171 +001172 +001174 +001182 +001191 +001194 +001204 +001205 +001207 +001209 +001212 +001214 +001226 +001229 +001230 +001234 +001237 +001239 +001240 +001248 +001258 +001263 +001268 +001269 +001270 +001273 +001279 +001287 +001294 +001299 +001304 +001309 +001312 +001314 +001315 +001323 +001325 +001326 +001327 +001332 +001333 +001334 +001345 +001346 +001348 +001364 +001365 +001378 +001384 +001385 +001388 +001390 +001393 +001395 +001402 +001404 +001405 +001406 +001408 +001409 +001414 +001418 +001420 +001421 +001426 +001427 +001434 +001436 +001442 +001450 +001451 +001453 +001455 +001457 +001468 +001470 +001479 +001480 +001483 +001485 +001486 +001488 +001492 +001494 +001498 +001499 +001501 +001504 +001512 +001515 +001517 +001521 +001524 +001526 +001528 +001529 +001532 +001539 +001548 +001555 +001556 +001557 +001559 +001563 +001576 +001579 +001580 +001586 +001590 +001593 +001594 +001597 +001604 +001607 +001610 +001611 +001612 +001622 +001627 +001630 +001633 +001636 +001643 +001649 +001650 +001651 +001654 +001661 +001662 +001669 +001673 +001676 +001680 +001683 +001684 +001688 +001690 +001699 +001707 +001708 +001711 +001713 +001714 +001717 +001721 +001723 +001729 +001732 +001733 +001734 +001738 +001739 +001741 +001750 +001752 +001754 +001758 +001759 +001761 +001765 +001766 +001768 +001777 +001780 +001787 +001789 +001800 +001806 +001809 +001810 +001821 +001825 +001828 +001832 +001834 +001836 +001840 +001841 +001843 +001845 +001853 +001854 +001858 +001861 +001864 +001870 +001881 +001892 +001894 +001896 +001898 +001902 +001903 +001904 +001906 +001915 +001922 +001928 +001930 +001937 +001938 +001941 +001945 +001950 +001952 +001954 +001960 +001963 +001971 +001977 +001978 +001980 +001981 +001985 +001989 +001995 +001999 +002001 +002002 +002004 +002006 +002012 +002015 +002020 +002025 +002027 +002034 +002037 +002039 +002042 +002043 +002047 +002049 +002051 +002055 +002056 +002061 +002068 +002069 +002095 +002096 +002104 +002108 +002116 +002117 +002120 +002126 +002132 +002134 +002139 +002151 +002153 +002155 +002156 +002158 +002166 +002170 +002172 +002176 +002178 +002179 +002180 +002182 +002186 +002187 +002191 +002192 +002193 +002194 +002196 +002197 +002199 +002208 +002212 +002215 +002219 +002221 +002224 +002234 +002237 +002238 +002241 +002247 +002249 +002253 +002255 +002256 +002260 +002265 +002277 +002279 +002280 +002284 +002287 +002291 +002293 +002306 +002307 +002310 +002311 +002315 +002318 +002320 +002321 +002323 +002334 +002335 +002342 +002347 +002350 +002354 
+002355 +002359 +002362 +002368 +002373 +002384 +002392 +002401 +002403 +002405 +002410 +002411 +002413 +002419 +002420 +002423 +002433 +002436 +002439 +002442 +002443 +002445 +002448 +002458 +002461 +002465 +002466 +002468 +002471 +002472 +002478 +002480 +002481 +002483 +002490 +002494 +002496 +002500 +002501 +002502 +002512 +002514 +002518 +002519 +002533 +002534 +002539 +002544 +002545 +002547 +002554 +002555 +002558 +002559 +002564 +002569 +002571 +002572 +002579 +002590 +002594 +002595 +002599 +002603 +002609 +002611 +002625 +002627 +002634 +002635 +002645 +002647 +002648 +002653 +002662 +002664 +002666 +002669 +002680 +002682 +002683 +002684 +002691 +002697 +002702 +002704 +002710 +002713 +002715 +002722 +002730 +002735 +002737 +002738 +002744 +002745 +002749 +002755 +002757 +002759 +002763 +002765 +002766 +002774 +002778 +002779 +002782 +002783 +002791 +002795 +002796 +002801 +002804 +002807 +002816 +002817 +002820 +002826 +002834 +002841 +002844 +002845 +002848 +002855 +002858 +002864 +002866 +002867 +002868 +002869 +002870 +002873 +002881 +002899 +002906 +002914 +002919 +002931 +002934 +002937 +002939 +002953 +002956 +002957 +002958 +002962 +002969 +002975 +002976 +002987 +002988 +002989 +002990 +002992 +002995 +003002 +003003 +003007 +003011 +003013 +003024 +003027 +003034 +003042 +003047 +003051 +003053 +003061 +003063 +003066 +003074 +003077 +003083 +003085 +003088 +003092 +003100 +003103 +003105 +003106 +003107 +003108 +003110 +003116 +003122 +003124 +003133 +003134 +003135 +003138 +003140 +003145 +003146 +003147 +003149 +003150 +003155 +003157 +003159 +003161 +003163 +003165 +003169 +003175 +003181 +003183 +003184 +003185 +003188 +003202 +003204 +003205 +003211 +003214 +003229 +003231 +003233 +003236 +003240 +003242 +003244 +003247 +003253 +003254 +003259 +003260 +003261 +003269 +003270 +003273 +003279 +003280 +003282 +003284 +003290 +003292 +003303 +003308 +003320 +003330 +003331 +003336 +003337 +003338 +003339 +003343 +003349 +003350 +003354 +003355 +003356 +003359 +003363 +003365 +003367 +003369 +003373 +003374 +003379 +003380 +003382 +003392 +003395 +003396 +003401 +003406 +003408 +003412 +003413 +003416 +003417 +003420 +003421 +003424 +003430 +003433 +003436 +003439 +003441 +003450 +003452 +003466 +003477 +003484 +003487 +003489 +003491 +003493 +003496 +003497 +003499 +003500 +003506 +003508 +003509 +003510 +003511 +003522 +003524 +003525 +003529 +003539 +003548 +003549 +003550 +003551 +003555 +003564 +003565 +003575 +003576 +003577 +003585 +003586 +003588 +003596 +003599 +003603 +003604 +003605 +003608 +003609 +003614 +003621 +003622 +003625 +003627 +003628 +003629 +003634 +003635 +003642 +003644 +003645 +003646 +003656 +003658 +003662 +003663 +003664 +003671 +003678 +003679 +003681 +003688 +003694 +003695 +003698 +003699 +003700 +003704 +003705 +003713 +003714 +003732 +003735 +003740 +003743 +003748 +003749 +003751 +003752 +003758 +003759 +003763 +003767 +003773 +003779 +003781 +003784 +003786 +003788 +003790 +003792 +003797 +003806 +003807 +003811 +003817 +003818 +003824 +003827 +003828 +003830 +003834 +003835 +003847 +003849 +003856 +003859 +003860 +003861 +003865 +003866 +003874 +003879 +003887 +003889 +003890 +003898 +003899 +003907 +003912 +003913 +003921 +003932 +003935 +003936 +003939 +003945 +003949 +003953 +003956 +003961 +003969 +003970 +003971 +003974 +003983 +003987 +003988 +003991 +003993 +003997 +003998 +004005 +004008 +004009 +004012 +004013 +004014 +004016 +004017 +004019 +004023 +004028 +004033 +004034 +004035 +004037 +004046 +004052 +004058 +004067 
+004091 +004092 +004093 +004095 +004100 +004106 +004111 +004120 +004121 +004129 +004131 +004133 +004136 +004137 +004138 +004140 +004146 +004149 +004152 +004158 +004163 +004164 +004168 +004169 +004170 +004171 +004189 +004190 +004196 +004200 +004201 +004209 +004215 +004220 +004221 +004223 +004224 +004228 +004231 +004232 +004237 +004241 +004242 +004244 +004247 +004253 +004255 +004256 +004263 +004269 +004270 +004271 +004272 +004273 +004280 +004281 +004283 +004287 +004291 +004292 +004296 +004300 +004303 +004307 +004315 +004318 +004322 +004325 +004327 +004333 +004338 +004339 +004345 +004347 +004359 +004360 +004361 +004365 +004367 +004370 +004371 +004372 +004376 +004379 +004386 +004387 +004389 +004391 +004392 +004404 +004434 +004436 +004439 +004441 +004452 +004470 +004471 +004479 +004481 +004484 +004496 +004500 +004502 +004508 +004510 +004514 +004517 +004519 +004520 +004524 +004526 +004537 +004540 +004544 +004548 +004549 +004551 +004553 +004562 +004563 +004565 +004566 +004570 +004571 +004576 +004579 +004584 +004587 +004591 +004595 +004597 +004604 +004605 +004607 +004611 +004612 +004622 +004623 +004625 +004627 +004628 +004631 +004634 +004636 +004643 +004644 +004648 +004651 +004656 +004671 +004675 +004679 +004683 +004685 +004686 +004687 +004691 +004693 +004694 +004701 +004702 +004705 +004706 +004710 +004714 +004715 +004718 +004723 +004735 +004737 +004742 +004743 +004747 +004748 +004753 +004754 +004760 +004773 +004776 +004779 +004782 +004783 +004790 +004792 +004793 +004794 +004797 +004799 +004801 +004808 +004815 +004823 +004828 +004830 +004832 +004836 +004837 +004841 +004842 +004846 +004848 +004849 +004857 +004869 +004873 +004876 +004879 +004882 +004885 +004897 +004898 +004902 +004905 +004907 +004910 +004911 +004913 +004929 +004946 +004951 +004955 +004958 +004961 +004962 +004966 +004968 +004972 +004973 +004974 +004976 +004984 +004987 +004990 +004991 +004992 +004995 +005001 +005004 +005006 +005007 +005016 +005018 +005020 +005023 +005024 +005026 +005027 +005029 +005032 +005033 +005045 +005047 +005052 +005057 +005058 +005061 +005065 +005068 +005071 +005073 +005078 +005084 +005086 +005090 +005093 +005094 +005097 +005101 +005107 +005108 +005114 +005121 +005122 +005124 +005129 +005130 +005134 +005138 +005143 +005153 +005156 +005168 +005169 +005171 +005173 +005177 +005181 +005183 +005186 +005189 +005190 +005191 +005202 +005203 +005208 +005215 +005217 +005219 +005223 +005231 +005236 +005244 +005245 +005246 +005257 +005258 +005259 +005260 +005262 +005269 +005273 +005283 +005285 +005288 +005290 +005292 +005297 +005303 +005304 +005307 +005310 +005311 +005318 +005327 +005336 +005337 +005338 +005344 +005345 +005351 +005358 +005360 +005363 +005368 +005369 +005373 +005374 +005387 +005388 +005389 +005391 +005396 +005404 +005405 +005406 +005408 +005410 +005413 +005414 +005417 +005420 +005424 +005433 +005440 +005445 +005448 +005450 +005451 +005453 +005455 +005457 +005467 +005478 +005483 +005487 +005489 +005496 +005499 +005508 +005509 +005511 +005514 +005515 +005519 +005524 +005526 +005527 +005536 +005541 +005542 +005544 +005547 +005563 +005566 +005568 +005574 +005579 +005582 +005585 +005591 +005592 +005599 +005600 +005601 +005603 +005605 +005609 +005611 +005624 +005625 +005630 +005631 +005636 +005637 +005639 +005644 +005648 +005654 +005658 +005668 +005669 +005680 +005686 +005695 +005697 +005699 +005700 +005704 +005705 +005710 +005713 +005715 +005718 +005728 +005730 +005731 +005735 +005738 +005740 +005742 +005752 +005756 +005757 +005764 +005765 +005769 +005780 +005782 +005783 +005784 +005786 +005789 +005796 +005803 
+005805 +005806 +005813 +005814 +005817 +005821 +005824 +005826 +005831 +005836 +005838 +005840 +005843 +005850 +005851 +005859 +005860 +005861 +005864 +005867 +005873 +005881 +005884 +005885 +005888 +005889 +005893 +005895 +005899 +005901 +005903 +005905 +005908 +005909 +005910 +005911 +005918 +005920 +005923 +005930 +005938 +005947 +005948 +005951 +005960 +005961 +005964 +005971 +005980 +005983 +005984 +005990 +005992 +006004 +006009 +006011 +006020 +006023 +006025 +006030 +006033 +006038 +006043 +006061 +006065 +006066 +006067 +006070 +006073 +006074 +006078 +006079 +006088 +006091 +006095 +006096 +006100 +006103 +006104 +006105 +006123 +006128 +006130 +006131 +006134 +006135 +006140 +006141 +006156 +006158 +006162 +006166 +006170 +006171 +006172 +006174 +006175 +006176 +006177 +006179 +006180 +006181 +006183 +006187 +006189 +006196 +006208 +006210 +006221 +006223 +006224 +006225 +006229 +006230 +006236 +006238 +006243 +006247 +006250 +006251 +006261 +006262 +006264 +006267 +006270 +006272 +006275 +006279 +006285 +006289 +006290 +006291 +006299 +006304 +006305 +006320 +006329 +006341 +006344 +006349 +006352 +006353 +006362 +006363 +006366 +006367 +006369 +006371 +006374 +006375 +006381 +006382 +006395 +006400 +006411 +006417 +006418 +006419 +006427 +006429 +006433 +006434 +006436 +006438 +006442 +006447 +006448 +006455 +006458 +006459 +006462 +006466 +006470 +006472 +006474 +006475 +006476 +006482 +006483 +006486 +006495 +006499 +006501 +006503 +006506 +006515 +006523 +006524 +006536 +006547 +006548 +006549 +006550 +006551 +006556 +006560 +006564 +006569 +006595 +006597 +006602 +006605 +006609 +006610 +006612 +006622 +006626 +006627 +006635 +006636 +006637 +006638 +006648 +006652 +006654 +006658 +006660 +006674 +006684 +006689 +006694 +006695 +006697 +006698 +006703 +006704 +006706 +006707 +006708 +006714 +006726 +006727 +006731 +006734 +006735 +006736 +006738 +006740 +006748 +006753 +006755 +006766 +006773 +006777 +006781 +006782 +006784 +006794 +006805 +006806 +006810 +006822 +006824 +006825 +006833 +006836 +006839 +006840 +006844 +006845 +006847 +006848 +006849 +006852 +006858 +006864 +006866 +006868 +006869 +006874 +006883 +006887 +006893 +006896 +006899 +006900 +006909 +006910 +006911 +006912 +006914 +006916 +006917 +006919 +006930 +006931 +006939 +006943 +006947 +006948 +006950 +006958 +006959 +006968 +006971 +006976 +006983 +007002 +007003 +007006 +007007 +007011 +007016 +007018 +007023 +007025 +007029 +007033 +007036 +007039 +007040 +007045 +007050 +007062 +007064 +007072 +007073 +007075 +007078 +007079 +007080 +007088 +007089 +007090 +007092 +007093 +007095 +007105 +007108 +007113 +007121 +007125 +007128 +007129 +007130 +007133 +007138 +007150 +007152 +007154 +007159 +007163 +007166 +007168 +007177 +007180 +007182 +007184 +007185 +007193 +007194 +007197 +007205 +007213 +007214 +007219 +007222 +007223 +007234 +007241 +007243 +007250 +007256 +007261 +007263 +007271 +007279 +007285 +007289 +007295 +007298 +007305 +007308 +007322 +007323 +007325 +007327 +007334 +007336 +007351 +007361 +007365 +007369 +007370 +007373 +007375 +007381 +007385 +007389 +007394 +007396 +007398 +007410 +007411 +007413 +007417 +007419 +007421 +007425 +007431 +007437 +007446 +007454 +007458 +007466 +007467 +007468 +007474 +007477 +007479 +007481 +007483 +007490 +007491 +007493 +007497 +007503 +007513 +007519 +007521 +007524 +007526 +007530 +007535 +007536 +007538 +007540 +007544 +007558 +007565 +007566 +007570 +007572 +007575 +007578 +007586 +007590 +007594 +007600 +007601 +007606 +007611 +007619 +007621 
+007629 +007631 +007633 +007637 +007653 +007654 +007655 +007663 +007667 +007683 +007685 +007692 +007696 +007697 +007699 +007704 +007713 +007718 +007721 +007729 +007731 +007735 +007736 +007740 +007748 +007749 +007751 +007753 +007762 +007767 +007775 +007777 +007781 +007790 +007791 +007795 +007803 +007809 +007810 +007814 +007819 +007820 +007821 +007831 +007836 +007838 +007840 +007847 +007853 +007854 +007859 +007863 +007864 +007872 +007876 +007877 +007878 +007883 +007884 +007885 +007898 +007900 +007901 +007905 +007908 +007910 +007911 +007914 +007915 +007923 +007925 +007926 +007932 +007939 +007940 +007953 +007959 +007963 +007964 +007968 +007974 +007976 +007980 +007991 +007996 +008001 +008004 +008005 +008008 +008012 +008017 +008019 +008026 +008037 +008040 +008042 +008043 +008044 +008049 +008051 +008053 +008062 +008063 +008064 +008067 +008072 +008075 +008076 +008079 +008082 +008083 +008084 +008093 +008095 +008096 +008098 +008106 +008108 +008116 +008117 +008121 +008127 +008130 +008137 +008139 +008142 +008150 +008163 +008164 +008166 +008169 +008174 +008186 +008188 +008197 +008199 +008202 +008203 +008204 +008211 +008213 +008216 +008218 +008223 +008226 +008232 +008235 +008248 +008250 +008252 +008253 +008254 +008260 +008261 +008262 +008263 +008269 +008272 +008280 +008282 +008296 +008301 +008302 +008310 +008311 +008312 +008313 +008315 +008316 +008317 +008322 +008332 +008336 +008338 +008341 +008342 +008346 +008351 +008360 +008372 +008374 +008381 +008384 +008385 +008388 +008391 +008397 +008398 +008403 +008409 +008422 +008425 +008426 +008427 +008437 +008442 +008443 +008445 +008449 +008452 +008453 +008456 +008462 +008465 +008466 +008467 +008468 +008470 +008475 +008477 +008478 +008482 +008483 +008495 +008506 +008517 +008523 +008529 +008530 +008533 +008536 +008549 +008550 +008558 +008559 +008568 +008581 +008585 +008587 +008588 +008595 +008596 +008602 +008610 +008615 +008617 +008618 +008628 +008633 +008645 +008655 +008663 +008665 +008670 +008676 +008688 +008690 +008691 +008699 +008702 +008706 +008710 +008720 +008723 +008725 +008727 +008731 +008732 +008738 +008741 +008744 +008748 +008750 +008755 +008756 +008757 +008760 +008764 +008768 +008770 +008771 +008776 +008783 +008784 +008790 +008794 +008806 +008809 +008811 +008813 +008814 +008815 +008819 +008838 +008840 +008841 +008847 +008856 +008862 +008865 +008872 +008878 +008879 +008883 +008885 +008886 +008891 +008900 +008905 +008909 +008920 +008923 +008926 +008929 +008930 +008932 +008933 +008936 +008939 +008944 +008948 +008958 +008960 +008961 +008962 +008966 +008967 +008968 +008969 +008970 +008971 +008973 +008975 +008978 +008979 +008980 +008985 +008987 +008988 +008989 +008995 +008999 +009000 +009004 +009005 +009016 +009018 +009020 +009027 +009029 +009032 +009036 +009042 +009045 +009049 +009058 +009059 +009063 +009066 +009068 +009073 +009078 +009080 +009086 +009098 +009099 +009100 +009106 +009108 +009114 +009117 +009121 +009123 +009136 +009144 +009148 +009153 +009160 +009161 +009166 +009173 +009175 +009181 +009184 +009185 +009191 +009196 +009197 +009200 +009205 +009208 +009209 +009214 +009215 +009218 +009227 +009230 +009238 +009242 +009245 +009251 +009252 +009255 +009259 +009269 +009270 +009271 +009272 +009283 +009285 +009287 +009288 +009289 +009290 +009295 +009296 +009299 +009306 +009307 +009308 +009316 +009318 +009324 +009325 +009327 +009333 +009336 +009339 +009342 +009343 +009358 +009359 +009362 +009365 +009377 +009386 +009388 +009389 +009392 +009393 +009394 +009398 +009406 +009407 +009409 +009410 +009411 +009413 +009417 +009418 +009419 +009420 +009421 +009422 
+009424 +009429 +009432 +009434 +009446 +009458 +009460 +009463 +009465 +009466 +009469 +009476 +009488 +009490 +009491 +009496 +009497 +009499 +009504 +009508 +009512 +009515 +009516 +009518 +009520 +009523 +009524 +009526 +009528 +009537 +009541 +009542 +009545 +009549 +009551 +009557 +009562 +009566 +009573 +009576 +009577 +009579 +009584 +009585 +009587 +009596 +009600 +009605 +009609 +009613 +009614 +009615 +009618 +009621 +009623 +009629 +009634 +009637 +009638 +009644 +009650 +009654 +009656 +009659 +009664 +009666 +009668 +009671 +009679 +009684 +009691 +009693 +009702 +009703 +009707 +009709 +009713 +009717 +009718 +009721 +009729 +009733 +009734 +009735 +009749 +009755 +009756 +009762 +009763 +009774 +009776 +009789 +009790 +009792 +009797 +009800 +009805 +009807 +009808 +009810 +009813 +009825 +009828 +009830 +009832 +009834 +009839 +009842 +009845 +009848 +009851 +009852 +009855 +009859 +009860 +009867 +009868 +009869 +009872 +009874 +009877 +009878 +009879 +009882 +009884 +009887 +009896 +009904 +009911 +009918 +009920 +009926 +009938 +009940 +009942 +009944 +009945 +009949 +009959 +009961 +2008_000008 +2008_000015 +2008_000019 +2008_000023 +2008_000028 +2008_000033 +2008_000036 +2008_000037 +2008_000041 +2008_000045 +2008_000053 +2008_000060 +2008_000066 +2008_000070 +2008_000074 +2008_000085 +2008_000089 +2008_000093 +2008_000095 +2008_000096 +2008_000097 +2008_000099 +2008_000103 +2008_000105 +2008_000109 +2008_000112 +2008_000128 +2008_000131 +2008_000132 +2008_000141 +2008_000142 +2008_000143 +2008_000144 +2008_000148 +2008_000151 +2008_000154 +2008_000162 +2008_000176 +2008_000181 +2008_000185 +2008_000187 +2008_000188 +2008_000189 +2008_000191 +2008_000192 +2008_000193 +2008_000196 +2008_000197 +2008_000199 +2008_000202 +2008_000207 +2008_000217 +2008_000226 +2008_000227 +2008_000235 +2008_000236 +2008_000237 +2008_000238 +2008_000252 +2008_000255 +2008_000259 +2008_000260 +2008_000262 +2008_000266 +2008_000273 +2008_000275 +2008_000283 +2008_000284 +2008_000287 +2008_000289 +2008_000290 +2008_000291 +2008_000297 +2008_000309 +2008_000311 +2008_000313 +2008_000315 +2008_000316 +2008_000318 +2008_000330 +2008_000335 +2008_000336 +2008_000338 +2008_000342 +2008_000343 +2008_000346 +2008_000348 +2008_000350 +2008_000356 +2008_000361 +2008_000364 +2008_000365 +2008_000371 +2008_000380 +2008_000392 +2008_000393 +2008_000397 +2008_000399 +2008_000400 +2008_000405 +2008_000415 +2008_000416 +2008_000421 +2008_000422 +2008_000426 +2008_000428 +2008_000432 +2008_000435 +2008_000436 +2008_000437 +2008_000442 +2008_000443 +2008_000445 +2008_000447 +2008_000448 +2008_000455 +2008_000461 +2008_000470 +2008_000471 +2008_000480 +2008_000488 +2008_000491 +2008_000493 +2008_000495 +2008_000499 +2008_000502 +2008_000505 +2008_000512 +2008_000514 +2008_000515 +2008_000527 +2008_000531 +2008_000540 +2008_000544 +2008_000545 +2008_000548 +2008_000552 +2008_000559 +2008_000561 +2008_000563 +2008_000567 +2008_000572 +2008_000578 +2008_000583 +2008_000584 +2008_000585 +2008_000588 +2008_000595 +2008_000607 +2008_000613 +2008_000615 +2008_000619 +2008_000626 +2008_000628 +2008_000636 +2008_000641 +2008_000645 +2008_000646 +2008_000648 +2008_000650 +2008_000655 +2008_000669 +2008_000672 +2008_000674 +2008_000676 +2008_000678 +2008_000683 +2008_000689 +2008_000694 +2008_000696 +2008_000703 +2008_000704 +2008_000711 +2008_000716 +2008_000719 +2008_000721 +2008_000723 +2008_000724 +2008_000726 +2008_000729 +2008_000732 +2008_000733 +2008_000742 +2008_000753 +2008_000756 +2008_000758 +2008_000760 
+2008_000761 +2008_000764 +2008_000775 +2008_000777 +2008_000778 +2008_000785 +2008_000787 +2008_000790 +2008_000792 +2008_000798 +2008_000801 +2008_000808 +2008_000814 +2008_000815 +2008_000824 +2008_000829 +2008_000832 +2008_000833 +2008_000841 +2008_000842 +2008_000844 +2008_000847 +2008_000851 +2008_000854 +2008_000860 +2008_000861 +2008_000867 +2008_000870 +2008_000873 +2008_000875 +2008_000881 +2008_000883 +2008_000887 +2008_000899 +2008_000901 +2008_000902 +2008_000905 +2008_000908 +2008_000912 +2008_000914 +2008_000915 +2008_000923 +2008_000924 +2008_000928 +2008_000934 +2008_000941 +2008_000944 +2008_000953 +2008_000959 +2008_000970 +2008_000973 +2008_000979 +2008_000981 +2008_000985 +2008_000987 +2008_000999 +2008_001018 +2008_001020 +2008_001021 +2008_001022 +2008_001023 +2008_001026 +2008_001030 +2008_001031 +2008_001035 +2008_001036 +2008_001039 +2008_001042 +2008_001047 +2008_001048 +2008_001052 +2008_001054 +2008_001056 +2008_001057 +2008_001071 +2008_001073 +2008_001081 +2008_001083 +2008_001104 +2008_001105 +2008_001106 +2008_001112 +2008_001115 +2008_001118 +2008_001119 +2008_001130 +2008_001133 +2008_001134 +2008_001137 +2008_001143 +2008_001147 +2008_001158 +2008_001159 +2008_001161 +2008_001164 +2008_001169 +2008_001171 +2008_001182 +2008_001188 +2008_001189 +2008_001190 +2008_001196 +2008_001202 +2008_001203 +2008_001206 +2008_001208 +2008_001215 +2008_001219 +2008_001223 +2008_001230 +2008_001235 +2008_001238 +2008_001245 +2008_001263 +2008_001267 +2008_001272 +2008_001274 +2008_001278 +2008_001285 +2008_001294 +2008_001299 +2008_001302 +2008_001307 +2008_001310 +2008_001312 +2008_001325 +2008_001329 +2008_001335 +2008_001336 +2008_001346 +2008_001351 +2008_001357 +2008_001358 +2008_001359 +2008_001373 +2008_001375 +2008_001382 +2008_001383 +2008_001385 +2008_001387 +2008_001389 +2008_001390 +2008_001399 +2008_001402 +2008_001405 +2008_001408 +2008_001413 +2008_001414 +2008_001419 +2008_001420 +2008_001431 +2008_001434 +2008_001440 +2008_001444 +2008_001446 +2008_001448 +2008_001454 +2008_001455 +2008_001460 +2008_001461 +2008_001462 +2008_001464 +2008_001467 +2008_001479 +2008_001482 +2008_001488 +2008_001493 +2008_001495 +2008_001498 +2008_001500 +2008_001501 +2008_001510 +2008_001523 +2008_001525 +2008_001529 +2008_001533 +2008_001538 +2008_001541 +2008_001550 +2008_001563 +2008_001566 +2008_001576 +2008_001577 +2008_001582 +2008_001591 +2008_001592 +2008_001601 +2008_001609 +2008_001610 +2008_001615 +2008_001617 +2008_001620 +2008_001626 +2008_001631 +2008_001632 +2008_001641 +2008_001643 +2008_001645 +2008_001652 +2008_001653 +2008_001661 +2008_001666 +2008_001670 +2008_001673 +2008_001679 +2008_001690 +2008_001691 +2008_001692 +2008_001694 +2008_001699 +2008_001704 +2008_001706 +2008_001708 +2008_001709 +2008_001710 +2008_001716 +2008_001719 +2008_001724 +2008_001729 +2008_001735 +2008_001737 +2008_001741 +2008_001744 +2008_001746 +2008_001751 +2008_001758 +2008_001761 +2008_001770 +2008_001775 +2008_001781 +2008_001783 +2008_001787 +2008_001789 +2008_001791 +2008_001796 +2008_001797 +2008_001801 +2008_001809 +2008_001811 +2008_001813 +2008_001829 +2008_001832 +2008_001834 +2008_001836 +2008_001837 +2008_001842 +2008_001845 +2008_001849 +2008_001852 +2008_001854 +2008_001856 +2008_001860 +2008_001865 +2008_001866 +2008_001872 +2008_001876 +2008_001880 +2008_001881 +2008_001882 +2008_001888 +2008_001894 +2008_001896 +2008_001903 +2008_001911 +2008_001921 +2008_001926 +2008_001929 +2008_001937 +2008_001941 +2008_001947 +2008_001955 +2008_001956 +2008_001957 
+2008_001967 +2008_001970 +2008_001977 +2008_001980 +2008_001982 +2008_001986 +2008_001997 +2008_002000 +2008_002001 +2008_002002 +2008_002005 +2008_002009 +2008_002023 +2008_002032 +2008_002033 +2008_002056 +2008_002058 +2008_002061 +2008_002062 +2008_002064 +2008_002066 +2008_002067 +2008_002073 +2008_002079 +2008_002080 +2008_002093 +2008_002094 +2008_002096 +2008_002103 +2008_002112 +2008_002116 +2008_002117 +2008_002118 +2008_002119 +2008_002123 +2008_002129 +2008_002131 +2008_002145 +2008_002148 +2008_002150 +2008_002156 +2008_002160 +2008_002162 +2008_002175 +2008_002177 +2008_002181 +2008_002182 +2008_002195 +2008_002197 +2008_002200 +2008_002202 +2008_002204 +2008_002206 +2008_002208 +2008_002210 +2008_002215 +2008_002218 +2008_002220 +2008_002221 +2008_002225 +2008_002227 +2008_002229 +2008_002236 +2008_002243 +2008_002244 +2008_002247 +2008_002248 +2008_002251 +2008_002255 +2008_002258 +2008_002262 +2008_002270 +2008_002278 +2008_002279 +2008_002280 +2008_002281 +2008_002288 +2008_002294 +2008_002296 +2008_002299 +2008_002304 +2008_002307 +2008_002311 +2008_002317 +2008_002325 +2008_002327 +2008_002329 +2008_002331 +2008_002335 +2008_002338 +2008_002340 +2008_002343 +2008_002344 +2008_002350 +2008_002357 +2008_002361 +2008_002362 +2008_002365 +2008_002368 +2008_002369 +2008_002370 +2008_002377 +2008_002389 +2008_002399 +2008_002405 +2008_002411 +2008_002418 +2008_002422 +2008_002425 +2008_002434 +2008_002437 +2008_002441 +2008_002442 +2008_002448 +2008_002457 +2008_002458 +2008_002459 +2008_002461 +2008_002465 +2008_002466 +2008_002471 +2008_002473 +2008_002482 +2008_002484 +2008_002487 +2008_002491 +2008_002501 +2008_002506 +2008_002514 +2008_002515 +2008_002524 +2008_002533 +2008_002541 +2008_002543 +2008_002547 +2008_002549 +2008_002551 +2008_002555 +2008_002562 +2008_002566 +2008_002568 +2008_002574 +2008_002575 +2008_002578 +2008_002583 +2008_002584 +2008_002601 +2008_002612 +2008_002613 +2008_002621 +2008_002622 +2008_002625 +2008_002634 +2008_002638 +2008_002641 +2008_002647 +2008_002648 +2008_002649 +2008_002650 +2008_002662 +2008_002665 +2008_002666 +2008_002668 +2008_002672 +2008_002674 +2008_002675 +2008_002676 +2008_002679 +2008_002686 +2008_002697 +2008_002698 +2008_002704 +2008_002710 +2008_002712 +2008_002718 +2008_002719 +2008_002728 +2008_002730 +2008_002733 +2008_002736 +2008_002741 +2008_002749 +2008_002750 +2008_002758 +2008_002760 +2008_002762 +2008_002767 +2008_002772 +2008_002774 +2008_002776 +2008_002784 +2008_002787 +2008_002791 +2008_002793 +2008_002794 +2008_002804 +2008_002808 +2008_002813 +2008_002823 +2008_002834 +2008_002842 +2008_002848 +2008_002850 +2008_002854 +2008_002856 +2008_002857 +2008_002866 +2008_002868 +2008_002872 +2008_002873 +2008_002880 +2008_002885 +2008_002887 +2008_002890 +2008_002891 +2008_002892 +2008_002894 +2008_002903 +2008_002913 +2008_002916 +2008_002917 +2008_002922 +2008_002930 +2008_002931 +2008_002943 +2008_002948 +2008_002951 +2008_002954 +2008_002955 +2008_002957 +2008_002960 +2008_002961 +2008_002966 +2008_002970 +2008_002972 +2008_002977 +2008_002983 +2008_002984 +2008_002985 +2008_002988 +2008_002993 +2008_002997 +2008_003013 +2008_003015 +2008_003017 +2008_003018 +2008_003021 +2008_003023 +2008_003025 +2008_003033 +2008_003037 +2008_003039 +2008_003041 +2008_003043 +2008_003048 +2008_003049 +2008_003057 +2008_003059 +2008_003060 +2008_003061 +2008_003063 +2008_003065 +2008_003068 +2008_003075 +2008_003079 +2008_003081 +2008_003083 +2008_003087 +2008_003093 +2008_003094 +2008_003099 +2008_003101 +2008_003112 
+2008_003114 +2008_003120 +2008_003122 +2008_003127 +2008_003128 +2008_003134 +2008_003140 +2008_003143 +2008_003146 +2008_003147 +2008_003151 +2008_003154 +2008_003157 +2008_003160 +2008_003168 +2008_003180 +2008_003182 +2008_003191 +2008_003196 +2008_003200 +2008_003203 +2008_003208 +2008_003209 +2008_003213 +2008_003224 +2008_003231 +2008_003242 +2008_003244 +2008_003248 +2008_003251 +2008_003252 +2008_003261 +2008_003264 +2008_003265 +2008_003266 +2008_003269 +2008_003272 +2008_003275 +2008_003276 +2008_003277 +2008_003283 +2008_003287 +2008_003288 +2008_003290 +2008_003297 +2008_003302 +2008_003303 +2008_003304 +2008_003311 +2008_003313 +2008_003318 +2008_003321 +2008_003323 +2008_003329 +2008_003335 +2008_003338 +2008_003342 +2008_003347 +2008_003360 +2008_003362 +2008_003373 +2008_003378 +2008_003380 +2008_003381 +2008_003386 +2008_003393 +2008_003394 +2008_003406 +2008_003409 +2008_003414 +2008_003415 +2008_003417 +2008_003418 +2008_003426 +2008_003429 +2008_003430 +2008_003434 +2008_003435 +2008_003437 +2008_003447 +2008_003448 +2008_003452 +2008_003458 +2008_003462 +2008_003463 +2008_003469 +2008_003478 +2008_003480 +2008_003485 +2008_003488 +2008_003489 +2008_003496 +2008_003497 +2008_003498 +2008_003500 +2008_003501 +2008_003504 +2008_003507 +2008_003510 +2008_003515 +2008_003520 +2008_003521 +2008_003522 +2008_003523 +2008_003533 +2008_003534 +2008_003544 +2008_003559 +2008_003560 +2008_003562 +2008_003571 +2008_003575 +2008_003578 +2008_003579 +2008_003582 +2008_003585 +2008_003587 +2008_003589 +2008_003590 +2008_003596 +2008_003608 +2008_003611 +2008_003617 +2008_003622 +2008_003626 +2008_003629 +2008_003635 +2008_003637 +2008_003645 +2008_003652 +2008_003653 +2008_003655 +2008_003659 +2008_003665 +2008_003667 +2008_003674 +2008_003675 +2008_003677 +2008_003682 +2008_003685 +2008_003688 +2008_003689 +2008_003691 +2008_003697 +2008_003701 +2008_003703 +2008_003706 +2008_003707 +2008_003712 +2008_003719 +2008_003726 +2008_003729 +2008_003732 +2008_003746 +2008_003748 +2008_003761 +2008_003762 +2008_003764 +2008_003769 +2008_003773 +2008_003774 +2008_003776 +2008_003779 +2008_003781 +2008_003788 +2008_003791 +2008_003796 +2008_003802 +2008_003811 +2008_003814 +2008_003815 +2008_003819 +2008_003831 +2008_003841 +2008_003842 +2008_003847 +2008_003849 +2008_003852 +2008_003854 +2008_003864 +2008_003866 +2008_003870 +2008_003871 +2008_003882 +2008_003883 +2008_003888 +2008_003891 +2008_003892 +2008_003908 +2008_003913 +2008_003914 +2008_003916 +2008_003920 +2008_003922 +2008_003925 +2008_003939 +2008_003942 +2008_003947 +2008_003956 +2008_003966 +2008_003967 +2008_003970 +2008_003974 +2008_003975 +2008_003978 +2008_003984 +2008_003985 +2008_003986 +2008_003992 +2008_003995 +2008_003998 +2008_004000 +2008_004004 +2008_004008 +2008_004014 +2008_004017 +2008_004021 +2008_004022 +2008_004024 +2008_004026 +2008_004036 +2008_004037 +2008_004042 +2008_004044 +2008_004053 +2008_004055 +2008_004066 +2008_004074 +2008_004077 +2008_004080 +2008_004084 +2008_004087 +2008_004092 +2008_004097 +2008_004100 +2008_004102 +2008_004106 +2008_004110 +2008_004112 +2008_004113 +2008_004120 +2008_004122 +2008_004130 +2008_004134 +2008_004138 +2008_004145 +2008_004147 +2008_004148 +2008_004161 +2008_004163 +2008_004165 +2008_004171 +2008_004176 +2008_004195 +2008_004196 +2008_004201 +2008_004208 +2008_004217 +2008_004218 +2008_004224 +2008_004231 +2008_004232 +2008_004235 +2008_004239 +2008_004246 +2008_004247 +2008_004259 +2008_004265 +2008_004269 +2008_004274 +2008_004276 +2008_004280 +2008_004284 
+2008_004287 +2008_004288 +2008_004291 +2008_004293 +2008_004296 +2008_004301 +2008_004303 +2008_004307 +2008_004313 +2008_004314 +2008_004319 +2008_004321 +2008_004325 +2008_004328 +2008_004331 +2008_004342 +2008_004353 +2008_004358 +2008_004362 +2008_004365 +2008_004372 +2008_004376 +2008_004378 +2008_004380 +2008_004385 +2008_004387 +2008_004398 +2008_004403 +2008_004410 +2008_004411 +2008_004412 +2008_004416 +2008_004418 +2008_004428 +2008_004430 +2008_004435 +2008_004436 +2008_004439 +2008_004441 +2008_004443 +2008_004450 +2008_004452 +2008_004457 +2008_004458 +2008_004462 +2008_004480 +2008_004488 +2008_004490 +2008_004492 +2008_004493 +2008_004499 +2008_004505 +2008_004506 +2008_004512 +2008_004513 +2008_004515 +2008_004518 +2008_004519 +2008_004532 +2008_004539 +2008_004544 +2008_004545 +2008_004547 +2008_004551 +2008_004559 +2008_004567 +2008_004568 +2008_004570 +2008_004574 +2008_004581 +2008_004583 +2008_004584 +2008_004585 +2008_004588 +2008_004590 +2008_004593 +2008_004602 +2008_004603 +2008_004607 +2008_004611 +2008_004616 +2008_004617 +2008_004620 +2008_004629 +2008_004631 +2008_004634 +2008_004635 +2008_004636 +2008_004648 +2008_004649 +2008_004661 +2008_004663 +2008_004666 +2008_004667 +2008_004668 +2008_004671 +2008_004672 +2008_004677 +2008_004678 +2008_004679 +2008_004690 +2008_004692 +2008_004697 +2008_004703 +2008_004707 +2008_004713 +2008_004719 +2008_004725 +2008_004732 +2008_004739 +2008_004749 +2008_004750 +2008_004752 +2008_004763 +2008_004764 +2008_004767 +2008_004770 +2008_004776 +2008_004777 +2008_004781 +2008_004783 +2008_004786 +2008_004802 +2008_004804 +2008_004807 +2008_004808 +2008_004821 +2008_004822 +2008_004827 +2008_004834 +2008_004838 +2008_004841 +2008_004844 +2008_004845 +2008_004847 +2008_004849 +2008_004850 +2008_004856 +2008_004858 +2008_004868 +2008_004869 +2008_004872 +2008_004874 +2008_004876 +2008_004892 +2008_004893 +2008_004899 +2008_004903 +2008_004908 +2008_004911 +2008_004914 +2008_004917 +2008_004920 +2008_004931 +2008_004934 +2008_004937 +2008_004938 +2008_004945 +2008_004946 +2008_004950 +2008_004961 +2008_004964 +2008_004966 +2008_004969 +2008_004970 +2008_004973 +2008_004976 +2008_004977 +2008_004981 +2008_004983 +2008_004985 +2008_004990 +2008_004991 +2008_004998 +2008_005000 +2008_005006 +2008_005013 +2008_005016 +2008_005033 +2008_005036 +2008_005040 +2008_005042 +2008_005045 +2008_005051 +2008_005055 +2008_005064 +2008_005066 +2008_005071 +2008_005074 +2008_005078 +2008_005080 +2008_005081 +2008_005082 +2008_005084 +2008_005088 +2008_005090 +2008_005094 +2008_005101 +2008_005108 +2008_005115 +2008_005127 +2008_005132 +2008_005133 +2008_005134 +2008_005136 +2008_005137 +2008_005146 +2008_005150 +2008_005158 +2008_005159 +2008_005168 +2008_005171 +2008_005172 +2008_005174 +2008_005178 +2008_005186 +2008_005193 +2008_005196 +2008_005201 +2008_005209 +2008_005213 +2008_005214 +2008_005216 +2008_005218 +2008_005220 +2008_005221 +2008_005231 +2008_005234 +2008_005236 +2008_005240 +2008_005247 +2008_005248 +2008_005250 +2008_005266 +2008_005269 +2008_005271 +2008_005279 +2008_005281 +2008_005283 +2008_005294 +2008_005295 +2008_005296 +2008_005297 +2008_005300 +2008_005303 +2008_005310 +2008_005315 +2008_005321 +2008_005324 +2008_005325 +2008_005329 +2008_005331 +2008_005333 +2008_005336 +2008_005342 +2008_005345 +2008_005349 +2008_005350 +2008_005354 +2008_005357 +2008_005362 +2008_005363 +2008_005365 +2008_005367 +2008_005375 +2008_005376 +2008_005380 +2008_005382 +2008_005386 +2008_005389 +2008_005395 +2008_005396 +2008_005400 
+2008_005405 +2008_005408 +2008_005412 +2008_005414 +2008_005415 +2008_005429 +2008_005433 +2008_005443 +2008_005449 +2008_005451 +2008_005456 +2008_005463 +2008_005465 +2008_005473 +2008_005477 +2008_005484 +2008_005491 +2008_005494 +2008_005496 +2008_005500 +2008_005502 +2008_005505 +2008_005507 +2008_005512 +2008_005514 +2008_005517 +2008_005519 +2008_005521 +2008_005523 +2008_005526 +2008_005527 +2008_005531 +2008_005536 +2008_005541 +2008_005549 +2008_005558 +2008_005560 +2008_005561 +2008_005567 +2008_005569 +2008_005570 +2008_005572 +2008_005584 +2008_005589 +2008_005591 +2008_005593 +2008_005600 +2008_005603 +2008_005609 +2008_005610 +2008_005616 +2008_005618 +2008_005623 +2008_005625 +2008_005626 +2008_005634 +2008_005636 +2008_005639 +2008_005641 +2008_005650 +2008_005653 +2008_005656 +2008_005668 +2008_005673 +2008_005675 +2008_005678 +2008_005679 +2008_005682 +2008_005683 +2008_005698 +2008_005705 +2008_005706 +2008_005707 +2008_005713 +2008_005714 +2008_005716 +2008_005719 +2008_005724 +2008_005728 +2008_005736 +2008_005737 +2008_005739 +2008_005742 +2008_005747 +2008_005752 +2008_005757 +2008_005758 +2008_005761 +2008_005767 +2008_005770 +2008_005780 +2008_005791 +2008_005794 +2008_005800 +2008_005803 +2008_005810 +2008_005817 +2008_005818 +2008_005822 +2008_005823 +2008_005832 +2008_005834 +2008_005839 +2008_005843 +2008_005845 +2008_005850 +2008_005853 +2008_005855 +2008_005856 +2008_005867 +2008_005871 +2008_005873 +2008_005874 +2008_005878 +2008_005882 +2008_005890 +2008_005891 +2008_005893 +2008_005897 +2008_005902 +2008_005903 +2008_005905 +2008_005916 +2008_005921 +2008_005923 +2008_005926 +2008_005929 +2008_005935 +2008_005937 +2008_005938 +2008_005945 +2008_005954 +2008_005956 +2008_005959 +2008_005960 +2008_005967 +2008_005968 +2008_005972 +2008_005976 +2008_005979 +2008_005982 +2008_005991 +2008_005997 +2008_006000 +2008_006004 +2008_006014 +2008_006020 +2008_006032 +2008_006039 +2008_006046 +2008_006049 +2008_006062 +2008_006064 +2008_006065 +2008_006067 +2008_006070 +2008_006074 +2008_006076 +2008_006078 +2008_006081 +2008_006085 +2008_006090 +2008_006092 +2008_006096 +2008_006099 +2008_006102 +2008_006111 +2008_006119 +2008_006121 +2008_006124 +2008_006128 +2008_006129 +2008_006133 +2008_006135 +2008_006136 +2008_006140 +2008_006145 +2008_006152 +2008_006158 +2008_006164 +2008_006170 +2008_006181 +2008_006182 +2008_006186 +2008_006188 +2008_006192 +2008_006194 +2008_006195 +2008_006210 +2008_006211 +2008_006213 +2008_006215 +2008_006220 +2008_006221 +2008_006224 +2008_006225 +2008_006232 +2008_006234 +2008_006235 +2008_006242 +2008_006244 +2008_006249 +2008_006250 +2008_006253 +2008_006256 +2008_006257 +2008_006258 +2008_006265 +2008_006271 +2008_006272 +2008_006273 +2008_006276 +2008_006280 +2008_006281 +2008_006289 +2008_006294 +2008_006295 +2008_006300 +2008_006315 +2008_006317 +2008_006323 +2008_006329 +2008_006331 +2008_006335 +2008_006336 +2008_006339 +2008_006345 +2008_006349 +2008_006350 +2008_006351 +2008_006353 +2008_006355 +2008_006361 +2008_006364 +2008_006365 +2008_006369 +2008_006370 +2008_006373 +2008_006376 +2008_006384 +2008_006386 +2008_006387 +2008_006389 +2008_006390 +2008_006400 +2008_006401 +2008_006404 +2008_006410 +2008_006417 +2008_006419 +2008_006421 +2008_006427 +2008_006430 +2008_006432 +2008_006433 +2008_006434 +2008_006436 +2008_006447 +2008_006448 +2008_006461 +2008_006462 +2008_006470 +2008_006474 +2008_006475 +2008_006481 +2008_006482 +2008_006483 +2008_006488 +2008_006490 +2008_006491 +2008_006496 +2008_006497 +2008_006500 
+2008_006506 +2008_006509 +2008_006511 +2008_006512 +2008_006522 +2008_006538 +2008_006540 +2008_006543 +2008_006546 +2008_006549 +2008_006558 +2008_006561 +2008_006562 +2008_006564 +2008_006566 +2008_006567 +2008_006570 +2008_006578 +2008_006579 +2008_006585 +2008_006586 +2008_006598 +2008_006599 +2008_006602 +2008_006606 +2008_006610 +2008_006613 +2008_006619 +2008_006623 +2008_006624 +2008_006625 +2008_006626 +2008_006629 +2008_006634 +2008_006637 +2008_006638 +2008_006641 +2008_006645 +2008_006649 +2008_006650 +2008_006654 +2008_006655 +2008_006657 +2008_006663 +2008_006667 +2008_006668 +2008_006677 +2008_006682 +2008_006691 +2008_006692 +2008_006700 +2008_006705 +2008_006712 +2008_006715 +2008_006717 +2008_006718 +2008_006719 +2008_006720 +2008_006724 +2008_006728 +2008_006730 +2008_006733 +2008_006737 +2008_006748 +2008_006750 +2008_006751 +2008_006753 +2008_006761 +2008_006762 +2008_006764 +2008_006767 +2008_006778 +2008_006785 +2008_006802 +2008_006807 +2008_006808 +2008_006810 +2008_006818 +2008_006819 +2008_006820 +2008_006827 +2008_006832 +2008_006834 +2008_006843 +2008_006847 +2008_006857 +2008_006864 +2008_006865 +2008_006868 +2008_006872 +2008_006873 +2008_006877 +2008_006879 +2008_006881 +2008_006882 +2008_006889 +2008_006898 +2008_006902 +2008_006903 +2008_006908 +2008_006909 +2008_006910 +2008_006919 +2008_006920 +2008_006921 +2008_006923 +2008_006926 +2008_006933 +2008_006936 +2008_006946 +2008_006950 +2008_006953 +2008_006954 +2008_006960 +2008_006961 +2008_006962 +2008_006965 +2008_006969 +2008_006973 +2008_006992 +2008_007003 +2008_007004 +2008_007009 +2008_007011 +2008_007012 +2008_007014 +2008_007022 +2008_007026 +2008_007028 +2008_007030 +2008_007038 +2008_007039 +2008_007043 +2008_007045 +2008_007054 +2008_007058 +2008_007060 +2008_007061 +2008_007069 +2008_007073 +2008_007075 +2008_007076 +2008_007081 +2008_007082 +2008_007085 +2008_007090 +2008_007095 +2008_007097 +2008_007098 +2008_007101 +2008_007106 +2008_007115 +2008_007118 +2008_007124 +2008_007129 +2008_007131 +2008_007138 +2008_007142 +2008_007145 +2008_007146 +2008_007147 +2008_007151 +2008_007156 +2008_007161 +2008_007165 +2008_007168 +2008_007169 +2008_007179 +2008_007185 +2008_007197 +2008_007201 +2008_007205 +2008_007208 +2008_007211 +2008_007217 +2008_007218 +2008_007221 +2008_007223 +2008_007226 +2008_007236 +2008_007237 +2008_007239 +2008_007242 +2008_007245 +2008_007246 +2008_007252 +2008_007254 +2008_007260 +2008_007261 +2008_007265 +2008_007274 +2008_007280 +2008_007281 +2008_007286 +2008_007289 +2008_007291 +2008_007298 +2008_007307 +2008_007312 +2008_007313 +2008_007320 +2008_007321 +2008_007325 +2008_007335 +2008_007343 +2008_007346 +2008_007353 +2008_007356 +2008_007357 +2008_007361 +2008_007363 +2008_007364 +2008_007375 +2008_007382 +2008_007383 +2008_007388 +2008_007394 +2008_007397 +2008_007410 +2008_007421 +2008_007423 +2008_007424 +2008_007425 +2008_007428 +2008_007432 +2008_007433 +2008_007438 +2008_007442 +2008_007443 +2008_007444 +2008_007448 +2008_007456 +2008_007465 +2008_007469 +2008_007470 +2008_007471 +2008_007472 +2008_007473 +2008_007477 +2008_007485 +2008_007486 +2008_007491 +2008_007496 +2008_007500 +2008_007504 +2008_007509 +2008_007510 +2008_007511 +2008_007515 +2008_007519 +2008_007524 +2008_007528 +2008_007533 +2008_007537 +2008_007544 +2008_007546 +2008_007556 +2008_007559 +2008_007565 +2008_007573 +2008_007576 +2008_007581 +2008_007584 +2008_007588 +2008_007589 +2008_007593 +2008_007597 +2008_007604 +2008_007608 +2008_007611 +2008_007613 +2008_007621 +2008_007625 
+2008_007629 +2008_007630 +2008_007640 +2008_007641 +2008_007646 +2008_007648 +2008_007653 +2008_007660 +2008_007664 +2008_007665 +2008_007666 +2008_007675 +2008_007682 +2008_007683 +2008_007691 +2008_007692 +2008_007696 +2008_007697 +2008_007698 +2008_007701 +2008_007709 +2008_007710 +2008_007717 +2008_007724 +2008_007726 +2008_007730 +2008_007742 +2008_007746 +2008_007748 +2008_007750 +2008_007752 +2008_007755 +2008_007758 +2008_007759 +2008_007761 +2008_007770 +2008_007777 +2008_007779 +2008_007780 +2008_007781 +2008_007786 +2008_007788 +2008_007789 +2008_007805 +2008_007812 +2008_007817 +2008_007825 +2008_007829 +2008_007833 +2008_007835 +2008_007837 +2008_007840 +2008_007842 +2008_007843 +2008_007848 +2008_007852 +2008_007858 +2008_007861 +2008_007864 +2008_007869 +2008_007870 +2008_007873 +2008_007877 +2008_007879 +2008_007882 +2008_007883 +2008_007891 +2008_007895 +2008_007897 +2008_007904 +2008_007907 +2008_007909 +2008_007912 +2008_007913 +2008_007916 +2008_007918 +2008_007928 +2008_007937 +2008_007938 +2008_007940 +2008_007941 +2008_007947 +2008_007950 +2008_007953 +2008_007962 +2008_007969 +2008_007973 +2008_007975 +2008_007977 +2008_007985 +2008_007987 +2008_007988 +2008_007990 +2008_007997 +2008_007998 +2008_007999 +2008_008002 +2008_008004 +2008_008007 +2008_008012 +2008_008018 +2008_008020 +2008_008021 +2008_008028 +2008_008031 +2008_008034 +2008_008037 +2008_008043 +2008_008048 +2008_008058 +2008_008064 +2008_008070 +2008_008072 +2008_008073 +2008_008074 +2008_008080 +2008_008083 +2008_008092 +2008_008095 +2008_008097 +2008_008098 +2008_008106 +2008_008112 +2008_008116 +2008_008121 +2008_008122 +2008_008125 +2008_008132 +2008_008147 +2008_008148 +2008_008150 +2008_008152 +2008_008154 +2008_008162 +2008_008166 +2008_008169 +2008_008170 +2008_008176 +2008_008180 +2008_008184 +2008_008193 +2008_008194 +2008_008197 +2008_008199 +2008_008200 +2008_008206 +2008_008211 +2008_008212 +2008_008215 +2008_008218 +2008_008220 +2008_008223 +2008_008227 +2008_008229 +2008_008237 +2008_008242 +2008_008247 +2008_008262 +2008_008263 +2008_008266 +2008_008274 +2008_008275 +2008_008276 +2008_008281 +2008_008287 +2008_008288 +2008_008294 +2008_008300 +2008_008309 +2008_008315 +2008_008319 +2008_008321 +2008_008323 +2008_008324 +2008_008325 +2008_008330 +2008_008338 +2008_008342 +2008_008343 +2008_008344 +2008_008345 +2008_008347 +2008_008356 +2008_008363 +2008_008364 +2008_008366 +2008_008368 +2008_008370 +2008_008382 +2008_008384 +2008_008391 +2008_008402 +2008_008403 +2008_008404 +2008_008410 +2008_008411 +2008_008416 +2008_008423 +2008_008428 +2008_008431 +2008_008432 +2008_008440 +2008_008447 +2008_008455 +2008_008462 +2008_008464 +2008_008471 +2008_008476 +2008_008479 +2008_008480 +2008_008482 +2008_008487 +2008_008490 +2008_008496 +2008_008497 +2008_008507 +2008_008508 +2008_008511 +2008_008517 +2008_008521 +2008_008522 +2008_008523 +2008_008525 +2008_008526 +2008_008528 +2008_008530 +2008_008533 +2008_008541 +2008_008544 +2008_008545 +2008_008546 +2008_008547 +2008_008549 +2008_008550 +2008_008560 +2008_008567 +2008_008572 +2008_008578 +2008_008579 +2008_008583 +2008_008589 +2008_008590 +2008_008591 +2008_008593 +2008_008600 +2008_008601 +2008_008607 +2008_008608 +2008_008616 +2008_008618 +2008_008623 +2008_008624 +2008_008635 +2008_008637 +2008_008641 +2008_008642 +2008_008649 +2008_008654 +2008_008665 +2008_008666 +2008_008668 +2008_008671 +2008_008673 +2008_008674 +2008_008681 +2008_008685 +2008_008689 +2008_008691 +2008_008694 +2008_008696 +2008_008697 +2008_008701 +2008_008706 
+2008_008707 +2008_008714 +2008_008717 +2008_008719 +2008_008725 +2008_008735 +2008_008744 +2008_008745 +2008_008748 +2008_008749 +2008_008757 +2008_008770 +2008_008773 +2009_000006 +2009_000010 +2009_000014 +2009_000015 +2009_000016 +2009_000021 +2009_000027 +2009_000028 +2009_000029 +2009_000030 +2009_000040 +2009_000042 +2009_000052 +2009_000054 +2009_000056 +2009_000058 +2009_000059 +2009_000073 +2009_000082 +2009_000085 +2009_000088 +2009_000091 +2009_000100 +2009_000103 +2009_000104 +2009_000105 +2009_000109 +2009_000119 +2009_000120 +2009_000122 +2009_000128 +2009_000130 +2009_000131 +2009_000132 +2009_000133 +2009_000135 +2009_000137 +2009_000140 +2009_000141 +2009_000145 +2009_000150 +2009_000151 +2009_000159 +2009_000160 +2009_000161 +2009_000164 +2009_000168 +2009_000176 +2009_000177 +2009_000188 +2009_000195 +2009_000197 +2009_000203 +2009_000209 +2009_000217 +2009_000218 +2009_000223 +2009_000227 +2009_000229 +2009_000232 +2009_000233 +2009_000237 +2009_000239 +2009_000248 +2009_000250 +2009_000251 +2009_000253 +2009_000268 +2009_000277 +2009_000280 +2009_000281 +2009_000285 +2009_000287 +2009_000289 +2009_000290 +2009_000303 +2009_000317 +2009_000320 +2009_000322 +2009_000327 +2009_000336 +2009_000339 +2009_000340 +2009_000341 +2009_000343 +2009_000344 +2009_000347 +2009_000350 +2009_000367 +2009_000375 +2009_000377 +2009_000379 +2009_000385 +2009_000390 +2009_000393 +2009_000400 +2009_000405 +2009_000408 +2009_000409 +2009_000416 +2009_000419 +2009_000420 +2009_000438 +2009_000439 +2009_000443 +2009_000444 +2009_000445 +2009_000449 +2009_000452 +2009_000454 +2009_000463 +2009_000464 +2009_000471 +2009_000474 +2009_000476 +2009_000477 +2009_000486 +2009_000491 +2009_000493 +2009_000494 +2009_000500 +2009_000502 +2009_000503 +2009_000504 +2009_000505 +2009_000515 +2009_000522 +2009_000525 +2009_000527 +2009_000529 +2009_000532 +2009_000535 +2009_000539 +2009_000544 +2009_000546 +2009_000547 +2009_000553 +2009_000557 +2009_000560 +2009_000562 +2009_000565 +2009_000567 +2009_000575 +2009_000576 +2009_000577 +2009_000579 +2009_000585 +2009_000586 +2009_000591 +2009_000592 +2009_000595 +2009_000599 +2009_000600 +2009_000602 +2009_000603 +2009_000604 +2009_000617 +2009_000626 +2009_000629 +2009_000632 +2009_000635 +2009_000636 +2009_000638 +2009_000642 +2009_000651 +2009_000653 +2009_000655 +2009_000662 +2009_000663 +2009_000672 +2009_000679 +2009_000684 +2009_000686 +2009_000690 +2009_000692 +2009_000694 +2009_000695 +2009_000696 +2009_000708 +2009_000709 +2009_000718 +2009_000720 +2009_000722 +2009_000737 +2009_000744 +2009_000745 +2009_000746 +2009_000748 +2009_000750 +2009_000752 +2009_000755 +2009_000757 +2009_000759 +2009_000768 +2009_000770 +2009_000774 +2009_000777 +2009_000789 +2009_000790 +2009_000793 +2009_000794 +2009_000796 +2009_000797 +2009_000801 +2009_000804 +2009_000805 +2009_000815 +2009_000816 +2009_000831 +2009_000833 +2009_000834 +2009_000848 +2009_000849 +2009_000854 +2009_000867 +2009_000869 +2009_000874 +2009_000882 +2009_000887 +2009_000889 +2009_000894 +2009_000895 +2009_000899 +2009_000902 +2009_000906 +2009_000910 +2009_000915 +2009_000920 +2009_000926 +2009_000927 +2009_000930 +2009_000932 +2009_000937 +2009_000938 +2009_000945 +2009_000953 +2009_000961 +2009_000962 +2009_000967 +2009_000969 +2009_000970 +2009_000973 +2009_000974 +2009_000975 +2009_000979 +2009_000980 +2009_000981 +2009_000987 +2009_000990 +2009_000996 +2009_001002 +2009_001009 +2009_001012 +2009_001013 +2009_001019 +2009_001027 +2009_001036 +2009_001037 +2009_001040 +2009_001042 
+2009_001052 +2009_001056 +2009_001059 +2009_001068 +2009_001070 +2009_001074 +2009_001078 +2009_001079 +2009_001081 +2009_001085 +2009_001091 +2009_001095 +2009_001096 +2009_001098 +2009_001100 +2009_001102 +2009_001103 +2009_001104 +2009_001105 +2009_001107 +2009_001110 +2009_001111 +2009_001117 +2009_001124 +2009_001129 +2009_001133 +2009_001135 +2009_001137 +2009_001138 +2009_001140 +2009_001145 +2009_001146 +2009_001147 +2009_001151 +2009_001152 +2009_001153 +2009_001154 +2009_001159 +2009_001163 +2009_001172 +2009_001177 +2009_001180 +2009_001188 +2009_001190 +2009_001192 +2009_001197 +2009_001199 +2009_001201 +2009_001203 +2009_001205 +2009_001206 +2009_001208 +2009_001216 +2009_001217 +2009_001221 +2009_001224 +2009_001229 +2009_001230 +2009_001236 +2009_001237 +2009_001238 +2009_001241 +2009_001251 +2009_001253 +2009_001254 +2009_001260 +2009_001263 +2009_001264 +2009_001268 +2009_001270 +2009_001271 +2009_001282 +2009_001283 +2009_001285 +2009_001291 +2009_001301 +2009_001303 +2009_001305 +2009_001306 +2009_001308 +2009_001311 +2009_001312 +2009_001319 +2009_001323 +2009_001327 +2009_001328 +2009_001329 +2009_001339 +2009_001344 +2009_001354 +2009_001357 +2009_001359 +2009_001360 +2009_001364 +2009_001368 +2009_001369 +2009_001372 +2009_001374 +2009_001375 +2009_001376 +2009_001385 +2009_001388 +2009_001389 +2009_001390 +2009_001395 +2009_001403 +2009_001412 +2009_001422 +2009_001424 +2009_001434 +2009_001435 +2009_001443 +2009_001444 +2009_001446 +2009_001448 +2009_001450 +2009_001452 +2009_001453 +2009_001457 +2009_001462 +2009_001463 +2009_001466 +2009_001472 +2009_001474 +2009_001475 +2009_001476 +2009_001480 +2009_001481 +2009_001493 +2009_001494 +2009_001500 +2009_001502 +2009_001507 +2009_001508 +2009_001514 +2009_001516 +2009_001517 +2009_001537 +2009_001538 +2009_001541 +2009_001542 +2009_001544 +2009_001546 +2009_001550 +2009_001553 +2009_001555 +2009_001558 +2009_001566 +2009_001567 +2009_001570 +2009_001585 +2009_001589 +2009_001590 +2009_001595 +2009_001598 +2009_001602 +2009_001605 +2009_001608 +2009_001611 +2009_001612 +2009_001614 +2009_001615 +2009_001625 +2009_001636 +2009_001638 +2009_001640 +2009_001642 +2009_001651 +2009_001657 +2009_001660 +2009_001664 +2009_001670 +2009_001671 +2009_001674 +2009_001676 +2009_001677 +2009_001678 +2009_001689 +2009_001690 +2009_001693 +2009_001695 +2009_001704 +2009_001705 +2009_001706 +2009_001715 +2009_001719 +2009_001720 +2009_001724 +2009_001732 +2009_001734 +2009_001735 +2009_001740 +2009_001744 +2009_001746 +2009_001747 +2009_001749 +2009_001750 +2009_001751 +2009_001755 +2009_001770 +2009_001779 +2009_001781 +2009_001782 +2009_001783 +2009_001792 +2009_001798 +2009_001800 +2009_001801 +2009_001802 +2009_001806 +2009_001807 +2009_001809 +2009_001812 +2009_001817 +2009_001825 +2009_001826 +2009_001827 +2009_001828 +2009_001831 +2009_001837 +2009_001840 +2009_001846 +2009_001847 +2009_001856 +2009_001861 +2009_001865 +2009_001867 +2009_001868 +2009_001869 +2009_001871 +2009_001873 +2009_001874 +2009_001875 +2009_001884 +2009_001885 +2009_001888 +2009_001894 +2009_001897 +2009_001898 +2009_001902 +2009_001904 +2009_001908 +2009_001910 +2009_001917 +2009_001922 +2009_001926 +2009_001927 +2009_001933 +2009_001934 +2009_001937 +2009_001948 +2009_001952 +2009_001959 +2009_001960 +2009_001961 +2009_001962 +2009_001964 +2009_001972 +2009_001975 +2009_001990 +2009_001994 +2009_001997 +2009_001999 +2009_002000 +2009_002010 +2009_002018 +2009_002019 +2009_002037 +2009_002040 +2009_002044 +2009_002052 +2009_002054 +2009_002057 
+2009_002060 +2009_002064 +2009_002066 +2009_002072 +2009_002077 +2009_002083 +2009_002086 +2009_002088 +2009_002089 +2009_002093 +2009_002096 +2009_002098 +2009_002099 +2009_002103 +2009_002104 +2009_002105 +2009_002107 +2009_002112 +2009_002116 +2009_002117 +2009_002118 +2009_002119 +2009_002120 +2009_002123 +2009_002126 +2009_002129 +2009_002133 +2009_002145 +2009_002146 +2009_002147 +2009_002149 +2009_002151 +2009_002152 +2009_002153 +2009_002173 +2009_002176 +2009_002180 +2009_002182 +2009_002192 +2009_002193 +2009_002197 +2009_002198 +2009_002203 +2009_002204 +2009_002214 +2009_002216 +2009_002225 +2009_002229 +2009_002235 +2009_002236 +2009_002240 +2009_002245 +2009_002253 +2009_002254 +2009_002256 +2009_002258 +2009_002259 +2009_002262 +2009_002264 +2009_002271 +2009_002273 +2009_002274 +2009_002281 +2009_002285 +2009_002289 +2009_002297 +2009_002298 +2009_002299 +2009_002301 +2009_002311 +2009_002312 +2009_002314 +2009_002324 +2009_002326 +2009_002331 +2009_002338 +2009_002339 +2009_002343 +2009_002348 +2009_002352 +2009_002358 +2009_002362 +2009_002371 +2009_002376 +2009_002377 +2009_002381 +2009_002386 +2009_002387 +2009_002388 +2009_002391 +2009_002397 +2009_002404 +2009_002406 +2009_002408 +2009_002409 +2009_002416 +2009_002419 +2009_002422 +2009_002423 +2009_002424 +2009_002425 +2009_002429 +2009_002431 +2009_002434 +2009_002438 +2009_002439 +2009_002443 +2009_002448 +2009_002452 +2009_002456 +2009_002460 +2009_002472 +2009_002504 +2009_002505 +2009_002506 +2009_002514 +2009_002519 +2009_002522 +2009_002523 +2009_002530 +2009_002536 +2009_002542 +2009_002543 +2009_002553 +2009_002556 +2009_002557 +2009_002558 +2009_002559 +2009_002561 +2009_002565 +2009_002567 +2009_002577 +2009_002579 +2009_002585 +2009_002586 +2009_002588 +2009_002595 +2009_002597 +2009_002599 +2009_002605 +2009_002611 +2009_002612 +2009_002613 +2009_002615 +2009_002616 +2009_002620 +2009_002621 +2009_002624 +2009_002625 +2009_002626 +2009_002628 +2009_002629 +2009_002648 +2009_002652 +2009_002659 +2009_002662 +2009_002671 +2009_002672 +2009_002674 +2009_002676 +2009_002685 +2009_002688 +2009_002689 +2009_002695 +2009_002697 +2009_002703 +2009_002704 +2009_002705 +2009_002710 +2009_002713 +2009_002714 +2009_002715 +2009_002719 +2009_002725 +2009_002728 +2009_002734 +2009_002746 +2009_002750 +2009_002758 +2009_002759 +2009_002763 +2009_002764 +2009_002770 +2009_002780 +2009_002784 +2009_002789 +2009_002791 +2009_002792 +2009_002798 +2009_002799 +2009_002813 +2009_002814 +2009_002817 +2009_002820 +2009_002824 +2009_002827 +2009_002831 +2009_002835 +2009_002842 +2009_002843 +2009_002844 +2009_002845 +2009_002847 +2009_002849 +2009_002850 +2009_002851 +2009_002853 +2009_002855 +2009_002862 +2009_002867 +2009_002869 +2009_002872 +2009_002879 +2009_002885 +2009_002890 +2009_002893 +2009_002897 +2009_002901 +2009_002908 +2009_002912 +2009_002914 +2009_002917 +2009_002921 +2009_002932 +2009_002933 +2009_002935 +2009_002937 +2009_002946 +2009_002947 +2009_002952 +2009_002954 +2009_002955 +2009_002957 +2009_002958 +2009_002961 +2009_002970 +2009_002971 +2009_002972 +2009_002976 +2009_002980 +2009_002983 +2009_002984 +2009_002988 +2009_002993 +2009_002999 +2009_003000 +2009_003002 +2009_003006 +2009_003007 +2009_003012 +2009_003019 +2009_003032 +2009_003034 +2009_003035 +2009_003039 +2009_003042 +2009_003053 +2009_003054 +2009_003056 +2009_003064 +2009_003066 +2009_003067 +2009_003068 +2009_003075 +2009_003077 +2009_003078 +2009_003082 +2009_003087 +2009_003088 +2009_003090 +2009_003091 +2009_003093 +2009_003095 
+2009_003107 +2009_003108 +2009_003109 +2009_003115 +2009_003116 +2009_003118 +2009_003127 +2009_003138 +2009_003142 +2009_003146 +2009_003147 +2009_003155 +2009_003156 +2009_003157 +2009_003164 +2009_003165 +2009_003166 +2009_003168 +2009_003172 +2009_003173 +2009_003175 +2009_003187 +2009_003200 +2009_003208 +2009_003209 +2009_003218 +2009_003219 +2009_003222 +2009_003225 +2009_003229 +2009_003232 +2009_003233 +2009_003234 +2009_003249 +2009_003253 +2009_003257 +2009_003261 +2009_003265 +2009_003267 +2009_003272 +2009_003277 +2009_003285 +2009_003290 +2009_003309 +2009_003310 +2009_003315 +2009_003316 +2009_003317 +2009_003326 +2009_003327 +2009_003333 +2009_003338 +2009_003340 +2009_003345 +2009_003349 +2009_003350 +2009_003352 +2009_003353 +2009_003360 +2009_003361 +2009_003363 +2009_003365 +2009_003367 +2009_003369 +2009_003377 +2009_003381 +2009_003383 +2009_003384 +2009_003385 +2009_003386 +2009_003395 +2009_003396 +2009_003402 +2009_003407 +2009_003416 +2009_003419 +2009_003425 +2009_003430 +2009_003436 +2009_003443 +2009_003446 +2009_003447 +2009_003454 +2009_003455 +2009_003458 +2009_003459 +2009_003461 +2009_003468 +2009_003482 +2009_003488 +2009_003489 +2009_003490 +2009_003492 +2009_003497 +2009_003510 +2009_003511 +2009_003513 +2009_003519 +2009_003520 +2009_003522 +2009_003524 +2009_003531 +2009_003533 +2009_003534 +2009_003539 +2009_003540 +2009_003541 +2009_003545 +2009_003546 +2009_003555 +2009_003562 +2009_003563 +2009_003572 +2009_003577 +2009_003583 +2009_003594 +2009_003600 +2009_003601 +2009_003605 +2009_003608 +2009_003609 +2009_003613 +2009_003614 +2009_003624 +2009_003629 +2009_003634 +2009_003636 +2009_003639 +2009_003644 +2009_003646 +2009_003647 +2009_003652 +2009_003654 +2009_003657 +2009_003660 +2009_003663 +2009_003667 +2009_003668 +2009_003677 +2009_003683 +2009_003685 +2009_003688 +2009_003690 +2009_003694 +2009_003695 +2009_003697 +2009_003702 +2009_003705 +2009_003708 +2009_003709 +2009_003711 +2009_003717 +2009_003720 +2009_003722 +2009_003732 +2009_003734 +2009_003735 +2009_003736 +2009_003739 +2009_003743 +2009_003752 +2009_003753 +2009_003757 +2009_003760 +2009_003765 +2009_003768 +2009_003775 +2009_003783 +2009_003784 +2009_003786 +2009_003790 +2009_003793 +2009_003799 +2009_003801 +2009_003808 +2009_003815 +2009_003816 +2009_003818 +2009_003819 +2009_003820 +2009_003825 +2009_003827 +2009_003829 +2009_003837 +2009_003838 +2009_003843 +2009_003846 +2009_003848 +2009_003852 +2009_003860 +2009_003865 +2009_003867 +2009_003873 +2009_003883 +2009_003888 +2009_003896 +2009_003897 +2009_003900 +2009_003912 +2009_003913 +2009_003920 +2009_003921 +2009_003922 +2009_003933 +2009_003942 +2009_003956 +2009_003958 +2009_003961 +2009_003966 +2009_003974 +2009_003975 +2009_003976 +2009_003985 +2009_003993 +2009_003994 +2009_004002 +2009_004005 +2009_004007 +2009_004012 +2009_004018 +2009_004020 +2009_004023 +2009_004025 +2009_004037 +2009_004042 +2009_004055 +2009_004058 +2009_004069 +2009_004073 +2009_004074 +2009_004078 +2009_004082 +2009_004083 +2009_004088 +2009_004091 +2009_004094 +2009_004095 +2009_004096 +2009_004100 +2009_004103 +2009_004105 +2009_004109 +2009_004112 +2009_004117 +2009_004118 +2009_004121 +2009_004122 +2009_004133 +2009_004134 +2009_004139 +2009_004153 +2009_004154 +2009_004159 +2009_004162 +2009_004165 +2009_004168 +2009_004169 +2009_004171 +2009_004173 +2009_004174 +2009_004176 +2009_004177 +2009_004178 +2009_004179 +2009_004180 +2009_004181 +2009_004183 +2009_004186 +2009_004187 +2009_004191 +2009_004199 +2009_004200 +2009_004201 
+2009_004202 +2009_004211 +2009_004212 +2009_004213 +2009_004218 +2009_004222 +2009_004225 +2009_004227 +2009_004228 +2009_004229 +2009_004231 +2009_004234 +2009_004244 +2009_004249 +2009_004261 +2009_004264 +2009_004271 +2009_004276 +2009_004278 +2009_004279 +2009_004283 +2009_004285 +2009_004289 +2009_004290 +2009_004295 +2009_004301 +2009_004308 +2009_004312 +2009_004315 +2009_004316 +2009_004317 +2009_004319 +2009_004322 +2009_004323 +2009_004327 +2009_004328 +2009_004334 +2009_004336 +2009_004338 +2009_004340 +2009_004341 +2009_004347 +2009_004351 +2009_004357 +2009_004358 +2009_004368 +2009_004369 +2009_004370 +2009_004371 +2009_004374 +2009_004375 +2009_004383 +2009_004392 +2009_004394 +2009_004397 +2009_004399 +2009_004406 +2009_004409 +2009_004417 +2009_004424 +2009_004425 +2009_004426 +2009_004429 +2009_004432 +2009_004434 +2009_004438 +2009_004442 +2009_004444 +2009_004445 +2009_004446 +2009_004449 +2009_004451 +2009_004452 +2009_004454 +2009_004464 +2009_004465 +2009_004475 +2009_004477 +2009_004479 +2009_004486 +2009_004492 +2009_004501 +2009_004503 +2009_004508 +2009_004511 +2009_004513 +2009_004514 +2009_004519 +2009_004527 +2009_004539 +2009_004545 +2009_004547 +2009_004554 +2009_004557 +2009_004560 +2009_004561 +2009_004562 +2009_004565 +2009_004570 +2009_004571 +2009_004572 +2009_004582 +2009_004593 +2009_004598 +2009_004606 +2009_004616 +2009_004619 +2009_004620 +2009_004626 +2009_004628 +2009_004631 +2009_004639 +2009_004642 +2009_004643 +2009_004647 +2009_004651 +2009_004652 +2009_004656 +2009_004661 +2009_004662 +2009_004667 +2009_004671 +2009_004674 +2009_004681 +2009_004683 +2009_004684 +2009_004688 +2009_004694 +2009_004701 +2009_004705 +2009_004708 +2009_004709 +2009_004710 +2009_004719 +2009_004723 +2009_004728 +2009_004731 +2009_004734 +2009_004737 +2009_004745 +2009_004756 +2009_004759 +2009_004760 +2009_004761 +2009_004764 +2009_004766 +2009_004771 +2009_004772 +2009_004779 +2009_004786 +2009_004787 +2009_004790 +2009_004794 +2009_004797 +2009_004798 +2009_004804 +2009_004805 +2009_004806 +2009_004813 +2009_004815 +2009_004817 +2009_004824 +2009_004829 +2009_004830 +2009_004831 +2009_004836 +2009_004839 +2009_004846 +2009_004847 +2009_004855 +2009_004871 +2009_004874 +2009_004877 +2009_004880 +2009_004887 +2009_004888 +2009_004890 +2009_004898 +2009_004901 +2009_004903 +2009_004904 +2009_004905 +2009_004907 +2009_004914 +2009_004919 +2009_004921 +2009_004926 +2009_004939 +2009_004943 +2009_004944 +2009_004945 +2009_004953 +2009_004958 +2009_004959 +2009_004962 +2009_004965 +2009_004972 +2009_004975 +2009_004977 +2009_004979 +2009_004980 +2009_004983 +2009_004984 +2009_004986 +2009_004990 +2009_004999 +2009_005000 +2009_005006 +2009_005015 +2009_005016 +2009_005024 +2009_005030 +2009_005031 +2009_005035 +2009_005037 +2009_005040 +2009_005042 +2009_005044 +2009_005045 +2009_005051 +2009_005055 +2009_005056 +2009_005057 +2009_005069 +2009_005070 +2009_005075 +2009_005076 +2009_005081 +2009_005084 +2009_005085 +2009_005094 +2009_005095 +2009_005102 +2009_005107 +2009_005118 +2009_005120 +2009_005126 +2009_005127 +2009_005128 +2009_005129 +2009_005130 +2009_005131 +2009_005133 +2009_005141 +2009_005142 +2009_005144 +2009_005145 +2009_005147 +2009_005154 +2009_005155 +2009_005160 +2009_005162 +2009_005163 +2009_005168 +2009_005170 +2009_005177 +2009_005181 +2009_005183 +2009_005191 +2009_005194 +2009_005198 +2009_005201 +2009_005218 +2009_005234 +2009_005236 +2009_005240 +2009_005246 +2009_005247 +2009_005251 +2009_005256 +2009_005263 +2009_005265 +2009_005269 
+2009_005272 +2009_005278 +2009_005282 +2009_005287 +2009_005293 +2009_005297 +2009_005303 +2009_005307 +2009_005308 +2009_005311 +2010_000002 +2010_000009 +2010_000014 +2010_000018 +2010_000023 +2010_000026 +2010_000031 +2010_000043 +2010_000045 +2010_000048 +2010_000052 +2010_000055 +2010_000056 +2010_000061 +2010_000063 +2010_000067 +2010_000071 +2010_000073 +2010_000075 +2010_000076 +2010_000079 +2010_000080 +2010_000082 +2010_000089 +2010_000091 +2010_000103 +2010_000109 +2010_000111 +2010_000114 +2010_000117 +2010_000120 +2010_000124 +2010_000131 +2010_000132 +2010_000133 +2010_000136 +2010_000137 +2010_000138 +2010_000141 +2010_000148 +2010_000152 +2010_000157 +2010_000165 +2010_000169 +2010_000177 +2010_000182 +2010_000183 +2010_000187 +2010_000189 +2010_000190 +2010_000195 +2010_000198 +2010_000203 +2010_000204 +2010_000209 +2010_000222 +2010_000224 +2010_000227 +2010_000229 +2010_000233 +2010_000234 +2010_000244 +2010_000245 +2010_000248 +2010_000249 +2010_000250 +2010_000255 +2010_000263 +2010_000264 +2010_000269 +2010_000270 +2010_000276 +2010_000285 +2010_000293 +2010_000296 +2010_000299 +2010_000302 +2010_000303 +2010_000307 +2010_000310 +2010_000320 +2010_000323 +2010_000329 +2010_000337 +2010_000347 +2010_000356 +2010_000361 +2010_000362 +2010_000371 +2010_000377 +2010_000386 +2010_000388 +2010_000389 +2010_000392 +2010_000393 +2010_000394 +2010_000395 +2010_000404 +2010_000413 +2010_000415 +2010_000419 +2010_000420 +2010_000432 +2010_000436 +2010_000437 +2010_000439 +2010_000447 +2010_000448 +2010_000453 +2010_000458 +2010_000459 +2010_000463 +2010_000465 +2010_000466 +2010_000469 +2010_000473 +2010_000477 +2010_000480 +2010_000484 +2010_000488 +2010_000490 +2010_000492 +2010_000495 +2010_000498 +2010_000500 +2010_000503 +2010_000508 +2010_000511 +2010_000513 +2010_000519 +2010_000522 +2010_000527 +2010_000534 +2010_000538 +2010_000545 +2010_000549 +2010_000556 +2010_000557 +2010_000561 +2010_000564 +2010_000567 +2010_000568 +2010_000571 +2010_000576 +2010_000577 +2010_000578 +2010_000581 +2010_000588 +2010_000591 +2010_000601 +2010_000613 +2010_000616 +2010_000626 +2010_000630 +2010_000632 +2010_000641 +2010_000644 +2010_000645 +2010_000648 +2010_000651 +2010_000658 +2010_000661 +2010_000664 +2010_000667 +2010_000671 +2010_000674 +2010_000675 +2010_000678 +2010_000681 +2010_000685 +2010_000687 +2010_000688 +2010_000691 +2010_000694 +2010_000702 +2010_000707 +2010_000710 +2010_000715 +2010_000716 +2010_000717 +2010_000721 +2010_000723 +2010_000739 +2010_000740 +2010_000746 +2010_000747 +2010_000748 +2010_000750 +2010_000760 +2010_000765 +2010_000769 +2010_000770 +2010_000772 +2010_000773 +2010_000782 +2010_000785 +2010_000787 +2010_000799 +2010_000800 +2010_000803 +2010_000806 +2010_000807 +2010_000808 +2010_000810 +2010_000815 +2010_000827 +2010_000837 +2010_000838 +2010_000842 +2010_000847 +2010_000849 +2010_000855 +2010_000857 +2010_000860 +2010_000862 +2010_000863 +2010_000871 +2010_000872 +2010_000879 +2010_000885 +2010_000887 +2010_000891 +2010_000899 +2010_000908 +2010_000910 +2010_000912 +2010_000914 +2010_000920 +2010_000922 +2010_000926 +2010_000938 +2010_000939 +2010_000942 +2010_000954 +2010_000970 +2010_000971 +2010_000974 +2010_000978 +2010_000979 +2010_000983 +2010_000984 +2010_000986 +2010_000991 +2010_000994 +2010_000995 +2010_001002 +2010_001012 +2010_001013 +2010_001020 +2010_001023 +2010_001025 +2010_001039 +2010_001043 +2010_001044 +2010_001054 +2010_001063 +2010_001066 +2010_001074 +2010_001076 +2010_001087 +2010_001092 +2010_001094 +2010_001098 
+2010_001100 +2010_001103 +2010_001105 +2010_001106 +2010_001110 +2010_001111 +2010_001112 +2010_001113 +2010_001118 +2010_001120 +2010_001121 +2010_001123 +2010_001126 +2010_001131 +2010_001134 +2010_001139 +2010_001140 +2010_001142 +2010_001143 +2010_001148 +2010_001152 +2010_001154 +2010_001159 +2010_001160 +2010_001175 +2010_001177 +2010_001179 +2010_001183 +2010_001184 +2010_001185 +2010_001193 +2010_001195 +2010_001199 +2010_001205 +2010_001210 +2010_001211 +2010_001212 +2010_001224 +2010_001225 +2010_001237 +2010_001240 +2010_001245 +2010_001247 +2010_001250 +2010_001253 +2010_001254 +2010_001261 +2010_001271 +2010_001273 +2010_001274 +2010_001275 +2010_001277 +2010_001279 +2010_001282 +2010_001288 +2010_001289 +2010_001299 +2010_001310 +2010_001311 +2010_001312 +2010_001317 +2010_001320 +2010_001328 +2010_001329 +2010_001337 +2010_001338 +2010_001339 +2010_001344 +2010_001347 +2010_001356 +2010_001360 +2010_001361 +2010_001363 +2010_001366 +2010_001370 +2010_001372 +2010_001374 +2010_001383 +2010_001385 +2010_001386 +2010_001390 +2010_001395 +2010_001397 +2010_001399 +2010_001401 +2010_001402 +2010_001406 +2010_001408 +2010_001410 +2010_001413 +2010_001418 +2010_001422 +2010_001425 +2010_001430 +2010_001431 +2010_001433 +2010_001434 +2010_001435 +2010_001450 +2010_001456 +2010_001457 +2010_001458 +2010_001464 +2010_001465 +2010_001472 +2010_001478 +2010_001480 +2010_001481 +2010_001487 +2010_001489 +2010_001499 +2010_001503 +2010_001511 +2010_001514 +2010_001515 +2010_001529 +2010_001533 +2010_001537 +2010_001547 +2010_001550 +2010_001551 +2010_001552 +2010_001555 +2010_001560 +2010_001561 +2010_001562 +2010_001569 +2010_001572 +2010_001576 +2010_001580 +2010_001583 +2010_001590 +2010_001592 +2010_001594 +2010_001595 +2010_001596 +2010_001599 +2010_001602 +2010_001603 +2010_001607 +2010_001608 +2010_001618 +2010_001619 +2010_001626 +2010_001630 +2010_001638 +2010_001644 +2010_001647 +2010_001649 +2010_001650 +2010_001660 +2010_001665 +2010_001674 +2010_001676 +2010_001687 +2010_001689 +2010_001694 +2010_001698 +2010_001700 +2010_001706 +2010_001709 +2010_001710 +2010_001715 +2010_001718 +2010_001719 +2010_001726 +2010_001729 +2010_001732 +2010_001743 +2010_001744 +2010_001746 +2010_001747 +2010_001748 +2010_001753 +2010_001756 +2010_001759 +2010_001762 +2010_001770 +2010_001776 +2010_001780 +2010_001784 +2010_001785 +2010_001794 +2010_001795 +2010_001797 +2010_001801 +2010_001806 +2010_001807 +2010_001808 +2010_001810 +2010_001817 +2010_001841 +2010_001842 +2010_001846 +2010_001849 +2010_001850 +2010_001852 +2010_001853 +2010_001856 +2010_001858 +2010_001860 +2010_001864 +2010_001870 +2010_001881 +2010_001884 +2010_001885 +2010_001896 +2010_001899 +2010_001911 +2010_001919 +2010_001922 +2010_001923 +2010_001924 +2010_001931 +2010_001933 +2010_001934 +2010_001939 +2010_001940 +2010_001941 +2010_001944 +2010_001948 +2010_001957 +2010_001960 +2010_001970 +2010_001973 +2010_001974 +2010_001976 +2010_001978 +2010_001979 +2010_001980 +2010_001981 +2010_001982 +2010_001993 +2010_001994 +2010_002015 +2010_002018 +2010_002020 +2010_002023 +2010_002026 +2010_002032 +2010_002037 +2010_002039 +2010_002042 +2010_002044 +2010_002045 +2010_002047 +2010_002054 +2010_002055 +2010_002057 +2010_002065 +2010_002068 +2010_002070 +2010_002080 +2010_002095 +2010_002097 +2010_002104 +2010_002107 +2010_002118 +2010_002121 +2010_002127 +2010_002129 +2010_002130 +2010_002132 +2010_002136 +2010_002139 +2010_002141 +2010_002143 +2010_002149 +2010_002152 +2010_002154 +2010_002166 +2010_002168 +2010_002176 
+2010_002177 +2010_002179 +2010_002180 +2010_002185 +2010_002191 +2010_002193 +2010_002203 +2010_002204 +2010_002207 +2010_002208 +2010_002215 +2010_002216 +2010_002218 +2010_002220 +2010_002221 +2010_002226 +2010_002227 +2010_002236 +2010_002242 +2010_002243 +2010_002248 +2010_002254 +2010_002263 +2010_002267 +2010_002274 +2010_002278 +2010_002286 +2010_002295 +2010_002299 +2010_002301 +2010_002309 +2010_002312 +2010_002318 +2010_002320 +2010_002327 +2010_002333 +2010_002338 +2010_002346 +2010_002349 +2010_002353 +2010_002356 +2010_002363 +2010_002364 +2010_002368 +2010_002369 +2010_002371 +2010_002374 +2010_002378 +2010_002379 +2010_002382 +2010_002387 +2010_002391 +2010_002392 +2010_002393 +2010_002399 +2010_002400 +2010_002410 +2010_002413 +2010_002418 +2010_002424 +2010_002425 +2010_002429 +2010_002431 +2010_002435 +2010_002438 +2010_002439 +2010_002440 +2010_002445 +2010_002452 +2010_002455 +2010_002456 +2010_002457 +2010_002459 +2010_002462 +2010_002469 +2010_002472 +2010_002475 +2010_002485 +2010_002487 +2010_002492 +2010_002496 +2010_002497 +2010_002498 +2010_002499 +2010_002501 +2010_002507 +2010_002509 +2010_002513 +2010_002520 +2010_002527 +2010_002529 +2010_002532 +2010_002537 +2010_002551 +2010_002552 +2010_002553 +2010_002556 +2010_002562 +2010_002567 +2010_002570 +2010_002573 +2010_002575 +2010_002577 +2010_002582 +2010_002583 +2010_002589 +2010_002592 +2010_002594 +2010_002614 +2010_002615 +2010_002616 +2010_002618 +2010_002620 +2010_002624 +2010_002625 +2010_002626 +2010_002628 +2010_002642 +2010_002644 +2010_002647 +2010_002653 +2010_002656 +2010_002659 +2010_002662 +2010_002665 +2010_002674 +2010_002675 +2010_002684 +2010_002686 +2010_002688 +2010_002692 +2010_002696 +2010_002697 +2010_002702 +2010_002708 +2010_002720 +2010_002722 +2010_002729 +2010_002733 +2010_002734 +2010_002742 +2010_002746 +2010_002747 +2010_002750 +2010_002752 +2010_002759 +2010_002760 +2010_002772 +2010_002778 +2010_002779 +2010_002781 +2010_002786 +2010_002794 +2010_002797 +2010_002801 +2010_002805 +2010_002811 +2010_002813 +2010_002815 +2010_002816 +2010_002820 +2010_002821 +2010_002830 +2010_002831 +2010_002834 +2010_002838 +2010_002839 +2010_002841 +2010_002842 +2010_002843 +2010_002844 +2010_002851 +2010_002855 +2010_002856 +2010_002857 +2010_002865 +2010_002870 +2010_002880 +2010_002884 +2010_002891 +2010_002892 +2010_002896 +2010_002899 +2010_002901 +2010_002903 +2010_002907 +2010_002909 +2010_002915 +2010_002917 +2010_002931 +2010_002935 +2010_002937 +2010_002938 +2010_002941 +2010_002946 +2010_002947 +2010_002948 +2010_002955 +2010_002962 +2010_002973 +2010_002976 +2010_002978 +2010_002979 +2010_002982 +2010_002987 +2010_002990 +2010_003002 +2010_003003 +2010_003007 +2010_003010 +2010_003011 +2010_003013 +2010_003017 +2010_003025 +2010_003027 +2010_003028 +2010_003032 +2010_003034 +2010_003035 +2010_003037 +2010_003044 +2010_003047 +2010_003050 +2010_003053 +2010_003055 +2010_003056 +2010_003057 +2010_003062 +2010_003077 +2010_003078 +2010_003084 +2010_003086 +2010_003088 +2010_003093 +2010_003094 +2010_003097 +2010_003101 +2010_003106 +2010_003108 +2010_003114 +2010_003115 +2010_003117 +2010_003119 +2010_003137 +2010_003138 +2010_003143 +2010_003148 +2010_003149 +2010_003151 +2010_003153 +2010_003157 +2010_003159 +2010_003162 +2010_003169 +2010_003170 +2010_003173 +2010_003174 +2010_003179 +2010_003185 +2010_003186 +2010_003191 +2010_003192 +2010_003197 +2010_003203 +2010_003204 +2010_003206 +2010_003218 +2010_003222 +2010_003227 +2010_003230 +2010_003238 +2010_003241 +2010_003250 
+2010_003252 +2010_003255 +2010_003256 +2010_003259 +2010_003263 +2010_003264 +2010_003269 +2010_003274 +2010_003280 +2010_003283 +2010_003290 +2010_003291 +2010_003297 +2010_003300 +2010_003301 +2010_003304 +2010_003305 +2010_003309 +2010_003329 +2010_003332 +2010_003333 +2010_003337 +2010_003342 +2010_003343 +2010_003344 +2010_003345 +2010_003350 +2010_003351 +2010_003353 +2010_003355 +2010_003367 +2010_003370 +2010_003371 +2010_003372 +2010_003374 +2010_003380 +2010_003383 +2010_003384 +2010_003391 +2010_003395 +2010_003400 +2010_003405 +2010_003406 +2010_003415 +2010_003421 +2010_003432 +2010_003435 +2010_003436 +2010_003437 +2010_003439 +2010_003469 +2010_003474 +2010_003477 +2010_003478 +2010_003481 +2010_003483 +2010_003491 +2010_003507 +2010_003509 +2010_003512 +2010_003513 +2010_003526 +2010_003529 +2010_003534 +2010_003535 +2010_003538 +2010_003539 +2010_003546 +2010_003549 +2010_003551 +2010_003554 +2010_003556 +2010_003560 +2010_003567 +2010_003574 +2010_003576 +2010_003582 +2010_003592 +2010_003598 +2010_003599 +2010_003601 +2010_003604 +2010_003608 +2010_003612 +2010_003618 +2010_003625 +2010_003629 +2010_003634 +2010_003635 +2010_003643 +2010_003644 +2010_003648 +2010_003649 +2010_003651 +2010_003656 +2010_003665 +2010_003670 +2010_003671 +2010_003672 +2010_003674 +2010_003677 +2010_003680 +2010_003686 +2010_003689 +2010_003690 +2010_003696 +2010_003703 +2010_003714 +2010_003717 +2010_003719 +2010_003721 +2010_003725 +2010_003734 +2010_003736 +2010_003737 +2010_003743 +2010_003747 +2010_003752 +2010_003754 +2010_003770 +2010_003773 +2010_003784 +2010_003788 +2010_003789 +2010_003791 +2010_003798 +2010_003799 +2010_003804 +2010_003815 +2010_003816 +2010_003818 +2010_003821 +2010_003822 +2010_003825 +2010_003837 +2010_003844 +2010_003845 +2010_003856 +2010_003860 +2010_003864 +2010_003865 +2010_003871 +2010_003874 +2010_003875 +2010_003877 +2010_003884 +2010_003887 +2010_003891 +2010_003892 +2010_003893 +2010_003894 +2010_003897 +2010_003899 +2010_003900 +2010_003906 +2010_003910 +2010_003911 +2010_003914 +2010_003925 +2010_003929 +2010_003931 +2010_003937 +2010_003938 +2010_003945 +2010_003949 +2010_003950 +2010_003954 +2010_003957 +2010_003958 +2010_003974 +2010_003982 +2010_003987 +2010_003994 +2010_003995 +2010_003996 +2010_004002 +2010_004005 +2010_004007 +2010_004008 +2010_004009 +2010_004011 +2010_004014 +2010_004017 +2010_004025 +2010_004028 +2010_004029 +2010_004030 +2010_004033 +2010_004043 +2010_004045 +2010_004048 +2010_004052 +2010_004053 +2010_004059 +2010_004060 +2010_004061 +2010_004062 +2010_004065 +2010_004066 +2010_004069 +2010_004071 +2010_004072 +2010_004074 +2010_004075 +2010_004081 +2010_004084 +2010_004089 +2010_004092 +2010_004108 +2010_004109 +2010_004111 +2010_004116 +2010_004118 +2010_004119 +2010_004121 +2010_004123 +2010_004130 +2010_004133 +2010_004138 +2010_004144 +2010_004148 +2010_004154 +2010_004160 +2010_004162 +2010_004163 +2010_004168 +2010_004171 +2010_004172 +2010_004175 +2010_004180 +2010_004186 +2010_004191 +2010_004192 +2010_004197 +2010_004198 +2010_004204 +2010_004210 +2010_004216 +2010_004222 +2010_004223 +2010_004231 +2010_004239 +2010_004242 +2010_004244 +2010_004247 +2010_004248 +2010_004249 +2010_004252 +2010_004256 +2010_004258 +2010_004259 +2010_004264 +2010_004271 +2010_004275 +2010_004276 +2010_004282 +2010_004283 +2010_004288 +2010_004289 +2010_004295 +2010_004296 +2010_004301 +2010_004306 +2010_004307 +2010_004311 +2010_004325 +2010_004327 +2010_004332 +2010_004333 +2010_004336 +2010_004344 +2010_004346 +2010_004349 
+2010_004357 +2010_004358 +2010_004360 +2010_004361 +2010_004363 +2010_004365 +2010_004366 +2010_004367 +2010_004368 +2010_004370 +2010_004371 +2010_004373 +2010_004385 +2010_004402 +2010_004412 +2010_004423 +2010_004429 +2010_004436 +2010_004441 +2010_004445 +2010_004448 +2010_004450 +2010_004451 +2010_004459 +2010_004466 +2010_004467 +2010_004476 +2010_004477 +2010_004478 +2010_004481 +2010_004491 +2010_004492 +2010_004493 +2010_004499 +2010_004501 +2010_004511 +2010_004514 +2010_004517 +2010_004518 +2010_004521 +2010_004523 +2010_004540 +2010_004546 +2010_004558 +2010_004560 +2010_004561 +2010_004569 +2010_004573 +2010_004575 +2010_004576 +2010_004577 +2010_004581 +2010_004591 +2010_004592 +2010_004594 +2010_004598 +2010_004600 +2010_004601 +2010_004604 +2010_004609 +2010_004616 +2010_004620 +2010_004621 +2010_004625 +2010_004631 +2010_004638 +2010_004646 +2010_004655 +2010_004656 +2010_004657 +2010_004660 +2010_004665 +2010_004666 +2010_004669 +2010_004676 +2010_004680 +2010_004683 +2010_004690 +2010_004694 +2010_004696 +2010_004698 +2010_004703 +2010_004704 +2010_004708 +2010_004710 +2010_004712 +2010_004717 +2010_004721 +2010_004726 +2010_004728 +2010_004729 +2010_004730 +2010_004738 +2010_004741 +2010_004749 +2010_004751 +2010_004760 +2010_004765 +2010_004766 +2010_004770 +2010_004773 +2010_004777 +2010_004782 +2010_004791 +2010_004793 +2010_004797 +2010_004805 +2010_004806 +2010_004807 +2010_004808 +2010_004812 +2010_004816 +2010_004822 +2010_004824 +2010_004826 +2010_004831 +2010_004832 +2010_004838 +2010_004841 +2010_004844 +2010_004847 +2010_004848 +2010_004852 +2010_004855 +2010_004871 +2010_004874 +2010_004878 +2010_004879 +2010_004888 +2010_004890 +2010_004896 +2010_004900 +2010_004910 +2010_004913 +2010_004916 +2010_004918 +2010_004922 +2010_004928 +2010_004933 +2010_004937 +2010_004938 +2010_004942 +2010_004943 +2010_004944 +2010_004945 +2010_004948 +2010_004950 +2010_004953 +2010_004959 +2010_004960 +2010_004962 +2010_004963 +2010_004966 +2010_004968 +2010_004970 +2010_004971 +2010_004973 +2010_004974 +2010_004983 +2010_004987 +2010_004991 +2010_004995 +2010_004997 +2010_005002 +2010_005011 +2010_005016 +2010_005017 +2010_005018 +2010_005019 +2010_005022 +2010_005028 +2010_005033 +2010_005041 +2010_005054 +2010_005055 +2010_005060 +2010_005062 +2010_005064 +2010_005068 +2010_005071 +2010_005072 +2010_005080 +2010_005090 +2010_005093 +2010_005094 +2010_005098 +2010_005099 +2010_005100 +2010_005101 +2010_005106 +2010_005110 +2010_005111 +2010_005119 +2010_005127 +2010_005128 +2010_005129 +2010_005133 +2010_005134 +2010_005147 +2010_005149 +2010_005155 +2010_005161 +2010_005170 +2010_005182 +2010_005183 +2010_005190 +2010_005193 +2010_005198 +2010_005199 +2010_005201 +2010_005202 +2010_005211 +2010_005213 +2010_005216 +2010_005217 +2010_005223 +2010_005229 +2010_005232 +2010_005236 +2010_005238 +2010_005241 +2010_005253 +2010_005257 +2010_005258 +2010_005260 +2010_005261 +2010_005266 +2010_005270 +2010_005273 +2010_005274 +2010_005275 +2010_005276 +2010_005277 +2010_005279 +2010_005297 +2010_005299 +2010_005301 +2010_005303 +2010_005306 +2010_005308 +2010_005309 +2010_005310 +2010_005312 +2010_005317 +2010_005318 +2010_005320 +2010_005349 +2010_005350 +2010_005352 +2010_005359 +2010_005361 +2010_005364 +2010_005365 +2010_005371 +2010_005376 +2010_005377 +2010_005384 +2010_005385 +2010_005386 +2010_005388 +2010_005389 +2010_005391 +2010_005393 +2010_005402 +2010_005403 +2010_005408 +2010_005409 +2010_005415 +2010_005417 +2010_005419 +2010_005426 +2010_005429 +2010_005434 
+2010_005437 +2010_005442 +2010_005450 +2010_005457 +2010_005458 +2010_005462 +2010_005466 +2010_005468 +2010_005471 +2010_005475 +2010_005489 +2010_005492 +2010_005494 +2010_005497 +2010_005498 +2010_005500 +2010_005505 +2010_005506 +2010_005511 +2010_005512 +2010_005513 +2010_005518 +2010_005519 +2010_005522 +2010_005535 +2010_005536 +2010_005540 +2010_005546 +2010_005557 +2010_005559 +2010_005561 +2010_005565 +2010_005570 +2010_005571 +2010_005573 +2010_005578 +2010_005584 +2010_005585 +2010_005588 +2010_005591 +2010_005593 +2010_005595 +2010_005596 +2010_005597 +2010_005601 +2010_005603 +2010_005604 +2010_005608 +2010_005614 +2010_005615 +2010_005616 +2010_005619 +2010_005627 +2010_005628 +2010_005629 +2010_005640 +2010_005643 +2010_005646 +2010_005652 +2010_005663 +2010_005665 +2010_005668 +2010_005669 +2010_005670 +2010_005672 +2010_005678 +2010_005683 +2010_005684 +2010_005696 +2010_005700 +2010_005715 +2010_005716 +2010_005721 +2010_005723 +2010_005725 +2010_005732 +2010_005734 +2010_005735 +2010_005736 +2010_005740 +2010_005744 +2010_005746 +2010_005748 +2010_005750 +2010_005753 +2010_005755 +2010_005758 +2010_005770 +2010_005775 +2010_005776 +2010_005782 +2010_005785 +2010_005791 +2010_005794 +2010_005796 +2010_005800 +2010_005805 +2010_005807 +2010_005810 +2010_005816 +2010_005820 +2010_005821 +2010_005823 +2010_005825 +2010_005826 +2010_005830 +2010_005835 +2010_005836 +2010_005840 +2010_005841 +2010_005845 +2010_005847 +2010_005854 +2010_005855 +2010_005865 +2010_005867 +2010_005874 +2010_005875 +2010_005876 +2010_005891 +2010_005892 +2010_005898 +2010_005904 +2010_005906 +2010_005909 +2010_005919 +2010_005921 +2010_005927 +2010_005928 +2010_005929 +2010_005930 +2010_005932 +2010_005935 +2010_005942 +2010_005948 +2010_005949 +2010_005951 +2010_005952 +2010_005954 +2010_005957 +2010_005958 +2010_005959 +2010_005960 +2010_005967 +2010_005968 +2010_005972 +2010_005974 +2010_005975 +2010_005978 +2010_005982 +2010_005984 +2010_005985 +2010_005986 +2010_005987 +2010_005995 +2010_005996 +2010_006009 +2010_006012 +2010_006015 +2010_006023 +2010_006028 +2010_006040 +2010_006042 +2010_006050 +2010_006063 +2010_006066 +2010_006067 +2010_006073 +2010_006078 +2010_006079 +2011_000003 +2011_000006 +2011_000012 +2011_000017 +2011_000022 +2011_000025 +2011_000027 +2011_000028 +2011_000030 +2011_000041 +2011_000044 +2011_000048 +2011_000052 +2011_000053 +2011_000058 +2011_000068 +2011_000069 +2011_000072 +2011_000095 +2011_000105 +2011_000108 +2011_000116 +2011_000122 +2011_000137 +2011_000138 +2011_000145 +2011_000149 +2011_000152 +2011_000176 +2011_000181 +2011_000182 +2011_000192 +2011_000196 +2011_000197 +2011_000208 +2011_000216 +2011_000219 +2011_000220 +2011_000221 +2011_000222 +2011_000224 +2011_000228 +2011_000233 +2011_000241 +2011_000243 +2011_000249 +2011_000250 +2011_000252 +2011_000258 +2011_000267 +2011_000268 +2011_000269 +2011_000277 +2011_000278 +2011_000282 +2011_000285 +2011_000286 +2011_000290 +2011_000293 +2011_000297 +2011_000305 +2011_000317 +2011_000324 +2011_000329 +2011_000342 +2011_000343 +2011_000345 +2011_000347 +2011_000359 +2011_000361 +2011_000362 +2011_000370 +2011_000375 +2011_000376 +2011_000379 +2011_000382 +2011_000383 +2011_000385 +2011_000388 +2011_000392 +2011_000397 +2011_000398 +2011_000399 +2011_000400 +2011_000413 +2011_000416 +2011_000420 +2011_000428 +2011_000430 +2011_000434 +2011_000442 +2011_000444 +2011_000449 +2011_000450 +2011_000453 +2011_000454 +2011_000457 +2011_000461 +2011_000465 +2011_000468 +2011_000469 +2011_000472 +2011_000475 
+2011_000485 +2011_000491 +2011_000492 +2011_000494 +2011_000496 +2011_000499 +2011_000502 +2011_000505 +2011_000509 +2011_000513 +2011_000520 +2011_000531 +2011_000534 +2011_000538 +2011_000542 +2011_000550 +2011_000551 +2011_000556 +2011_000558 +2011_000560 +2011_000565 +2011_000567 +2011_000572 +2011_000573 +2011_000577 +2011_000578 +2011_000579 +2011_000586 +2011_000589 +2011_000594 +2011_000596 +2011_000621 +2011_000628 +2011_000629 +2011_000631 +2011_000637 +2011_000641 +2011_000642 +2011_000646 +2011_000651 +2011_000652 +2011_000655 +2011_000657 +2011_000673 +2011_000675 +2011_000682 +2011_000684 +2011_000689 +2011_000692 +2011_000698 +2011_000701 +2011_000703 +2011_000704 +2011_000711 +2011_000713 +2011_000725 +2011_000730 +2011_000731 +2011_000748 +2011_000755 +2011_000757 +2011_000758 +2011_000759 +2011_000763 +2011_000768 +2011_000769 +2011_000771 +2011_000788 +2011_000790 +2011_000791 +2011_000793 +2011_000800 +2011_000804 +2011_000806 +2011_000815 +2011_000819 +2011_000820 +2011_000823 +2011_000827 +2011_000828 +2011_000829 +2011_000831 +2011_000834 +2011_000837 +2011_000839 +2011_000840 +2011_000845 +2011_000847 +2011_000848 +2011_000855 +2011_000858 +2011_000859 +2011_000875 +2011_000882 +2011_000885 +2011_000893 +2011_000895 +2011_000898 +2011_000899 +2011_000920 +2011_000922 +2011_000934 +2011_000940 +2011_000944 +2011_000947 +2011_000954 +2011_000971 +2011_000973 +2011_000975 +2011_000979 +2011_000981 +2011_000982 +2011_000983 +2011_000987 +2011_000991 +2011_000996 +2011_000997 +2011_000999 +2011_001001 +2011_001004 +2011_001009 +2011_001010 +2011_001011 +2011_001015 +2011_001016 +2011_001022 +2011_001023 +2011_001027 +2011_001028 +2011_001030 +2011_001031 +2011_001033 +2011_001034 +2011_001052 +2011_001055 +2011_001062 +2011_001066 +2011_001073 +2011_001079 +2011_001080 +2011_001091 +2011_001093 +2011_001097 +2011_001107 +2011_001117 +2011_001123 +2011_001127 +2011_001133 +2011_001134 +2011_001135 +2011_001136 +2011_001139 +2011_001144 +2011_001150 +2011_001153 +2011_001163 +2011_001166 +2011_001168 +2011_001169 +2011_001173 +2011_001175 +2011_001176 +2011_001188 +2011_001189 +2011_001192 +2011_001193 +2011_001198 +2011_001208 +2011_001211 +2011_001215 +2011_001216 +2011_001220 +2011_001227 +2011_001238 +2011_001240 +2011_001246 +2011_001253 +2011_001254 +2011_001255 +2011_001257 +2011_001259 +2011_001270 +2011_001272 +2011_001277 +2011_001285 +2011_001286 +2011_001302 +2011_001310 +2011_001318 +2011_001320 +2011_001323 +2011_001333 +2011_001336 +2011_001344 +2011_001354 +2011_001357 +2011_001369 +2011_001373 +2011_001381 +2011_001382 +2011_001384 +2011_001394 +2011_001400 +2011_001402 +2011_001411 +2011_001412 +2011_001414 +2011_001422 +2011_001424 +2011_001432 +2011_001449 +2011_001451 +2011_001455 +2011_001456 +2011_001463 +2011_001464 +2011_001466 +2011_001475 +2011_001476 +2011_001479 +2011_001480 +2011_001498 +2011_001503 +2011_001505 +2011_001510 +2011_001514 +2011_001519 +2011_001526 +2011_001532 +2011_001536 +2011_001537 +2011_001538 +2011_001542 +2011_001547 +2011_001549 +2011_001557 +2011_001560 +2011_001566 +2011_001571 +2011_001572 +2011_001582 +2011_001586 +2011_001599 +2011_001600 +2011_001602 +2011_001605 +2011_001606 +2011_001611 +2011_001616 +2011_001621 +2011_001622 +2011_001625 +2011_001629 +2011_001632 +2011_001643 +2011_001647 +2011_001649 +2011_001650 +2011_001652 +2011_001653 +2011_001656 +2011_001662 +2011_001663 +2011_001666 +2011_001671 +2011_001673 +2011_001679 +2011_001689 +2011_001694 +2011_001695 +2011_001698 +2011_001700 +2011_001710 
+2011_001715 +2011_001716 +2011_001727 +2011_001730 +2011_001732 +2011_001733 +2011_001739 +2011_001740 +2011_001753 +2011_001754 +2011_001755 +2011_001764 +2011_001765 +2011_001766 +2011_001769 +2011_001776 +2011_001779 +2011_001789 +2011_001790 +2011_001791 +2011_001796 +2011_001799 +2011_001805 +2011_001810 +2011_001811 +2011_001826 +2011_001833 +2011_001840 +2011_001855 +2011_001866 +2011_001871 +2011_001872 +2011_001875 +2011_001884 +2011_001885 +2011_001886 +2011_001889 +2011_001891 +2011_001893 +2011_001895 +2011_001896 +2011_001901 +2011_001902 +2011_001904 +2011_001906 +2011_001920 +2011_001922 +2011_001924 +2011_001926 +2011_001928 +2011_001929 +2011_001930 +2011_001937 +2011_001938 +2011_001944 +2011_001949 +2011_001950 +2011_001952 +2011_001956 +2011_001959 +2011_001961 +2011_001964 +2011_001967 +2011_001971 +2011_001972 +2011_001974 +2011_001977 +2011_001987 +2011_001991 +2011_002005 +2011_002006 +2011_002012 +2011_002022 +2011_002027 +2011_002031 +2011_002034 +2011_002039 +2011_002046 +2011_002049 +2011_002050 +2011_002053 +2011_002055 +2011_002062 +2011_002063 +2011_002073 +2011_002085 +2011_002096 +2011_002097 +2011_002106 +2011_002107 +2011_002111 +2011_002113 +2011_002114 +2011_002119 +2011_002131 +2011_002134 +2011_002135 +2011_002142 +2011_002143 +2011_002144 +2011_002147 +2011_002148 +2011_002149 +2011_002167 +2011_002177 +2011_002179 +2011_002186 +2011_002189 +2011_002211 +2011_002218 +2011_002222 +2011_002224 +2011_002227 +2011_002228 +2011_002236 +2011_002237 +2011_002239 +2011_002245 +2011_002246 +2011_002251 +2011_002252 +2011_002253 +2011_002265 +2011_002268 +2011_002273 +2011_002278 +2011_002281 +2011_002284 +2011_002291 +2011_002300 +2011_002303 +2011_002318 +2011_002335 +2011_002341 +2011_002346 +2011_002347 +2011_002348 +2011_002350 +2011_002359 +2011_002381 +2011_002385 +2011_002387 +2011_002388 +2011_002389 +2011_002394 +2011_002397 +2011_002398 +2011_002402 +2011_002410 +2011_002413 +2011_002418 +2011_002419 +2011_002420 +2011_002421 +2011_002422 +2011_002433 +2011_002435 +2011_002436 +2011_002443 +2011_002447 +2011_002448 +2011_002455 +2011_002457 +2011_002458 +2011_002460 +2011_002461 +2011_002462 +2011_002464 +2011_002470 +2011_002474 +2011_002476 +2011_002484 +2011_002488 +2011_002492 +2011_002503 +2011_002504 +2011_002511 +2011_002514 +2011_002526 +2011_002528 +2011_002533 +2011_002543 +2011_002551 +2011_002552 +2011_002553 +2011_002554 +2011_002555 +2011_002559 +2011_002560 +2011_002561 +2011_002567 +2011_002568 +2011_002571 +2011_002584 +2011_002585 +2011_002590 +2011_002594 +2011_002598 +2011_002601 +2011_002606 +2011_002609 +2011_002614 +2011_002616 +2011_002618 +2011_002620 +2011_002636 +2011_002638 +2011_002649 +2011_002650 +2011_002652 +2011_002656 +2011_002657 +2011_002658 +2011_002661 +2011_002664 +2011_002673 +2011_002676 +2011_002677 +2011_002697 +2011_002706 +2011_002709 +2011_002715 +2011_002717 +2011_002719 +2011_002724 +2011_002726 +2011_002746 +2011_002748 +2011_002752 +2011_002756 +2011_002767 +2011_002770 +2011_002775 +2011_002776 +2011_002779 +2011_002780 +2011_002782 +2011_002790 +2011_002795 +2011_002798 +2011_002803 +2011_002808 +2011_002811 +2011_002814 +2011_002818 +2011_002821 +2011_002823 +2011_002826 +2011_002834 +2011_002842 +2011_002851 +2011_002852 +2011_002867 +2011_002872 +2011_002873 +2011_002881 +2011_002884 +2011_002889 +2011_002908 +2011_002911 +2011_002912 +2011_002913 +2011_002917 +2011_002920 +2011_002921 +2011_002924 +2011_002927 +2011_002930 +2011_002932 +2011_002935 +2011_002937 +2011_002938 +2011_002940 
+2011_002942 +2011_002947 +2011_002949 +2011_002953 +2011_002956 +2011_002958 +2011_002965 +2011_002966 +2011_002969 +2011_002974 +2011_002979 +2011_002987 +2011_002988 +2011_003002 +2011_003005 +2011_003010 +2011_003016 +2011_003020 +2011_003025 +2011_003034 +2011_003038 +2011_003041 +2011_003044 +2011_003047 +2011_003048 +2011_003049 +2011_003054 +2011_003057 +2011_003063 +2011_003065 +2011_003066 +2011_003073 +2011_003074 +2011_003078 +2011_003081 +2011_003091 +2011_003109 +2011_003121 +2011_003124 +2011_003132 +2011_003134 +2011_003138 +2011_003141 +2011_003148 +2011_003150 +2011_003151 +2011_003154 +2011_003158 +2011_003159 +2011_003162 +2011_003171 +2011_003177 +2011_003183 +2011_003184 +2011_003187 +2011_003188 +2011_003192 +2011_003194 +2011_003216 +2011_003223 +2011_003230 +2011_003236 +2011_003238 +2011_003246 +2011_003247 +2011_003253 +2011_003255 +2011_003259 +2011_003274 +2011_003276 diff --git a/ImageSets/Main/trainval.txt b/ImageSets/Main/trainval.txt new file mode 100644 index 0000000..2994c00 --- /dev/null +++ b/ImageSets/Main/trainval.txt @@ -0,0 +1,16551 @@ +000005 +000007 +000009 +000012 +000016 +000017 +000019 +000020 +000021 +000023 +000024 +000026 +000030 +000032 +000033 +000034 +000035 +000036 +000039 +000041 +000042 +000044 +000046 +000047 +000048 +000050 +000051 +000052 +000060 +000061 +000063 +000064 +000065 +000066 +000072 +000073 +000077 +000078 +000081 +000083 +000089 +000091 +000093 +000095 +000099 +000101 +000102 +000104 +000107 +000109 +000110 +000112 +000113 +000117 +000118 +000120 +000121 +000122 +000123 +000125 +000129 +000130 +000131 +000132 +000133 +000134 +000138 +000140 +000141 +000142 +000143 +000146 +000147 +000150 +000153 +000154 +000156 +000158 +000159 +000161 +000162 +000163 +000164 +000165 +000169 +000170 +000171 +000173 +000174 +000177 +000180 +000184 +000187 +000189 +000190 +000192 +000193 +000194 +000198 +000200 +000203 +000207 +000208 +000209 +000210 +000211 +000214 +000215 +000218 +000219 +000220 +000221 +000222 +000224 +000225 +000228 +000229 +000232 +000233 +000235 +000236 +000241 +000242 +000244 +000245 +000246 +000249 +000250 +000251 +000256 +000257 +000259 +000262 +000263 +000266 +000268 +000269 +000270 +000275 +000276 +000278 +000282 +000285 +000288 +000289 +000294 +000296 +000298 +000302 +000303 +000304 +000305 +000306 +000307 +000308 +000311 +000312 +000317 +000318 +000320 +000321 +000322 +000323 +000325 +000328 +000329 +000331 +000332 +000334 +000336 +000337 +000338 +000340 +000343 +000344 +000347 +000349 +000352 +000354 +000355 +000359 +000363 +000367 +000370 +000372 +000373 +000374 +000379 +000380 +000381 +000382 +000387 +000391 +000394 +000395 +000396 +000400 +000403 +000404 +000406 +000407 +000408 +000411 +000416 +000417 +000419 +000420 +000424 +000427 +000428 +000430 +000431 +000433 +000435 +000438 +000439 +000443 +000446 +000448 +000450 +000454 +000459 +000460 +000461 +000462 +000463 +000464 +000468 +000469 +000470 +000474 +000476 +000477 +000480 +000482 +000483 +000484 +000486 +000489 +000491 +000492 +000494 +000496 +000498 +000499 +000500 +000501 +000503 +000508 +000509 +000513 +000514 +000515 +000516 +000518 +000519 +000520 +000522 +000523 +000524 +000525 +000526 +000528 +000530 +000531 +000535 +000537 +000540 +000541 +000543 +000544 +000545 +000549 +000550 +000552 +000554 +000555 +000559 +000563 +000564 +000565 +000577 +000579 +000581 +000582 +000583 +000588 +000589 +000590 +000591 +000592 +000597 +000598 +000599 +000601 +000605 +000608 +000609 +000610 +000612 +000613 +000619 +000620 +000622 +000625 +000626 +000628 
+000632 +000633 +000635 +000637 +000645 +000647 +000648 +000653 +000654 +000656 +000657 +000660 +000661 +000663 +000667 +000671 +000672 +000675 +000676 +000677 +000680 +000682 +000684 +000685 +000686 +000688 +000689 +000690 +000694 +000695 +000699 +000700 +000702 +000705 +000707 +000709 +000710 +000711 +000712 +000713 +000714 +000717 +000720 +000726 +000728 +000729 +000730 +000731 +000733 +000738 +000739 +000740 +000742 +000746 +000748 +000750 +000752 +000753 +000754 +000755 +000756 +000760 +000761 +000763 +000764 +000767 +000768 +000770 +000771 +000772 +000774 +000776 +000777 +000780 +000782 +000786 +000787 +000791 +000793 +000794 +000796 +000797 +000799 +000800 +000802 +000804 +000805 +000806 +000808 +000810 +000812 +000814 +000815 +000816 +000818 +000820 +000822 +000823 +000826 +000827 +000828 +000829 +000830 +000831 +000832 +000834 +000842 +000843 +000845 +000847 +000848 +000849 +000850 +000851 +000854 +000855 +000857 +000859 +000860 +000862 +000863 +000865 +000867 +000868 +000871 +000872 +000874 +000876 +000878 +000879 +000880 +000882 +000885 +000887 +000888 +000889 +000892 +000895 +000896 +000898 +000899 +000900 +000902 +000903 +000904 +000906 +000908 +000911 +000912 +000915 +000917 +000918 +000919 +000920 +000921 +000923 +000926 +000929 +000931 +000934 +000935 +000936 +000937 +000943 +000946 +000947 +000948 +000949 +000950 +000951 +000954 +000958 +000962 +000964 +000965 +000966 +000967 +000971 +000972 +000973 +000977 +000980 +000982 +000987 +000989 +000991 +000993 +000996 +000997 +000999 +001001 +001002 +001004 +001008 +001009 +001010 +001011 +001012 +001014 +001015 +001017 +001018 +001024 +001027 +001028 +001036 +001041 +001042 +001043 +001045 +001050 +001052 +001053 +001056 +001057 +001060 +001061 +001062 +001064 +001066 +001068 +001069 +001071 +001072 +001073 +001074 +001077 +001078 +001079 +001082 +001083 +001084 +001091 +001092 +001093 +001097 +001101 +001102 +001104 +001106 +001107 +001109 +001110 +001112 +001113 +001119 +001121 +001124 +001125 +001127 +001129 +001130 +001136 +001137 +001140 +001142 +001143 +001144 +001145 +001147 +001148 +001149 +001151 +001152 +001154 +001156 +001158 +001160 +001161 +001164 +001166 +001168 +001170 +001171 +001172 +001174 +001175 +001176 +001182 +001184 +001185 +001186 +001187 +001191 +001192 +001194 +001199 +001200 +001201 +001203 +001204 +001205 +001206 +001207 +001209 +001211 +001212 +001214 +001215 +001221 +001224 +001225 +001226 +001229 +001230 +001231 +001233 +001234 +001236 +001237 +001239 +001240 +001241 +001247 +001248 +001250 +001254 +001258 +001259 +001260 +001263 +001265 +001266 +001268 +001269 +001270 +001272 +001273 +001274 +001277 +001279 +001281 +001284 +001286 +001287 +001288 +001289 +001290 +001292 +001293 +001294 +001298 +001299 +001304 +001309 +001310 +001311 +001312 +001314 +001315 +001316 +001323 +001324 +001325 +001326 +001327 +001330 +001332 +001333 +001334 +001337 +001341 +001343 +001345 +001346 +001348 +001350 +001352 +001360 +001361 +001362 +001364 +001365 +001371 +001375 +001378 +001383 +001384 +001385 +001386 +001387 +001388 +001390 +001393 +001395 +001397 +001400 +001402 +001404 +001405 +001406 +001408 +001409 +001413 +001414 +001418 +001420 +001421 +001426 +001427 +001430 +001432 +001434 +001436 +001439 +001441 +001442 +001443 +001444 +001445 +001450 +001451 +001453 +001455 +001457 +001460 +001463 +001464 +001465 +001466 +001467 +001468 +001470 +001472 +001475 +001479 +001480 +001481 +001483 +001484 +001485 +001486 +001488 +001490 +001492 +001493 +001494 +001497 +001498 +001499 +001501 +001504 +001509 +001510 
+001512 +001514 +001515 +001517 +001521 +001522 +001523 +001524 +001526 +001528 +001529 +001531 +001532 +001536 +001537 +001539 +001541 +001543 +001544 +001545 +001548 +001553 +001554 +001555 +001556 +001557 +001559 +001561 +001563 +001565 +001571 +001576 +001577 +001579 +001580 +001582 +001586 +001588 +001590 +001593 +001594 +001595 +001597 +001598 +001603 +001604 +001607 +001608 +001610 +001611 +001612 +001614 +001617 +001618 +001622 +001627 +001628 +001630 +001632 +001633 +001636 +001638 +001640 +001642 +001643 +001647 +001649 +001650 +001651 +001653 +001654 +001661 +001662 +001669 +001673 +001675 +001676 +001677 +001678 +001680 +001682 +001683 +001684 +001685 +001686 +001688 +001689 +001690 +001691 +001693 +001699 +001707 +001708 +001711 +001713 +001714 +001717 +001718 +001721 +001723 +001724 +001725 +001726 +001727 +001729 +001730 +001732 +001733 +001734 +001738 +001739 +001741 +001746 +001747 +001749 +001750 +001752 +001754 +001755 +001756 +001758 +001759 +001761 +001765 +001766 +001768 +001771 +001772 +001775 +001777 +001778 +001780 +001782 +001784 +001785 +001787 +001789 +001793 +001795 +001797 +001799 +001800 +001801 +001806 +001807 +001809 +001810 +001816 +001818 +001821 +001825 +001827 +001828 +001830 +001832 +001833 +001834 +001836 +001837 +001840 +001841 +001842 +001843 +001845 +001847 +001849 +001853 +001854 +001855 +001858 +001860 +001861 +001862 +001864 +001870 +001872 +001875 +001877 +001878 +001881 +001882 +001887 +001888 +001892 +001894 +001896 +001898 +001899 +001901 +001902 +001903 +001904 +001906 +001907 +001911 +001915 +001918 +001920 +001922 +001927 +001928 +001930 +001931 +001932 +001933 +001934 +001936 +001937 +001938 +001940 +001941 +001944 +001945 +001948 +001950 +001952 +001954 +001958 +001960 +001962 +001963 +001964 +001970 +001971 +001972 +001976 +001977 +001978 +001980 +001981 +001982 +001985 +001989 +001995 +001999 +002000 +002001 +002002 +002004 +002006 +002011 +002012 +002015 +002019 +002020 +002021 +002022 +002023 +002024 +002025 +002027 +002030 +002034 +002036 +002037 +002039 +002042 +002043 +002045 +002047 +002049 +002051 +002054 +002055 +002056 +002058 +002061 +002063 +002064 +002067 +002068 +002069 +002070 +002082 +002083 +002086 +002088 +002090 +002091 +002094 +002095 +002096 +002098 +002099 +002101 +002102 +002104 +002108 +002109 +002112 +002114 +002116 +002117 +002120 +002124 +002125 +002126 +002129 +002132 +002134 +002135 +002136 +002139 +002140 +002142 +002145 +002146 +002151 +002152 +002153 +002155 +002156 +002158 +002163 +002165 +002166 +002169 +002170 +002171 +002172 +002174 +002176 +002178 +002179 +002180 +002181 +002182 +002183 +002184 +002186 +002187 +002190 +002191 +002192 +002193 +002194 +002196 +002197 +002199 +002201 +002202 +002208 +002209 +002212 +002213 +002214 +002215 +002218 +002219 +002220 +002221 +002224 +002226 +002228 +002233 +002234 +002237 +002238 +002241 +002244 +002247 +002248 +002249 +002251 +002253 +002255 +002256 +002257 +002259 +002260 +002261 +002263 +002265 +002266 +002267 +002268 +002270 +002272 +002273 +002276 +002277 +002278 +002279 +002280 +002281 +002284 +002285 +002287 +002288 +002290 +002291 +002293 +002300 +002302 +002305 +002306 +002307 +002308 +002310 +002311 +002315 +002318 +002320 +002321 +002323 +002324 +002328 +002329 +002330 +002332 +002333 +002334 +002335 +002337 +002340 +002342 +002343 +002345 +002347 +002348 +002350 +002352 +002354 +002355 +002359 +002361 +002362 +002364 +002366 +002367 +002368 +002369 +002371 +002372 +002373 +002374 +002375 +002376 +002377 +002378 +002382 +002384 +002385 +002387 
+002391 +002392 +002393 +002401 +002403 +002404 +002405 +002407 +002410 +002411 +002413 +002415 +002417 +002419 +002420 +002423 +002425 +002427 +002433 +002435 +002436 +002437 +002439 +002441 +002442 +002443 +002444 +002445 +002448 +002450 +002452 +002454 +002456 +002458 +002459 +002460 +002461 +002462 +002465 +002466 +002468 +002470 +002471 +002472 +002476 +002477 +002478 +002479 +002480 +002481 +002483 +002490 +002491 +002492 +002493 +002494 +002496 +002497 +002500 +002501 +002502 +002504 +002505 +002508 +002512 +002513 +002514 +002518 +002519 +002520 +002523 +002524 +002525 +002529 +002533 +002534 +002537 +002539 +002540 +002542 +002544 +002545 +002546 +002547 +002549 +002554 +002555 +002558 +002559 +002561 +002563 +002564 +002565 +002566 +002567 +002569 +002571 +002572 +002578 +002579 +002584 +002585 +002586 +002589 +002590 +002593 +002594 +002595 +002598 +002599 +002600 +002603 +002605 +002606 +002609 +002611 +002613 +002615 +002618 +002621 +002625 +002627 +002632 +002633 +002634 +002635 +002636 +002637 +002641 +002643 +002645 +002646 +002647 +002648 +002649 +002653 +002657 +002658 +002659 +002662 +002664 +002666 +002667 +002668 +002669 +002670 +002675 +002677 +002678 +002680 +002682 +002683 +002684 +002689 +002690 +002691 +002693 +002695 +002696 +002697 +002699 +002702 +002704 +002706 +002709 +002710 +002713 +002714 +002715 +002717 +002718 +002721 +002722 +002723 +002727 +002730 +002732 +002734 +002735 +002737 +002738 +002741 +002744 +002745 +002747 +002749 +002751 +002755 +002757 +002759 +002760 +002762 +002763 +002765 +002766 +002767 +002772 +002774 +002775 +002776 +002778 +002779 +002782 +002783 +002784 +002785 +002786 +002791 +002794 +002795 +002796 +002798 +002800 +002801 +002803 +002804 +002807 +002810 +002812 +002815 +002816 +002817 +002820 +002826 +002827 +002833 +002834 +002835 +002836 +002838 +002841 +002842 +002844 +002845 +002847 +002848 +002854 +002855 +002858 +002859 +002864 +002866 +002867 +002868 +002869 +002870 +002873 +002875 +002879 +002880 +002881 +002884 +002886 +002889 +002891 +002893 +002896 +002899 +002901 +002906 +002910 +002912 +002913 +002914 +002915 +002916 +002917 +002919 +002924 +002931 +002932 +002933 +002934 +002935 +002937 +002938 +002939 +002940 +002941 +002942 +002943 +002944 +002946 +002947 +002952 +002953 +002954 +002956 +002957 +002958 +002960 +002962 +002963 +002965 +002966 +002967 +002969 +002975 +002976 +002977 +002978 +002984 +002986 +002987 +002988 +002989 +002990 +002992 +002994 +002995 +003000 +003002 +003003 +003004 +003005 +003007 +003008 +003009 +003011 +003013 +003015 +003017 +003021 +003023 +003024 +003027 +003028 +003031 +003032 +003034 +003038 +003039 +003042 +003044 +003045 +003047 +003051 +003053 +003054 +003056 +003057 +003058 +003061 +003063 +003064 +003065 +003066 +003072 +003074 +003077 +003078 +003082 +003083 +003085 +003086 +003088 +003089 +003090 +003092 +003093 +003094 +003098 +003100 +003102 +003103 +003105 +003106 +003107 +003108 +003110 +003112 +003116 +003117 +003118 +003120 +003121 +003122 +003124 +003126 +003127 +003129 +003133 +003134 +003135 +003137 +003138 +003140 +003142 +003145 +003146 +003147 +003149 +003150 +003154 +003155 +003157 +003159 +003161 +003162 +003163 +003164 +003165 +003169 +003170 +003175 +003176 +003177 +003178 +003181 +003183 +003184 +003185 +003186 +003188 +003189 +003194 +003195 +003199 +003200 +003202 +003204 +003205 +003207 +003210 +003211 +003213 +003214 +003216 +003218 +003219 +003223 +003228 +003229 +003231 +003233 +003236 +003239 +003240 +003242 +003243 +003244 +003247 +003250 +003253 
+003254 +003255 +003256 +003258 +003259 +003260 +003261 +003262 +003269 +003270 +003271 +003272 +003273 +003274 +003279 +003280 +003282 +003284 +003285 +003290 +003292 +003293 +003294 +003296 +003299 +003300 +003301 +003303 +003307 +003308 +003311 +003313 +003316 +003320 +003325 +003327 +003330 +003331 +003335 +003336 +003337 +003338 +003339 +003343 +003344 +003349 +003350 +003351 +003354 +003355 +003356 +003359 +003360 +003362 +003363 +003365 +003367 +003369 +003370 +003373 +003374 +003376 +003377 +003379 +003380 +003382 +003386 +003390 +003391 +003392 +003395 +003396 +003397 +003398 +003401 +003403 +003404 +003406 +003407 +003408 +003410 +003412 +003413 +003415 +003416 +003417 +003419 +003420 +003421 +003422 +003424 +003425 +003429 +003430 +003433 +003435 +003436 +003439 +003441 +003443 +003444 +003449 +003450 +003451 +003452 +003453 +003455 +003458 +003461 +003462 +003464 +003465 +003466 +003468 +003469 +003470 +003477 +003484 +003487 +003489 +003491 +003492 +003493 +003496 +003497 +003499 +003500 +003506 +003508 +003509 +003510 +003511 +003516 +003518 +003519 +003521 +003522 +003524 +003525 +003528 +003529 +003530 +003536 +003537 +003539 +003546 +003548 +003549 +003550 +003551 +003554 +003555 +003556 +003564 +003565 +003566 +003567 +003575 +003576 +003577 +003580 +003585 +003586 +003587 +003588 +003589 +003593 +003594 +003596 +003597 +003599 +003603 +003604 +003605 +003606 +003608 +003609 +003611 +003614 +003618 +003620 +003621 +003622 +003623 +003625 +003627 +003628 +003629 +003632 +003634 +003635 +003636 +003638 +003639 +003640 +003642 +003644 +003645 +003646 +003648 +003651 +003654 +003655 +003656 +003657 +003658 +003660 +003662 +003663 +003664 +003667 +003669 +003671 +003673 +003674 +003675 +003678 +003679 +003681 +003684 +003685 +003688 +003690 +003691 +003694 +003695 +003696 +003698 +003699 +003700 +003703 +003704 +003705 +003706 +003708 +003709 +003711 +003713 +003714 +003717 +003721 +003722 +003727 +003729 +003732 +003735 +003740 +003743 +003748 +003749 +003750 +003751 +003752 +003753 +003754 +003758 +003759 +003760 +003763 +003767 +003772 +003773 +003774 +003779 +003780 +003781 +003783 +003784 +003786 +003788 +003790 +003791 +003792 +003793 +003796 +003797 +003798 +003803 +003806 +003807 +003808 +003809 +003811 +003814 +003817 +003818 +003820 +003821 +003824 +003826 +003827 +003828 +003830 +003834 +003835 +003837 +003838 +003844 +003845 +003846 +003847 +003848 +003849 +003855 +003856 +003857 +003859 +003860 +003861 +003863 +003865 +003866 +003868 +003869 +003871 +003872 +003874 +003876 +003877 +003879 +003885 +003886 +003887 +003889 +003890 +003891 +003895 +003898 +003899 +003905 +003907 +003911 +003912 +003913 +003915 +003918 +003919 +003921 +003923 +003924 +003926 +003932 +003935 +003936 +003937 +003939 +003941 +003945 +003946 +003947 +003948 +003949 +003953 +003954 +003956 +003957 +003960 +003961 +003963 +003965 +003966 +003969 +003970 +003971 +003973 +003974 +003979 +003983 +003984 +003986 +003987 +003988 +003990 +003991 +003992 +003993 +003994 +003996 +003997 +003998 +004003 +004005 +004008 +004009 +004010 +004011 +004012 +004013 +004014 +004015 +004016 +004017 +004019 +004020 +004023 +004025 +004028 +004031 +004033 +004034 +004035 +004037 +004039 +004046 +004047 +004051 +004052 +004057 +004058 +004060 +004066 +004067 +004069 +004073 +004075 +004076 +004077 +004082 +004085 +004087 +004089 +004091 +004092 +004093 +004095 +004100 +004102 +004105 +004106 +004108 +004110 +004111 +004113 +004117 +004120 +004121 +004122 +004129 +004131 +004133 +004135 +004136 +004137 +004138 
+004140 +004141 +004142 +004143 +004145 +004146 +004148 +004149 +004150 +004152 +004158 +004163 +004164 +004168 +004169 +004170 +004171 +004174 +004178 +004185 +004186 +004189 +004190 +004191 +004192 +004193 +004194 +004195 +004196 +004200 +004201 +004203 +004204 +004205 +004209 +004212 +004215 +004220 +004221 +004223 +004224 +004228 +004229 +004230 +004231 +004232 +004237 +004239 +004241 +004242 +004244 +004246 +004247 +004253 +004255 +004256 +004257 +004258 +004259 +004263 +004264 +004265 +004269 +004270 +004271 +004272 +004273 +004274 +004275 +004279 +004280 +004281 +004283 +004284 +004286 +004287 +004291 +004292 +004293 +004295 +004296 +004298 +004300 +004303 +004304 +004307 +004310 +004312 +004315 +004318 +004321 +004322 +004323 +004325 +004326 +004327 +004329 +004331 +004333 +004338 +004339 +004341 +004345 +004346 +004347 +004349 +004351 +004352 +004354 +004356 +004359 +004360 +004361 +004364 +004365 +004367 +004368 +004369 +004370 +004371 +004372 +004376 +004379 +004380 +004384 +004386 +004387 +004389 +004390 +004391 +004392 +004396 +004397 +004404 +004405 +004409 +004411 +004421 +004423 +004424 +004429 +004430 +004432 +004433 +004434 +004436 +004437 +004438 +004439 +004441 +004446 +004450 +004452 +004455 +004457 +004459 +004463 +004464 +004466 +004468 +004470 +004471 +004474 +004479 +004481 +004484 +004487 +004488 +004490 +004493 +004494 +004495 +004496 +004498 +004499 +004500 +004502 +004507 +004508 +004509 +004510 +004512 +004514 +004517 +004518 +004519 +004520 +004524 +004526 +004527 +004528 +004530 +004532 +004535 +004537 +004539 +004540 +004542 +004544 +004548 +004549 +004551 +004552 +004553 +004555 +004558 +004562 +004563 +004565 +004566 +004570 +004571 +004574 +004576 +004579 +004581 +004584 +004585 +004587 +004588 +004591 +004592 +004595 +004597 +004600 +004601 +004604 +004605 +004606 +004607 +004609 +004611 +004612 +004618 +004622 +004623 +004625 +004626 +004627 +004628 +004630 +004631 +004632 +004634 +004636 +004643 +004644 +004647 +004648 +004649 +004651 +004652 +004653 +004654 +004655 +004656 +004660 +004662 +004671 +004672 +004673 +004674 +004675 +004676 +004679 +004682 +004683 +004685 +004686 +004687 +004689 +004691 +004692 +004693 +004694 +004699 +004701 +004702 +004705 +004706 +004707 +004708 +004710 +004714 +004715 +004718 +004719 +004722 +004723 +004727 +004732 +004735 +004737 +004742 +004743 +004746 +004747 +004748 +004750 +004753 +004754 +004760 +004761 +004768 +004770 +004773 +004776 +004777 +004779 +004782 +004783 +004785 +004786 +004788 +004789 +004790 +004792 +004793 +004794 +004796 +004797 +004799 +004801 +004805 +004808 +004812 +004814 +004815 +004816 +004818 +004823 +004825 +004826 +004828 +004830 +004831 +004832 +004834 +004836 +004837 +004839 +004840 +004841 +004842 +004846 +004848 +004849 +004850 +004852 +004856 +004857 +004859 +004863 +004866 +004867 +004868 +004869 +004872 +004873 +004876 +004878 +004879 +004882 +004885 +004886 +004890 +004895 +004896 +004897 +004898 +004902 +004903 +004905 +004907 +004910 +004911 +004912 +004913 +004916 +004926 +004928 +004929 +004931 +004935 +004936 +004938 +004939 +004943 +004946 +004948 +004950 +004951 +004953 +004954 +004955 +004956 +004958 +004960 +004961 +004962 +004963 +004966 +004967 +004968 +004972 +004973 +004974 +004976 +004977 +004982 +004983 +004984 +004985 +004986 +004987 +004990 +004991 +004992 +004994 +004995 +004997 +004998 +004999 +005001 +005003 +005004 +005006 +005007 +005014 +005016 +005018 +005020 +005023 +005024 +005026 +005027 +005028 +005029 +005032 +005033 +005036 +005037 +005039 +005042 
+005045 +005047 +005052 +005054 +005055 +005056 +005057 +005058 +005061 +005062 +005063 +005064 +005065 +005067 +005068 +005071 +005072 +005073 +005077 +005078 +005079 +005081 +005084 +005085 +005086 +005090 +005093 +005094 +005097 +005101 +005102 +005104 +005107 +005108 +005110 +005111 +005114 +005116 +005121 +005122 +005124 +005128 +005129 +005130 +005131 +005134 +005135 +005136 +005138 +005143 +005144 +005145 +005146 +005150 +005153 +005156 +005159 +005160 +005161 +005168 +005169 +005171 +005173 +005175 +005176 +005177 +005179 +005181 +005183 +005185 +005186 +005189 +005190 +005191 +005195 +005199 +005202 +005203 +005208 +005209 +005210 +005212 +005214 +005215 +005217 +005219 +005220 +005222 +005223 +005224 +005229 +005230 +005231 +005236 +005239 +005242 +005244 +005245 +005246 +005248 +005253 +005254 +005257 +005258 +005259 +005260 +005262 +005263 +005264 +005267 +005268 +005269 +005270 +005273 +005274 +005278 +005281 +005283 +005285 +005288 +005290 +005292 +005293 +005297 +005298 +005303 +005304 +005305 +005306 +005307 +005310 +005311 +005312 +005314 +005315 +005318 +005319 +005320 +005325 +005326 +005327 +005328 +005331 +005336 +005337 +005338 +005340 +005343 +005344 +005345 +005346 +005348 +005349 +005350 +005351 +005352 +005355 +005358 +005360 +005363 +005365 +005367 +005368 +005369 +005370 +005371 +005373 +005374 +005378 +005379 +005380 +005383 +005384 +005385 +005387 +005388 +005389 +005391 +005393 +005395 +005396 +005397 +005398 +005404 +005405 +005406 +005407 +005408 +005410 +005413 +005414 +005416 +005417 +005418 +005419 +005420 +005421 +005423 +005424 +005429 +005430 +005431 +005433 +005434 +005436 +005438 +005439 +005440 +005441 +005445 +005448 +005450 +005451 +005453 +005454 +005455 +005457 +005461 +005465 +005467 +005469 +005470 +005471 +005475 +005478 +005481 +005483 +005485 +005486 +005487 +005489 +005496 +005497 +005499 +005507 +005508 +005509 +005510 +005511 +005514 +005515 +005517 +005518 +005519 +005521 +005522 +005524 +005526 +005527 +005530 +005531 +005535 +005536 +005539 +005541 +005542 +005544 +005547 +005549 +005550 +005552 +005554 +005559 +005563 +005566 +005568 +005573 +005574 +005576 +005577 +005579 +005582 +005583 +005584 +005585 +005586 +005588 +005590 +005591 +005592 +005593 +005599 +005600 +005601 +005603 +005605 +005606 +005608 +005609 +005611 +005613 +005614 +005615 +005618 +005620 +005624 +005625 +005629 +005630 +005631 +005636 +005637 +005639 +005640 +005641 +005644 +005645 +005647 +005648 +005652 +005653 +005654 +005655 +005657 +005658 +005660 +005662 +005664 +005668 +005669 +005672 +005674 +005676 +005679 +005680 +005682 +005685 +005686 +005687 +005693 +005695 +005696 +005697 +005699 +005700 +005701 +005702 +005704 +005705 +005710 +005713 +005714 +005715 +005716 +005718 +005719 +005723 +005728 +005729 +005730 +005731 +005732 +005735 +005736 +005738 +005740 +005741 +005742 +005743 +005747 +005749 +005752 +005755 +005756 +005757 +005760 +005761 +005762 +005764 +005765 +005768 +005769 +005773 +005779 +005780 +005781 +005782 +005783 +005784 +005786 +005788 +005789 +005790 +005791 +005794 +005796 +005799 +005803 +005805 +005806 +005811 +005812 +005813 +005814 +005815 +005817 +005818 +005819 +005821 +005824 +005825 +005826 +005828 +005829 +005830 +005831 +005836 +005838 +005839 +005840 +005841 +005843 +005845 +005850 +005851 +005852 +005853 +005854 +005856 +005859 +005860 +005861 +005863 +005864 +005867 +005868 +005873 +005874 +005875 +005877 +005878 +005879 +005881 +005884 +005885 +005888 +005889 +005893 +005894 +005895 +005897 +005899 +005901 +005903 
+005905 +005906 +005908 +005909 +005910 +005911 +005912 +005914 +005917 +005918 +005919 +005920 +005923 +005928 +005930 +005938 +005940 +005947 +005948 +005951 +005952 +005954 +005956 +005960 +005961 +005963 +005964 +005968 +005970 +005971 +005975 +005979 +005980 +005981 +005983 +005984 +005985 +005988 +005989 +005990 +005991 +005992 +005995 +005996 +005998 +006000 +006001 +006004 +006005 +006009 +006011 +006012 +006018 +006020 +006023 +006025 +006026 +006027 +006028 +006029 +006030 +006033 +006035 +006038 +006041 +006042 +006043 +006045 +006046 +006055 +006058 +006061 +006062 +006065 +006066 +006067 +006069 +006070 +006071 +006073 +006074 +006078 +006079 +006084 +006088 +006089 +006091 +006095 +006096 +006097 +006098 +006100 +006103 +006104 +006105 +006107 +006108 +006111 +006117 +006120 +006123 +006124 +006125 +006128 +006129 +006130 +006131 +006133 +006134 +006135 +006136 +006139 +006140 +006141 +006146 +006148 +006150 +006151 +006153 +006156 +006158 +006159 +006161 +006162 +006163 +006166 +006170 +006171 +006172 +006174 +006175 +006176 +006177 +006179 +006180 +006181 +006183 +006184 +006185 +006187 +006188 +006189 +006190 +006196 +006198 +006201 +006202 +006203 +006206 +006208 +006209 +006210 +006212 +006214 +006215 +006216 +006218 +006219 +006220 +006221 +006222 +006223 +006224 +006225 +006229 +006230 +006233 +006234 +006235 +006236 +006238 +006240 +006241 +006243 +006247 +006249 +006250 +006251 +006252 +006254 +006258 +006259 +006260 +006261 +006262 +006264 +006267 +006269 +006270 +006272 +006275 +006276 +006277 +006279 +006281 +006282 +006284 +006285 +006286 +006289 +006290 +006291 +006295 +006296 +006299 +006300 +006301 +006304 +006305 +006306 +006309 +006314 +006318 +006319 +006320 +006321 +006323 +006325 +006329 +006330 +006335 +006337 +006338 +006339 +006341 +006344 +006346 +006348 +006349 +006350 +006351 +006352 +006353 +006355 +006357 +006362 +006363 +006366 +006367 +006369 +006371 +006374 +006375 +006377 +006381 +006382 +006385 +006387 +006391 +006392 +006395 +006396 +006398 +006400 +006404 +006409 +006411 +006417 +006418 +006419 +006421 +006424 +006425 +006427 +006428 +006429 +006430 +006433 +006434 +006436 +006437 +006438 +006440 +006442 +006443 +006444 +006445 +006447 +006448 +006449 +006450 +006455 +006456 +006458 +006459 +006462 +006463 +006465 +006466 +006468 +006470 +006472 +006473 +006474 +006475 +006476 +006480 +006482 +006483 +006484 +006486 +006488 +006492 +006495 +006497 +006499 +006501 +006503 +006506 +006507 +006509 +006512 +006515 +006519 +006520 +006523 +006524 +006529 +006530 +006532 +006534 +006536 +006538 +006542 +006543 +006547 +006548 +006549 +006550 +006551 +006553 +006556 +006560 +006562 +006564 +006565 +006569 +006570 +006572 +006575 +006576 +006578 +006583 +006584 +006585 +006587 +006588 +006593 +006595 +006597 +006599 +006602 +006603 +006605 +006606 +006609 +006610 +006611 +006612 +006617 +006618 +006619 +006621 +006622 +006625 +006626 +006627 +006628 +006631 +006632 +006635 +006636 +006637 +006638 +006643 +006645 +006647 +006648 +006652 +006654 +006657 +006658 +006660 +006661 +006664 +006666 +006667 +006668 +006670 +006671 +006673 +006674 +006677 +006678 +006679 +006681 +006682 +006684 +006687 +006689 +006690 +006694 +006695 +006696 +006697 +006698 +006699 +006702 +006703 +006704 +006706 +006707 +006708 +006709 +006714 +006718 +006719 +006722 +006725 +006726 +006727 +006730 +006731 +006734 +006735 +006736 +006738 +006739 +006740 +006747 +006748 +006751 +006753 +006755 +006759 +006760 +006761 +006762 +006765 +006766 +006768 +006769 +006772 +006773 
+006777 +006781 +006782 +006783 +006784 +006786 +006789 +006794 +006797 +006799 +006800 +006802 +006803 +006805 +006806 +006808 +006810 +006813 +006814 +006819 +006821 +006822 +006824 +006825 +006827 +006828 +006829 +006833 +006835 +006836 +006838 +006839 +006840 +006841 +006842 +006844 +006845 +006847 +006848 +006849 +006850 +006852 +006855 +006858 +006859 +006860 +006862 +006864 +006865 +006866 +006867 +006868 +006869 +006874 +006876 +006878 +006880 +006883 +006884 +006886 +006887 +006892 +006893 +006896 +006899 +006900 +006903 +006908 +006909 +006910 +006911 +006912 +006914 +006916 +006917 +006918 +006919 +006922 +006924 +006930 +006931 +006932 +006933 +006934 +006935 +006939 +006940 +006943 +006944 +006945 +006947 +006948 +006949 +006950 +006952 +006953 +006956 +006958 +006959 +006962 +006963 +006965 +006966 +006968 +006971 +006972 +006976 +006981 +006983 +006987 +006988 +006989 +006990 +006994 +006995 +007002 +007003 +007004 +007006 +007007 +007008 +007009 +007011 +007016 +007018 +007020 +007021 +007022 +007023 +007025 +007029 +007031 +007033 +007035 +007036 +007038 +007039 +007040 +007042 +007045 +007046 +007048 +007049 +007050 +007052 +007054 +007056 +007058 +007059 +007062 +007064 +007065 +007068 +007070 +007071 +007072 +007073 +007074 +007075 +007077 +007078 +007079 +007080 +007084 +007086 +007088 +007089 +007090 +007092 +007093 +007095 +007097 +007100 +007101 +007104 +007105 +007108 +007109 +007113 +007114 +007117 +007121 +007122 +007123 +007125 +007128 +007129 +007130 +007132 +007133 +007138 +007139 +007140 +007141 +007144 +007146 +007147 +007148 +007149 +007150 +007152 +007153 +007154 +007159 +007162 +007163 +007165 +007166 +007167 +007168 +007172 +007174 +007177 +007180 +007182 +007184 +007185 +007187 +007189 +007191 +007193 +007194 +007197 +007200 +007204 +007205 +007208 +007210 +007211 +007212 +007213 +007214 +007215 +007216 +007217 +007219 +007222 +007223 +007224 +007227 +007230 +007234 +007236 +007241 +007243 +007244 +007245 +007247 +007249 +007250 +007256 +007258 +007259 +007260 +007261 +007263 +007266 +007270 +007271 +007274 +007275 +007276 +007279 +007280 +007283 +007284 +007285 +007289 +007292 +007294 +007295 +007296 +007297 +007298 +007299 +007300 +007302 +007305 +007308 +007311 +007314 +007318 +007322 +007323 +007325 +007327 +007329 +007330 +007334 +007336 +007343 +007344 +007346 +007350 +007351 +007356 +007359 +007361 +007363 +007365 +007369 +007370 +007372 +007373 +007374 +007375 +007376 +007381 +007383 +007385 +007388 +007389 +007390 +007394 +007396 +007398 +007408 +007410 +007411 +007413 +007414 +007416 +007417 +007419 +007421 +007422 +007424 +007425 +007427 +007431 +007432 +007433 +007435 +007436 +007437 +007438 +007439 +007443 +007445 +007446 +007448 +007449 +007451 +007454 +007457 +007458 +007460 +007461 +007465 +007466 +007467 +007468 +007470 +007474 +007475 +007477 +007479 +007480 +007481 +007482 +007483 +007484 +007486 +007489 +007490 +007491 +007493 +007497 +007498 +007503 +007506 +007511 +007513 +007517 +007519 +007521 +007523 +007524 +007525 +007526 +007527 +007528 +007530 +007533 +007535 +007536 +007537 +007538 +007540 +007543 +007544 +007546 +007547 +007551 +007555 +007558 +007559 +007563 +007565 +007566 +007568 +007570 +007571 +007572 +007575 +007576 +007578 +007579 +007585 +007586 +007590 +007592 +007594 +007600 +007601 +007603 +007605 +007606 +007611 +007612 +007614 +007615 +007618 +007619 +007621 +007622 +007624 +007626 +007629 +007631 +007633 +007637 +007639 +007640 +007642 +007647 +007649 +007650 +007653 +007654 +007655 +007656 +007657 +007662 
+007663 +007664 +007666 +007667 +007668 +007670 +007671 +007672 +007673 +007675 +007677 +007678 +007679 +007680 +007682 +007683 +007685 +007687 +007688 +007691 +007692 +007694 +007696 +007697 +007699 +007702 +007704 +007705 +007709 +007712 +007713 +007715 +007718 +007720 +007721 +007723 +007724 +007727 +007729 +007731 +007732 +007735 +007736 +007740 +007742 +007743 +007745 +007746 +007748 +007749 +007751 +007753 +007754 +007758 +007760 +007762 +007763 +007765 +007767 +007768 +007772 +007773 +007775 +007776 +007777 +007779 +007781 +007786 +007790 +007791 +007793 +007795 +007798 +007799 +007803 +007809 +007810 +007812 +007813 +007814 +007815 +007819 +007820 +007821 +007824 +007826 +007831 +007833 +007834 +007836 +007838 +007840 +007841 +007843 +007845 +007847 +007853 +007854 +007855 +007856 +007857 +007859 +007863 +007864 +007865 +007868 +007869 +007872 +007873 +007876 +007877 +007878 +007883 +007884 +007885 +007886 +007889 +007890 +007897 +007898 +007899 +007900 +007901 +007902 +007905 +007908 +007909 +007910 +007911 +007914 +007915 +007916 +007919 +007920 +007921 +007923 +007924 +007925 +007926 +007928 +007931 +007932 +007933 +007935 +007939 +007940 +007943 +007946 +007947 +007950 +007953 +007954 +007956 +007958 +007959 +007963 +007964 +007968 +007970 +007971 +007974 +007976 +007979 +007980 +007984 +007987 +007991 +007996 +007997 +007998 +007999 +008001 +008002 +008004 +008005 +008008 +008009 +008012 +008017 +008019 +008023 +008024 +008026 +008029 +008031 +008032 +008033 +008036 +008037 +008040 +008042 +008043 +008044 +008048 +008049 +008051 +008053 +008057 +008060 +008061 +008062 +008063 +008064 +008067 +008068 +008069 +008072 +008075 +008076 +008079 +008082 +008083 +008084 +008085 +008086 +008087 +008091 +008093 +008095 +008096 +008098 +008100 +008101 +008103 +008105 +008106 +008107 +008108 +008112 +008115 +008116 +008117 +008121 +008122 +008125 +008127 +008130 +008132 +008137 +008138 +008139 +008140 +008141 +008142 +008144 +008150 +008151 +008159 +008160 +008163 +008164 +008166 +008168 +008169 +008171 +008173 +008174 +008175 +008177 +008180 +008186 +008188 +008189 +008190 +008191 +008197 +008199 +008200 +008202 +008203 +008204 +008208 +008209 +008211 +008213 +008216 +008218 +008220 +008222 +008223 +008224 +008225 +008226 +008229 +008232 +008235 +008236 +008241 +008244 +008248 +008250 +008251 +008252 +008253 +008254 +008258 +008260 +008261 +008262 +008263 +008268 +008269 +008272 +008275 +008279 +008280 +008281 +008282 +008284 +008285 +008292 +008293 +008294 +008295 +008296 +008297 +008299 +008300 +008301 +008302 +008306 +008307 +008310 +008311 +008312 +008313 +008315 +008316 +008317 +008318 +008319 +008320 +008322 +008323 +008326 +008327 +008329 +008332 +008335 +008336 +008338 +008341 +008342 +008345 +008346 +008349 +008351 +008355 +008359 +008360 +008364 +008365 +008368 +008370 +008372 +008374 +008376 +008381 +008384 +008385 +008386 +008387 +008388 +008390 +008391 +008397 +008398 +008403 +008409 +008410 +008413 +008415 +008416 +008422 +008423 +008424 +008425 +008426 +008427 +008429 +008430 +008433 +008434 +008437 +008438 +008442 +008443 +008444 +008445 +008449 +008450 +008452 +008453 +008454 +008456 +008461 +008462 +008465 +008466 +008467 +008468 +008470 +008472 +008475 +008477 +008478 +008482 +008483 +008484 +008485 +008492 +008494 +008495 +008498 +008499 +008502 +008503 +008506 +008509 +008512 +008513 +008514 +008517 +008518 +008519 +008521 +008522 +008523 +008524 +008526 +008529 +008530 +008533 +008534 +008535 +008536 +008541 +008542 +008549 +008550 +008553 +008556 +008557 +008558 
+008559 +008562 +008564 +008568 +008572 +008573 +008576 +008581 +008582 +008584 +008585 +008586 +008587 +008588 +008592 +008595 +008596 +008601 +008602 +008604 +008606 +008607 +008608 +008610 +008612 +008615 +008617 +008618 +008620 +008621 +008624 +008628 +008633 +008635 +008636 +008638 +008639 +008644 +008645 +008647 +008653 +008654 +008655 +008663 +008665 +008667 +008670 +008676 +008680 +008683 +008687 +008688 +008690 +008691 +008692 +008695 +008698 +008699 +008701 +008702 +008706 +008709 +008710 +008713 +008716 +008717 +008718 +008720 +008722 +008723 +008725 +008727 +008728 +008730 +008731 +008732 +008733 +008738 +008739 +008741 +008742 +008744 +008747 +008748 +008749 +008750 +008752 +008753 +008755 +008756 +008757 +008759 +008760 +008764 +008766 +008768 +008769 +008770 +008771 +008772 +008773 +008775 +008776 +008783 +008784 +008790 +008793 +008794 +008796 +008799 +008801 +008805 +008806 +008809 +008810 +008811 +008813 +008814 +008815 +008817 +008819 +008822 +008823 +008826 +008831 +008833 +008835 +008836 +008837 +008838 +008840 +008841 +008843 +008847 +008848 +008849 +008854 +008856 +008858 +008859 +008862 +008865 +008867 +008871 +008872 +008873 +008874 +008876 +008878 +008879 +008880 +008883 +008884 +008885 +008886 +008888 +008890 +008891 +008892 +008900 +008905 +008909 +008911 +008913 +008914 +008917 +008919 +008920 +008921 +008923 +008926 +008927 +008929 +008930 +008931 +008932 +008933 +008936 +008939 +008940 +008942 +008943 +008944 +008948 +008951 +008953 +008955 +008958 +008960 +008961 +008962 +008965 +008966 +008967 +008968 +008969 +008970 +008971 +008973 +008975 +008976 +008978 +008979 +008980 +008982 +008983 +008985 +008987 +008988 +008989 +008995 +008997 +008999 +009000 +009002 +009004 +009005 +009006 +009007 +009015 +009016 +009018 +009019 +009020 +009022 +009024 +009027 +009029 +009032 +009034 +009035 +009036 +009037 +009039 +009042 +009045 +009048 +009049 +009051 +009053 +009058 +009059 +009060 +009063 +009064 +009066 +009068 +009072 +009073 +009078 +009079 +009080 +009085 +009086 +009087 +009089 +009091 +009094 +009098 +009099 +009100 +009105 +009106 +009108 +009112 +009113 +009114 +009116 +009117 +009121 +009123 +009126 +009128 +009129 +009131 +009133 +009136 +009138 +009141 +009144 +009147 +009148 +009150 +009151 +009153 +009155 +009157 +009159 +009160 +009161 +009162 +009163 +009166 +009168 +009173 +009174 +009175 +009177 +009178 +009179 +009180 +009181 +009184 +009185 +009186 +009187 +009189 +009191 +009192 +009193 +009194 +009195 +009196 +009197 +009200 +009202 +009205 +009208 +009209 +009212 +009213 +009214 +009215 +009218 +009221 +009224 +009227 +009230 +009236 +009238 +009239 +009242 +009244 +009245 +009246 +009247 +009249 +009250 +009251 +009252 +009254 +009255 +009259 +009268 +009269 +009270 +009271 +009272 +009273 +009278 +009279 +009281 +009282 +009283 +009285 +009286 +009287 +009288 +009289 +009290 +009291 +009295 +009296 +009299 +009303 +009306 +009307 +009308 +009309 +009312 +009315 +009316 +009318 +009323 +009324 +009325 +009326 +009327 +009330 +009331 +009333 +009334 +009336 +009337 +009339 +009342 +009343 +009347 +009348 +009349 +009350 +009351 +009354 +009358 +009359 +009362 +009365 +009368 +009371 +009373 +009374 +009375 +009377 +009378 +009382 +009386 +009388 +009389 +009392 +009393 +009394 +009398 +009401 +009405 +009406 +009407 +009408 +009409 +009410 +009411 +009412 +009413 +009414 +009417 +009418 +009419 +009420 +009421 +009422 +009424 +009429 +009432 +009433 +009434 +009437 +009438 +009439 +009440 +009443 +009445 +009446 +009448 +009454 +009455 
+009456 +009457 +009458 +009459 +009460 +009461 +009463 +009464 +009465 +009466 +009468 +009469 +009470 +009472 +009476 +009477 +009479 +009480 +009481 +009484 +009488 +009490 +009491 +009494 +009496 +009497 +009499 +009500 +009502 +009504 +009507 +009508 +009512 +009515 +009516 +009517 +009518 +009519 +009520 +009523 +009524 +009526 +009527 +009528 +009531 +009532 +009533 +009537 +009540 +009541 +009542 +009543 +009545 +009546 +009549 +009550 +009551 +009557 +009558 +009560 +009562 +009565 +009566 +009567 +009568 +009571 +009573 +009576 +009577 +009579 +009580 +009584 +009585 +009586 +009587 +009588 +009591 +009596 +009597 +009598 +009600 +009603 +009605 +009609 +009611 +009613 +009614 +009615 +009617 +009618 +009619 +009620 +009621 +009623 +009627 +009629 +009634 +009636 +009637 +009638 +009641 +009644 +009647 +009649 +009650 +009654 +009655 +009656 +009658 +009659 +009664 +009666 +009667 +009668 +009670 +009671 +009676 +009678 +009679 +009681 +009684 +009685 +009686 +009687 +009691 +009692 +009693 +009695 +009698 +009699 +009700 +009702 +009703 +009706 +009707 +009709 +009710 +009711 +009712 +009713 +009717 +009718 +009719 +009721 +009724 +009726 +009729 +009732 +009733 +009734 +009735 +009737 +009738 +009743 +009745 +009746 +009747 +009748 +009749 +009754 +009755 +009756 +009758 +009761 +009762 +009763 +009764 +009767 +009772 +009773 +009774 +009776 +009778 +009780 +009781 +009785 +009789 +009790 +009792 +009794 +009796 +009797 +009800 +009801 +009805 +009807 +009808 +009809 +009810 +009813 +009816 +009819 +009822 +009823 +009825 +009828 +009830 +009831 +009832 +009833 +009834 +009836 +009839 +009841 +009842 +009845 +009848 +009851 +009852 +009855 +009858 +009859 +009860 +009862 +009863 +009865 +009867 +009868 +009869 +009870 +009872 +009874 +009877 +009878 +009879 +009880 +009881 +009882 +009884 +009886 +009887 +009894 +009896 +009897 +009898 +009900 +009902 +009904 +009905 +009908 +009911 +009913 +009917 +009918 +009920 +009923 +009926 +009932 +009935 +009938 +009939 +009940 +009942 +009944 +009945 +009946 +009947 +009949 +009950 +009954 +009955 +009958 +009959 +009961 +2008_000002 +2008_000003 +2008_000007 +2008_000008 +2008_000009 +2008_000015 +2008_000016 +2008_000019 +2008_000021 +2008_000023 +2008_000026 +2008_000027 +2008_000028 +2008_000032 +2008_000033 +2008_000034 +2008_000036 +2008_000037 +2008_000041 +2008_000042 +2008_000043 +2008_000045 +2008_000050 +2008_000051 +2008_000052 +2008_000053 +2008_000054 +2008_000056 +2008_000059 +2008_000060 +2008_000062 +2008_000064 +2008_000066 +2008_000067 +2008_000070 +2008_000073 +2008_000074 +2008_000075 +2008_000076 +2008_000078 +2008_000080 +2008_000082 +2008_000084 +2008_000085 +2008_000089 +2008_000090 +2008_000093 +2008_000095 +2008_000096 +2008_000097 +2008_000099 +2008_000103 +2008_000105 +2008_000107 +2008_000109 +2008_000112 +2008_000115 +2008_000116 +2008_000119 +2008_000120 +2008_000123 +2008_000128 +2008_000131 +2008_000132 +2008_000133 +2008_000134 +2008_000138 +2008_000140 +2008_000141 +2008_000142 +2008_000143 +2008_000144 +2008_000145 +2008_000148 +2008_000149 +2008_000151 +2008_000154 +2008_000162 +2008_000163 +2008_000174 +2008_000176 +2008_000177 +2008_000181 +2008_000182 +2008_000183 +2008_000185 +2008_000187 +2008_000188 +2008_000189 +2008_000190 +2008_000191 +2008_000192 +2008_000193 +2008_000194 +2008_000195 +2008_000196 +2008_000197 +2008_000199 +2008_000202 +2008_000203 +2008_000204 +2008_000207 +2008_000213 +2008_000215 +2008_000217 +2008_000219 +2008_000222 +2008_000223 +2008_000226 +2008_000227 +2008_000233 
+2008_000234 +2008_000235 +2008_000236 +2008_000237 +2008_000238 +2008_000239 +2008_000243 +2008_000244 +2008_000246 +2008_000251 +2008_000252 +2008_000253 +2008_000254 +2008_000255 +2008_000257 +2008_000259 +2008_000260 +2008_000261 +2008_000262 +2008_000264 +2008_000266 +2008_000268 +2008_000270 +2008_000271 +2008_000272 +2008_000273 +2008_000274 +2008_000275 +2008_000277 +2008_000278 +2008_000281 +2008_000283 +2008_000284 +2008_000287 +2008_000289 +2008_000290 +2008_000291 +2008_000297 +2008_000298 +2008_000304 +2008_000305 +2008_000306 +2008_000307 +2008_000309 +2008_000311 +2008_000313 +2008_000315 +2008_000316 +2008_000318 +2008_000321 +2008_000328 +2008_000330 +2008_000335 +2008_000336 +2008_000338 +2008_000339 +2008_000340 +2008_000342 +2008_000343 +2008_000345 +2008_000346 +2008_000348 +2008_000350 +2008_000354 +2008_000356 +2008_000358 +2008_000359 +2008_000361 +2008_000364 +2008_000365 +2008_000367 +2008_000371 +2008_000373 +2008_000376 +2008_000378 +2008_000380 +2008_000381 +2008_000382 +2008_000383 +2008_000391 +2008_000392 +2008_000393 +2008_000397 +2008_000398 +2008_000399 +2008_000400 +2008_000401 +2008_000403 +2008_000405 +2008_000406 +2008_000407 +2008_000408 +2008_000413 +2008_000414 +2008_000415 +2008_000416 +2008_000418 +2008_000419 +2008_000421 +2008_000422 +2008_000423 +2008_000424 +2008_000426 +2008_000428 +2008_000432 +2008_000435 +2008_000436 +2008_000437 +2008_000442 +2008_000443 +2008_000445 +2008_000446 +2008_000447 +2008_000448 +2008_000452 +2008_000455 +2008_000457 +2008_000461 +2008_000464 +2008_000465 +2008_000466 +2008_000469 +2008_000470 +2008_000471 +2008_000472 +2008_000473 +2008_000474 +2008_000475 +2008_000480 +2008_000481 +2008_000488 +2008_000489 +2008_000491 +2008_000492 +2008_000493 +2008_000495 +2008_000496 +2008_000498 +2008_000499 +2008_000501 +2008_000502 +2008_000505 +2008_000510 +2008_000511 +2008_000512 +2008_000514 +2008_000515 +2008_000516 +2008_000519 +2008_000522 +2008_000527 +2008_000531 +2008_000532 +2008_000533 +2008_000535 +2008_000536 +2008_000540 +2008_000541 +2008_000544 +2008_000545 +2008_000547 +2008_000548 +2008_000552 +2008_000553 +2008_000558 +2008_000559 +2008_000561 +2008_000562 +2008_000563 +2008_000564 +2008_000566 +2008_000567 +2008_000568 +2008_000569 +2008_000572 +2008_000573 +2008_000578 +2008_000579 +2008_000581 +2008_000583 +2008_000584 +2008_000585 +2008_000588 +2008_000589 +2008_000595 +2008_000599 +2008_000602 +2008_000605 +2008_000607 +2008_000609 +2008_000613 +2008_000614 +2008_000615 +2008_000619 +2008_000620 +2008_000622 +2008_000623 +2008_000626 +2008_000628 +2008_000629 +2008_000630 +2008_000634 +2008_000636 +2008_000640 +2008_000641 +2008_000645 +2008_000646 +2008_000647 +2008_000648 +2008_000650 +2008_000652 +2008_000655 +2008_000656 +2008_000657 +2008_000659 +2008_000660 +2008_000661 +2008_000662 +2008_000666 +2008_000669 +2008_000670 +2008_000672 +2008_000673 +2008_000674 +2008_000676 +2008_000677 +2008_000678 +2008_000683 +2008_000689 +2008_000690 +2008_000691 +2008_000694 +2008_000695 +2008_000696 +2008_000697 +2008_000699 +2008_000700 +2008_000703 +2008_000704 +2008_000705 +2008_000706 +2008_000711 +2008_000714 +2008_000716 +2008_000719 +2008_000721 +2008_000723 +2008_000724 +2008_000725 +2008_000726 +2008_000727 +2008_000729 +2008_000731 +2008_000732 +2008_000733 +2008_000734 +2008_000737 +2008_000740 +2008_000742 +2008_000745 +2008_000748 +2008_000753 +2008_000756 +2008_000758 +2008_000760 +2008_000761 +2008_000764 +2008_000765 +2008_000769 +2008_000775 +2008_000776 +2008_000777 +2008_000778 
+2008_000780 +2008_000782 +2008_000783 +2008_000785 +2008_000787 +2008_000788 +2008_000790 +2008_000792 +2008_000793 +2008_000795 +2008_000796 +2008_000798 +2008_000801 +2008_000803 +2008_000804 +2008_000805 +2008_000806 +2008_000808 +2008_000811 +2008_000814 +2008_000815 +2008_000817 +2008_000824 +2008_000825 +2008_000828 +2008_000829 +2008_000832 +2008_000833 +2008_000834 +2008_000835 +2008_000837 +2008_000839 +2008_000841 +2008_000842 +2008_000844 +2008_000847 +2008_000848 +2008_000851 +2008_000853 +2008_000854 +2008_000857 +2008_000858 +2008_000860 +2008_000861 +2008_000863 +2008_000864 +2008_000867 +2008_000868 +2008_000870 +2008_000873 +2008_000875 +2008_000876 +2008_000878 +2008_000880 +2008_000881 +2008_000883 +2008_000884 +2008_000885 +2008_000887 +2008_000897 +2008_000899 +2008_000901 +2008_000902 +2008_000904 +2008_000905 +2008_000908 +2008_000910 +2008_000911 +2008_000912 +2008_000914 +2008_000915 +2008_000916 +2008_000917 +2008_000919 +2008_000922 +2008_000923 +2008_000924 +2008_000928 +2008_000931 +2008_000934 +2008_000936 +2008_000939 +2008_000940 +2008_000941 +2008_000942 +2008_000943 +2008_000944 +2008_000950 +2008_000952 +2008_000953 +2008_000956 +2008_000957 +2008_000959 +2008_000960 +2008_000964 +2008_000965 +2008_000970 +2008_000971 +2008_000972 +2008_000973 +2008_000976 +2008_000979 +2008_000981 +2008_000982 +2008_000984 +2008_000985 +2008_000987 +2008_000992 +2008_000993 +2008_000999 +2008_001004 +2008_001007 +2008_001009 +2008_001012 +2008_001013 +2008_001018 +2008_001020 +2008_001021 +2008_001022 +2008_001023 +2008_001024 +2008_001026 +2008_001028 +2008_001030 +2008_001031 +2008_001034 +2008_001035 +2008_001036 +2008_001039 +2008_001040 +2008_001041 +2008_001042 +2008_001046 +2008_001047 +2008_001048 +2008_001052 +2008_001054 +2008_001055 +2008_001056 +2008_001057 +2008_001060 +2008_001062 +2008_001063 +2008_001066 +2008_001068 +2008_001070 +2008_001071 +2008_001073 +2008_001074 +2008_001075 +2008_001076 +2008_001077 +2008_001078 +2008_001080 +2008_001081 +2008_001083 +2008_001089 +2008_001090 +2008_001092 +2008_001098 +2008_001099 +2008_001104 +2008_001105 +2008_001106 +2008_001111 +2008_001112 +2008_001113 +2008_001114 +2008_001115 +2008_001118 +2008_001119 +2008_001120 +2008_001121 +2008_001122 +2008_001130 +2008_001133 +2008_001134 +2008_001135 +2008_001136 +2008_001137 +2008_001139 +2008_001140 +2008_001142 +2008_001143 +2008_001147 +2008_001150 +2008_001154 +2008_001155 +2008_001158 +2008_001159 +2008_001160 +2008_001161 +2008_001164 +2008_001166 +2008_001167 +2008_001168 +2008_001169 +2008_001170 +2008_001171 +2008_001177 +2008_001182 +2008_001183 +2008_001185 +2008_001188 +2008_001189 +2008_001190 +2008_001192 +2008_001194 +2008_001196 +2008_001199 +2008_001202 +2008_001203 +2008_001205 +2008_001206 +2008_001208 +2008_001210 +2008_001215 +2008_001218 +2008_001219 +2008_001220 +2008_001221 +2008_001223 +2008_001225 +2008_001226 +2008_001227 +2008_001230 +2008_001231 +2008_001235 +2008_001236 +2008_001238 +2008_001241 +2008_001245 +2008_001248 +2008_001249 +2008_001255 +2008_001257 +2008_001260 +2008_001262 +2008_001263 +2008_001264 +2008_001267 +2008_001271 +2008_001272 +2008_001274 +2008_001275 +2008_001278 +2008_001283 +2008_001284 +2008_001285 +2008_001290 +2008_001294 +2008_001296 +2008_001299 +2008_001301 +2008_001302 +2008_001304 +2008_001306 +2008_001307 +2008_001308 +2008_001310 +2008_001312 +2008_001314 +2008_001318 +2008_001320 +2008_001322 +2008_001325 +2008_001329 +2008_001333 +2008_001334 +2008_001335 +2008_001336 +2008_001338 +2008_001340 
+2008_001344 +2008_001346 +2008_001349 +2008_001350 +2008_001351 +2008_001353 +2008_001356 +2008_001357 +2008_001358 +2008_001359 +2008_001366 +2008_001367 +2008_001369 +2008_001373 +2008_001374 +2008_001375 +2008_001376 +2008_001379 +2008_001380 +2008_001382 +2008_001383 +2008_001385 +2008_001387 +2008_001388 +2008_001389 +2008_001390 +2008_001391 +2008_001395 +2008_001399 +2008_001401 +2008_001402 +2008_001404 +2008_001405 +2008_001406 +2008_001408 +2008_001410 +2008_001413 +2008_001414 +2008_001415 +2008_001419 +2008_001420 +2008_001427 +2008_001428 +2008_001429 +2008_001430 +2008_001431 +2008_001432 +2008_001433 +2008_001434 +2008_001436 +2008_001437 +2008_001439 +2008_001440 +2008_001444 +2008_001445 +2008_001446 +2008_001448 +2008_001451 +2008_001454 +2008_001455 +2008_001456 +2008_001460 +2008_001461 +2008_001462 +2008_001464 +2008_001466 +2008_001467 +2008_001468 +2008_001470 +2008_001475 +2008_001478 +2008_001479 +2008_001481 +2008_001482 +2008_001486 +2008_001488 +2008_001491 +2008_001493 +2008_001494 +2008_001495 +2008_001498 +2008_001500 +2008_001501 +2008_001503 +2008_001504 +2008_001510 +2008_001513 +2008_001514 +2008_001516 +2008_001520 +2008_001522 +2008_001523 +2008_001525 +2008_001527 +2008_001529 +2008_001531 +2008_001533 +2008_001534 +2008_001536 +2008_001538 +2008_001539 +2008_001540 +2008_001541 +2008_001542 +2008_001543 +2008_001544 +2008_001546 +2008_001547 +2008_001549 +2008_001550 +2008_001551 +2008_001553 +2008_001563 +2008_001564 +2008_001566 +2008_001574 +2008_001575 +2008_001576 +2008_001577 +2008_001580 +2008_001582 +2008_001586 +2008_001589 +2008_001590 +2008_001591 +2008_001592 +2008_001593 +2008_001594 +2008_001596 +2008_001598 +2008_001601 +2008_001602 +2008_001605 +2008_001607 +2008_001609 +2008_001610 +2008_001613 +2008_001615 +2008_001617 +2008_001619 +2008_001620 +2008_001622 +2008_001624 +2008_001625 +2008_001626 +2008_001629 +2008_001631 +2008_001632 +2008_001636 +2008_001638 +2008_001640 +2008_001641 +2008_001643 +2008_001645 +2008_001648 +2008_001649 +2008_001652 +2008_001653 +2008_001655 +2008_001659 +2008_001660 +2008_001661 +2008_001663 +2008_001666 +2008_001667 +2008_001668 +2008_001669 +2008_001670 +2008_001673 +2008_001676 +2008_001679 +2008_001680 +2008_001681 +2008_001682 +2008_001688 +2008_001690 +2008_001691 +2008_001692 +2008_001694 +2008_001697 +2008_001699 +2008_001702 +2008_001704 +2008_001706 +2008_001708 +2008_001709 +2008_001710 +2008_001712 +2008_001714 +2008_001715 +2008_001716 +2008_001717 +2008_001719 +2008_001722 +2008_001723 +2008_001724 +2008_001727 +2008_001729 +2008_001730 +2008_001731 +2008_001735 +2008_001736 +2008_001737 +2008_001741 +2008_001742 +2008_001744 +2008_001745 +2008_001746 +2008_001750 +2008_001751 +2008_001757 +2008_001758 +2008_001761 +2008_001763 +2008_001764 +2008_001765 +2008_001769 +2008_001770 +2008_001772 +2008_001773 +2008_001774 +2008_001775 +2008_001781 +2008_001782 +2008_001783 +2008_001784 +2008_001787 +2008_001789 +2008_001791 +2008_001792 +2008_001796 +2008_001797 +2008_001799 +2008_001801 +2008_001802 +2008_001805 +2008_001806 +2008_001808 +2008_001809 +2008_001810 +2008_001811 +2008_001812 +2008_001813 +2008_001814 +2008_001815 +2008_001816 +2008_001820 +2008_001821 +2008_001823 +2008_001825 +2008_001829 +2008_001830 +2008_001832 +2008_001834 +2008_001836 +2008_001837 +2008_001838 +2008_001841 +2008_001842 +2008_001843 +2008_001845 +2008_001849 +2008_001850 +2008_001852 +2008_001854 +2008_001856 +2008_001858 +2008_001860 +2008_001862 +2008_001863 +2008_001865 +2008_001866 +2008_001867 
+2008_001869 +2008_001871 +2008_001872 +2008_001874 +2008_001876 +2008_001880 +2008_001881 +2008_001882 +2008_001885 +2008_001888 +2008_001894 +2008_001895 +2008_001896 +2008_001899 +2008_001903 +2008_001905 +2008_001907 +2008_001908 +2008_001909 +2008_001910 +2008_001911 +2008_001914 +2008_001919 +2008_001920 +2008_001921 +2008_001926 +2008_001928 +2008_001929 +2008_001930 +2008_001932 +2008_001934 +2008_001937 +2008_001941 +2008_001945 +2008_001946 +2008_001947 +2008_001951 +2008_001955 +2008_001956 +2008_001957 +2008_001958 +2008_001961 +2008_001965 +2008_001966 +2008_001967 +2008_001969 +2008_001970 +2008_001971 +2008_001977 +2008_001978 +2008_001979 +2008_001980 +2008_001982 +2008_001985 +2008_001986 +2008_001987 +2008_001989 +2008_001992 +2008_001997 +2008_001998 +2008_002000 +2008_002001 +2008_002002 +2008_002003 +2008_002004 +2008_002005 +2008_002007 +2008_002009 +2008_002011 +2008_002013 +2008_002017 +2008_002021 +2008_002023 +2008_002026 +2008_002031 +2008_002032 +2008_002033 +2008_002035 +2008_002036 +2008_002037 +2008_002039 +2008_002042 +2008_002043 +2008_002045 +2008_002046 +2008_002047 +2008_002052 +2008_002056 +2008_002058 +2008_002061 +2008_002062 +2008_002064 +2008_002066 +2008_002067 +2008_002069 +2008_002071 +2008_002073 +2008_002079 +2008_002080 +2008_002082 +2008_002084 +2008_002086 +2008_002088 +2008_002092 +2008_002093 +2008_002094 +2008_002096 +2008_002098 +2008_002099 +2008_002103 +2008_002107 +2008_002112 +2008_002113 +2008_002114 +2008_002115 +2008_002116 +2008_002117 +2008_002118 +2008_002119 +2008_002123 +2008_002124 +2008_002129 +2008_002131 +2008_002132 +2008_002138 +2008_002140 +2008_002144 +2008_002145 +2008_002146 +2008_002148 +2008_002150 +2008_002151 +2008_002152 +2008_002153 +2008_002155 +2008_002156 +2008_002158 +2008_002160 +2008_002162 +2008_002167 +2008_002169 +2008_002172 +2008_002175 +2008_002176 +2008_002177 +2008_002179 +2008_002181 +2008_002182 +2008_002185 +2008_002191 +2008_002193 +2008_002194 +2008_002195 +2008_002197 +2008_002198 +2008_002199 +2008_002200 +2008_002201 +2008_002202 +2008_002204 +2008_002205 +2008_002206 +2008_002207 +2008_002208 +2008_002209 +2008_002210 +2008_002212 +2008_002215 +2008_002218 +2008_002220 +2008_002221 +2008_002222 +2008_002223 +2008_002225 +2008_002227 +2008_002229 +2008_002231 +2008_002234 +2008_002236 +2008_002239 +2008_002240 +2008_002241 +2008_002243 +2008_002244 +2008_002247 +2008_002248 +2008_002250 +2008_002251 +2008_002255 +2008_002258 +2008_002259 +2008_002262 +2008_002267 +2008_002269 +2008_002270 +2008_002272 +2008_002273 +2008_002278 +2008_002279 +2008_002280 +2008_002281 +2008_002283 +2008_002288 +2008_002292 +2008_002293 +2008_002294 +2008_002296 +2008_002298 +2008_002299 +2008_002304 +2008_002305 +2008_002307 +2008_002311 +2008_002312 +2008_002314 +2008_002317 +2008_002321 +2008_002322 +2008_002324 +2008_002325 +2008_002327 +2008_002328 +2008_002329 +2008_002330 +2008_002331 +2008_002335 +2008_002338 +2008_002340 +2008_002343 +2008_002344 +2008_002347 +2008_002349 +2008_002350 +2008_002356 +2008_002357 +2008_002358 +2008_002359 +2008_002361 +2008_002362 +2008_002365 +2008_002366 +2008_002368 +2008_002369 +2008_002370 +2008_002372 +2008_002374 +2008_002377 +2008_002378 +2008_002379 +2008_002383 +2008_002384 +2008_002389 +2008_002395 +2008_002399 +2008_002401 +2008_002403 +2008_002404 +2008_002405 +2008_002408 +2008_002410 +2008_002411 +2008_002412 +2008_002414 +2008_002418 +2008_002419 +2008_002422 +2008_002424 +2008_002425 +2008_002428 +2008_002429 +2008_002430 +2008_002434 +2008_002436 
+2008_002437 +2008_002438 +2008_002439 +2008_002441 +2008_002442 +2008_002444 +2008_002445 +2008_002446 +2008_002448 +2008_002451 +2008_002452 +2008_002454 +2008_002456 +2008_002457 +2008_002458 +2008_002459 +2008_002461 +2008_002464 +2008_002465 +2008_002466 +2008_002467 +2008_002470 +2008_002471 +2008_002473 +2008_002477 +2008_002481 +2008_002482 +2008_002483 +2008_002484 +2008_002485 +2008_002487 +2008_002491 +2008_002492 +2008_002494 +2008_002495 +2008_002499 +2008_002501 +2008_002502 +2008_002504 +2008_002506 +2008_002508 +2008_002509 +2008_002510 +2008_002512 +2008_002514 +2008_002515 +2008_002516 +2008_002521 +2008_002523 +2008_002524 +2008_002526 +2008_002527 +2008_002533 +2008_002536 +2008_002540 +2008_002541 +2008_002542 +2008_002543 +2008_002547 +2008_002549 +2008_002551 +2008_002555 +2008_002558 +2008_002562 +2008_002564 +2008_002566 +2008_002567 +2008_002568 +2008_002574 +2008_002575 +2008_002576 +2008_002578 +2008_002579 +2008_002583 +2008_002584 +2008_002588 +2008_002589 +2008_002590 +2008_002597 +2008_002598 +2008_002599 +2008_002601 +2008_002603 +2008_002606 +2008_002610 +2008_002612 +2008_002613 +2008_002616 +2008_002621 +2008_002622 +2008_002623 +2008_002624 +2008_002625 +2008_002631 +2008_002634 +2008_002638 +2008_002639 +2008_002640 +2008_002641 +2008_002643 +2008_002645 +2008_002647 +2008_002648 +2008_002649 +2008_002650 +2008_002652 +2008_002653 +2008_002662 +2008_002665 +2008_002666 +2008_002668 +2008_002670 +2008_002672 +2008_002673 +2008_002674 +2008_002675 +2008_002676 +2008_002677 +2008_002678 +2008_002679 +2008_002680 +2008_002681 +2008_002682 +2008_002684 +2008_002686 +2008_002687 +2008_002696 +2008_002697 +2008_002698 +2008_002700 +2008_002701 +2008_002704 +2008_002705 +2008_002709 +2008_002710 +2008_002712 +2008_002714 +2008_002715 +2008_002716 +2008_002718 +2008_002719 +2008_002720 +2008_002725 +2008_002728 +2008_002730 +2008_002732 +2008_002733 +2008_002735 +2008_002736 +2008_002738 +2008_002741 +2008_002746 +2008_002749 +2008_002750 +2008_002751 +2008_002752 +2008_002753 +2008_002756 +2008_002758 +2008_002760 +2008_002762 +2008_002766 +2008_002767 +2008_002768 +2008_002772 +2008_002773 +2008_002774 +2008_002775 +2008_002776 +2008_002778 +2008_002783 +2008_002784 +2008_002787 +2008_002789 +2008_002791 +2008_002792 +2008_002793 +2008_002794 +2008_002795 +2008_002801 +2008_002804 +2008_002806 +2008_002808 +2008_002809 +2008_002811 +2008_002813 +2008_002814 +2008_002817 +2008_002820 +2008_002823 +2008_002826 +2008_002829 +2008_002830 +2008_002831 +2008_002834 +2008_002835 +2008_002838 +2008_002842 +2008_002843 +2008_002845 +2008_002847 +2008_002848 +2008_002850 +2008_002852 +2008_002854 +2008_002856 +2008_002857 +2008_002859 +2008_002860 +2008_002864 +2008_002866 +2008_002868 +2008_002869 +2008_002870 +2008_002872 +2008_002873 +2008_002875 +2008_002876 +2008_002879 +2008_002880 +2008_002882 +2008_002883 +2008_002885 +2008_002887 +2008_002890 +2008_002891 +2008_002892 +2008_002894 +2008_002897 +2008_002899 +2008_002900 +2008_002903 +2008_002904 +2008_002906 +2008_002908 +2008_002909 +2008_002910 +2008_002913 +2008_002916 +2008_002917 +2008_002920 +2008_002922 +2008_002926 +2008_002929 +2008_002930 +2008_002931 +2008_002932 +2008_002936 +2008_002942 +2008_002943 +2008_002946 +2008_002947 +2008_002948 +2008_002951 +2008_002954 +2008_002955 +2008_002956 +2008_002957 +2008_002958 +2008_002960 +2008_002961 +2008_002965 +2008_002966 +2008_002968 +2008_002970 +2008_002971 +2008_002972 +2008_002973 +2008_002977 +2008_002983 +2008_002984 +2008_002985 +2008_002988 
+2008_002992 +2008_002993 +2008_002997 +2008_002999 +2008_003001 +2008_003003 +2008_003005 +2008_003008 +2008_003013 +2008_003015 +2008_003017 +2008_003018 +2008_003020 +2008_003021 +2008_003022 +2008_003023 +2008_003025 +2008_003026 +2008_003030 +2008_003033 +2008_003034 +2008_003037 +2008_003039 +2008_003041 +2008_003043 +2008_003045 +2008_003048 +2008_003049 +2008_003051 +2008_003052 +2008_003053 +2008_003055 +2008_003056 +2008_003057 +2008_003059 +2008_003060 +2008_003061 +2008_003062 +2008_003063 +2008_003065 +2008_003067 +2008_003068 +2008_003072 +2008_003073 +2008_003075 +2008_003076 +2008_003079 +2008_003081 +2008_003082 +2008_003083 +2008_003087 +2008_003088 +2008_003089 +2008_003090 +2008_003093 +2008_003094 +2008_003095 +2008_003099 +2008_003100 +2008_003101 +2008_003104 +2008_003105 +2008_003106 +2008_003107 +2008_003108 +2008_003110 +2008_003112 +2008_003114 +2008_003120 +2008_003122 +2008_003127 +2008_003128 +2008_003132 +2008_003133 +2008_003134 +2008_003135 +2008_003136 +2008_003140 +2008_003141 +2008_003143 +2008_003144 +2008_003146 +2008_003147 +2008_003151 +2008_003152 +2008_003154 +2008_003155 +2008_003157 +2008_003160 +2008_003161 +2008_003167 +2008_003168 +2008_003170 +2008_003178 +2008_003180 +2008_003181 +2008_003182 +2008_003186 +2008_003187 +2008_003189 +2008_003191 +2008_003193 +2008_003196 +2008_003200 +2008_003202 +2008_003203 +2008_003205 +2008_003208 +2008_003209 +2008_003210 +2008_003211 +2008_003213 +2008_003220 +2008_003222 +2008_003224 +2008_003225 +2008_003228 +2008_003231 +2008_003232 +2008_003238 +2008_003239 +2008_003242 +2008_003244 +2008_003245 +2008_003248 +2008_003249 +2008_003251 +2008_003252 +2008_003255 +2008_003256 +2008_003261 +2008_003263 +2008_003264 +2008_003265 +2008_003266 +2008_003269 +2008_003270 +2008_003271 +2008_003272 +2008_003275 +2008_003276 +2008_003277 +2008_003278 +2008_003280 +2008_003283 +2008_003286 +2008_003287 +2008_003288 +2008_003289 +2008_003290 +2008_003291 +2008_003295 +2008_003297 +2008_003300 +2008_003302 +2008_003303 +2008_003304 +2008_003305 +2008_003311 +2008_003313 +2008_003316 +2008_003318 +2008_003320 +2008_003321 +2008_003323 +2008_003326 +2008_003329 +2008_003330 +2008_003331 +2008_003333 +2008_003334 +2008_003335 +2008_003336 +2008_003338 +2008_003342 +2008_003343 +2008_003344 +2008_003347 +2008_003348 +2008_003350 +2008_003351 +2008_003359 +2008_003360 +2008_003361 +2008_003362 +2008_003369 +2008_003373 +2008_003374 +2008_003378 +2008_003379 +2008_003380 +2008_003381 +2008_003382 +2008_003384 +2008_003386 +2008_003393 +2008_003394 +2008_003395 +2008_003402 +2008_003405 +2008_003406 +2008_003407 +2008_003409 +2008_003414 +2008_003415 +2008_003417 +2008_003418 +2008_003420 +2008_003423 +2008_003424 +2008_003426 +2008_003429 +2008_003430 +2008_003432 +2008_003433 +2008_003434 +2008_003435 +2008_003437 +2008_003439 +2008_003442 +2008_003443 +2008_003447 +2008_003448 +2008_003449 +2008_003451 +2008_003452 +2008_003453 +2008_003458 +2008_003461 +2008_003462 +2008_003463 +2008_003464 +2008_003466 +2008_003467 +2008_003469 +2008_003472 +2008_003475 +2008_003476 +2008_003477 +2008_003478 +2008_003479 +2008_003480 +2008_003482 +2008_003483 +2008_003484 +2008_003485 +2008_003488 +2008_003489 +2008_003492 +2008_003493 +2008_003496 +2008_003497 +2008_003498 +2008_003499 +2008_003500 +2008_003501 +2008_003504 +2008_003507 +2008_003510 +2008_003511 +2008_003514 +2008_003515 +2008_003519 +2008_003520 +2008_003521 +2008_003522 +2008_003523 +2008_003524 +2008_003526 +2008_003531 +2008_003533 +2008_003534 +2008_003542 
+2008_003544 +2008_003545 +2008_003546 +2008_003547 +2008_003552 +2008_003557 +2008_003559 +2008_003560 +2008_003562 +2008_003565 +2008_003571 +2008_003572 +2008_003575 +2008_003576 +2008_003577 +2008_003578 +2008_003579 +2008_003580 +2008_003582 +2008_003585 +2008_003587 +2008_003589 +2008_003590 +2008_003591 +2008_003592 +2008_003593 +2008_003596 +2008_003598 +2008_003604 +2008_003607 +2008_003608 +2008_003609 +2008_003610 +2008_003611 +2008_003613 +2008_003617 +2008_003618 +2008_003619 +2008_003621 +2008_003622 +2008_003624 +2008_003626 +2008_003629 +2008_003635 +2008_003636 +2008_003637 +2008_003638 +2008_003645 +2008_003647 +2008_003650 +2008_003652 +2008_003653 +2008_003655 +2008_003658 +2008_003659 +2008_003662 +2008_003665 +2008_003667 +2008_003671 +2008_003672 +2008_003673 +2008_003674 +2008_003675 +2008_003676 +2008_003677 +2008_003680 +2008_003681 +2008_003682 +2008_003683 +2008_003684 +2008_003685 +2008_003688 +2008_003689 +2008_003691 +2008_003694 +2008_003697 +2008_003701 +2008_003703 +2008_003704 +2008_003706 +2008_003707 +2008_003709 +2008_003712 +2008_003713 +2008_003718 +2008_003719 +2008_003720 +2008_003721 +2008_003722 +2008_003726 +2008_003729 +2008_003732 +2008_003733 +2008_003737 +2008_003743 +2008_003744 +2008_003745 +2008_003746 +2008_003748 +2008_003749 +2008_003753 +2008_003754 +2008_003755 +2008_003756 +2008_003761 +2008_003762 +2008_003763 +2008_003764 +2008_003766 +2008_003767 +2008_003768 +2008_003769 +2008_003772 +2008_003773 +2008_003774 +2008_003775 +2008_003776 +2008_003777 +2008_003779 +2008_003780 +2008_003781 +2008_003782 +2008_003788 +2008_003789 +2008_003791 +2008_003793 +2008_003794 +2008_003796 +2008_003799 +2008_003800 +2008_003801 +2008_003802 +2008_003805 +2008_003811 +2008_003812 +2008_003813 +2008_003814 +2008_003815 +2008_003819 +2008_003820 +2008_003821 +2008_003825 +2008_003826 +2008_003827 +2008_003829 +2008_003830 +2008_003831 +2008_003835 +2008_003838 +2008_003840 +2008_003841 +2008_003842 +2008_003843 +2008_003844 +2008_003846 +2008_003847 +2008_003849 +2008_003852 +2008_003854 +2008_003856 +2008_003858 +2008_003860 +2008_003864 +2008_003866 +2008_003868 +2008_003870 +2008_003871 +2008_003873 +2008_003874 +2008_003876 +2008_003881 +2008_003882 +2008_003883 +2008_003884 +2008_003885 +2008_003886 +2008_003888 +2008_003891 +2008_003892 +2008_003894 +2008_003904 +2008_003905 +2008_003908 +2008_003913 +2008_003914 +2008_003915 +2008_003916 +2008_003920 +2008_003921 +2008_003922 +2008_003924 +2008_003925 +2008_003926 +2008_003929 +2008_003932 +2008_003933 +2008_003939 +2008_003940 +2008_003941 +2008_003942 +2008_003943 +2008_003944 +2008_003945 +2008_003947 +2008_003948 +2008_003951 +2008_003956 +2008_003958 +2008_003962 +2008_003965 +2008_003966 +2008_003967 +2008_003969 +2008_003970 +2008_003971 +2008_003974 +2008_003975 +2008_003976 +2008_003978 +2008_003983 +2008_003984 +2008_003985 +2008_003986 +2008_003988 +2008_003989 +2008_003992 +2008_003995 +2008_003996 +2008_003997 +2008_003998 +2008_004000 +2008_004002 +2008_004003 +2008_004004 +2008_004006 +2008_004007 +2008_004008 +2008_004014 +2008_004015 +2008_004016 +2008_004017 +2008_004018 +2008_004020 +2008_004021 +2008_004022 +2008_004024 +2008_004026 +2008_004027 +2008_004030 +2008_004036 +2008_004037 +2008_004040 +2008_004042 +2008_004044 +2008_004045 +2008_004046 +2008_004048 +2008_004053 +2008_004054 +2008_004055 +2008_004056 +2008_004058 +2008_004064 +2008_004066 +2008_004069 +2008_004071 +2008_004074 +2008_004075 +2008_004076 +2008_004077 +2008_004080 +2008_004081 +2008_004084 
+2008_004087 +2008_004088 +2008_004090 +2008_004092 +2008_004093 +2008_004097 +2008_004100 +2008_004101 +2008_004102 +2008_004103 +2008_004105 +2008_004106 +2008_004110 +2008_004112 +2008_004113 +2008_004119 +2008_004120 +2008_004121 +2008_004122 +2008_004123 +2008_004124 +2008_004125 +2008_004126 +2008_004127 +2008_004130 +2008_004134 +2008_004135 +2008_004137 +2008_004138 +2008_004140 +2008_004142 +2008_004145 +2008_004147 +2008_004148 +2008_004155 +2008_004161 +2008_004163 +2008_004165 +2008_004166 +2008_004171 +2008_004174 +2008_004175 +2008_004176 +2008_004178 +2008_004182 +2008_004188 +2008_004189 +2008_004190 +2008_004195 +2008_004196 +2008_004198 +2008_004201 +2008_004203 +2008_004205 +2008_004208 +2008_004212 +2008_004213 +2008_004214 +2008_004216 +2008_004217 +2008_004218 +2008_004221 +2008_004224 +2008_004230 +2008_004231 +2008_004232 +2008_004234 +2008_004235 +2008_004239 +2008_004242 +2008_004243 +2008_004245 +2008_004246 +2008_004247 +2008_004251 +2008_004257 +2008_004258 +2008_004259 +2008_004263 +2008_004265 +2008_004269 +2008_004270 +2008_004271 +2008_004273 +2008_004274 +2008_004276 +2008_004278 +2008_004279 +2008_004280 +2008_004284 +2008_004287 +2008_004288 +2008_004289 +2008_004290 +2008_004291 +2008_004292 +2008_004293 +2008_004296 +2008_004297 +2008_004301 +2008_004303 +2008_004306 +2008_004307 +2008_004308 +2008_004312 +2008_004313 +2008_004314 +2008_004317 +2008_004318 +2008_004319 +2008_004321 +2008_004324 +2008_004325 +2008_004326 +2008_004327 +2008_004328 +2008_004330 +2008_004331 +2008_004333 +2008_004339 +2008_004342 +2008_004344 +2008_004345 +2008_004347 +2008_004348 +2008_004353 +2008_004354 +2008_004357 +2008_004358 +2008_004361 +2008_004362 +2008_004363 +2008_004365 +2008_004367 +2008_004371 +2008_004372 +2008_004374 +2008_004376 +2008_004378 +2008_004380 +2008_004384 +2008_004385 +2008_004387 +2008_004389 +2008_004391 +2008_004394 +2008_004396 +2008_004398 +2008_004399 +2008_004402 +2008_004403 +2008_004406 +2008_004408 +2008_004410 +2008_004411 +2008_004412 +2008_004414 +2008_004416 +2008_004417 +2008_004418 +2008_004419 +2008_004422 +2008_004425 +2008_004426 +2008_004427 +2008_004428 +2008_004430 +2008_004431 +2008_004433 +2008_004435 +2008_004436 +2008_004438 +2008_004439 +2008_004441 +2008_004443 +2008_004445 +2008_004450 +2008_004452 +2008_004453 +2008_004455 +2008_004457 +2008_004458 +2008_004459 +2008_004460 +2008_004462 +2008_004464 +2008_004469 +2008_004470 +2008_004471 +2008_004476 +2008_004477 +2008_004478 +2008_004479 +2008_004480 +2008_004482 +2008_004487 +2008_004488 +2008_004490 +2008_004492 +2008_004493 +2008_004497 +2008_004498 +2008_004499 +2008_004501 +2008_004502 +2008_004504 +2008_004505 +2008_004506 +2008_004510 +2008_004512 +2008_004513 +2008_004515 +2008_004518 +2008_004519 +2008_004520 +2008_004522 +2008_004525 +2008_004526 +2008_004528 +2008_004532 +2008_004533 +2008_004534 +2008_004538 +2008_004539 +2008_004540 +2008_004541 +2008_004544 +2008_004545 +2008_004546 +2008_004547 +2008_004549 +2008_004550 +2008_004551 +2008_004552 +2008_004553 +2008_004554 +2008_004559 +2008_004564 +2008_004567 +2008_004568 +2008_004570 +2008_004574 +2008_004575 +2008_004579 +2008_004581 +2008_004583 +2008_004584 +2008_004585 +2008_004588 +2008_004589 +2008_004590 +2008_004592 +2008_004593 +2008_004599 +2008_004602 +2008_004603 +2008_004605 +2008_004606 +2008_004607 +2008_004610 +2008_004611 +2008_004612 +2008_004613 +2008_004614 +2008_004615 +2008_004616 +2008_004617 +2008_004619 +2008_004620 +2008_004621 +2008_004624 +2008_004629 +2008_004630 
+2008_004631 +2008_004632 +2008_004633 +2008_004634 +2008_004635 +2008_004636 +2008_004640 +2008_004646 +2008_004647 +2008_004648 +2008_004649 +2008_004653 +2008_004654 +2008_004656 +2008_004659 +2008_004661 +2008_004662 +2008_004663 +2008_004665 +2008_004666 +2008_004667 +2008_004668 +2008_004670 +2008_004671 +2008_004672 +2008_004677 +2008_004678 +2008_004679 +2008_004684 +2008_004687 +2008_004688 +2008_004689 +2008_004690 +2008_004692 +2008_004695 +2008_004696 +2008_004697 +2008_004701 +2008_004702 +2008_004703 +2008_004704 +2008_004705 +2008_004706 +2008_004707 +2008_004711 +2008_004713 +2008_004716 +2008_004718 +2008_004719 +2008_004720 +2008_004722 +2008_004725 +2008_004726 +2008_004729 +2008_004730 +2008_004732 +2008_004736 +2008_004739 +2008_004740 +2008_004742 +2008_004745 +2008_004749 +2008_004750 +2008_004752 +2008_004754 +2008_004756 +2008_004758 +2008_004760 +2008_004763 +2008_004764 +2008_004766 +2008_004767 +2008_004768 +2008_004770 +2008_004771 +2008_004774 +2008_004776 +2008_004777 +2008_004778 +2008_004781 +2008_004783 +2008_004784 +2008_004786 +2008_004794 +2008_004795 +2008_004797 +2008_004802 +2008_004804 +2008_004805 +2008_004807 +2008_004808 +2008_004812 +2008_004814 +2008_004819 +2008_004821 +2008_004822 +2008_004825 +2008_004827 +2008_004832 +2008_004833 +2008_004834 +2008_004837 +2008_004838 +2008_004841 +2008_004844 +2008_004845 +2008_004847 +2008_004849 +2008_004850 +2008_004851 +2008_004852 +2008_004854 +2008_004856 +2008_004858 +2008_004862 +2008_004866 +2008_004868 +2008_004869 +2008_004872 +2008_004873 +2008_004874 +2008_004875 +2008_004876 +2008_004881 +2008_004885 +2008_004887 +2008_004892 +2008_004893 +2008_004894 +2008_004896 +2008_004898 +2008_004899 +2008_004900 +2008_004903 +2008_004904 +2008_004907 +2008_004908 +2008_004910 +2008_004911 +2008_004914 +2008_004917 +2008_004920 +2008_004921 +2008_004923 +2008_004926 +2008_004930 +2008_004931 +2008_004933 +2008_004934 +2008_004935 +2008_004937 +2008_004938 +2008_004940 +2008_004942 +2008_004945 +2008_004946 +2008_004948 +2008_004950 +2008_004955 +2008_004961 +2008_004964 +2008_004966 +2008_004967 +2008_004968 +2008_004969 +2008_004970 +2008_004973 +2008_004974 +2008_004975 +2008_004976 +2008_004977 +2008_004979 +2008_004981 +2008_004982 +2008_004983 +2008_004984 +2008_004985 +2008_004986 +2008_004990 +2008_004991 +2008_004995 +2008_004998 +2008_005000 +2008_005001 +2008_005003 +2008_005006 +2008_005008 +2008_005010 +2008_005013 +2008_005015 +2008_005016 +2008_005023 +2008_005032 +2008_005033 +2008_005035 +2008_005036 +2008_005037 +2008_005040 +2008_005042 +2008_005043 +2008_005045 +2008_005046 +2008_005049 +2008_005051 +2008_005054 +2008_005055 +2008_005057 +2008_005061 +2008_005063 +2008_005064 +2008_005065 +2008_005066 +2008_005068 +2008_005070 +2008_005071 +2008_005072 +2008_005074 +2008_005078 +2008_005080 +2008_005081 +2008_005082 +2008_005084 +2008_005085 +2008_005088 +2008_005089 +2008_005090 +2008_005092 +2008_005094 +2008_005096 +2008_005097 +2008_005098 +2008_005101 +2008_005105 +2008_005107 +2008_005108 +2008_005109 +2008_005110 +2008_005111 +2008_005114 +2008_005115 +2008_005117 +2008_005123 +2008_005127 +2008_005132 +2008_005133 +2008_005134 +2008_005136 +2008_005137 +2008_005139 +2008_005140 +2008_005146 +2008_005147 +2008_005150 +2008_005151 +2008_005156 +2008_005158 +2008_005159 +2008_005160 +2008_005166 +2008_005167 +2008_005168 +2008_005171 +2008_005172 +2008_005174 +2008_005175 +2008_005178 +2008_005181 +2008_005182 +2008_005183 +2008_005185 +2008_005186 +2008_005190 +2008_005191 
+2008_005193 +2008_005194 +2008_005196 +2008_005197 +2008_005201 +2008_005204 +2008_005205 +2008_005208 +2008_005209 +2008_005213 +2008_005214 +2008_005215 +2008_005216 +2008_005217 +2008_005218 +2008_005220 +2008_005221 +2008_005231 +2008_005233 +2008_005234 +2008_005235 +2008_005236 +2008_005240 +2008_005242 +2008_005243 +2008_005244 +2008_005245 +2008_005247 +2008_005248 +2008_005250 +2008_005251 +2008_005252 +2008_005253 +2008_005254 +2008_005255 +2008_005257 +2008_005260 +2008_005261 +2008_005266 +2008_005269 +2008_005270 +2008_005271 +2008_005272 +2008_005276 +2008_005277 +2008_005279 +2008_005281 +2008_005282 +2008_005283 +2008_005288 +2008_005294 +2008_005295 +2008_005296 +2008_005297 +2008_005300 +2008_005303 +2008_005304 +2008_005309 +2008_005310 +2008_005313 +2008_005315 +2008_005316 +2008_005319 +2008_005321 +2008_005323 +2008_005324 +2008_005325 +2008_005327 +2008_005329 +2008_005331 +2008_005333 +2008_005335 +2008_005336 +2008_005337 +2008_005338 +2008_005342 +2008_005345 +2008_005346 +2008_005347 +2008_005348 +2008_005349 +2008_005350 +2008_005354 +2008_005356 +2008_005357 +2008_005359 +2008_005360 +2008_005361 +2008_005362 +2008_005363 +2008_005365 +2008_005367 +2008_005369 +2008_005373 +2008_005374 +2008_005375 +2008_005376 +2008_005378 +2008_005379 +2008_005380 +2008_005382 +2008_005386 +2008_005389 +2008_005393 +2008_005395 +2008_005396 +2008_005398 +2008_005399 +2008_005400 +2008_005404 +2008_005405 +2008_005406 +2008_005408 +2008_005412 +2008_005414 +2008_005415 +2008_005417 +2008_005421 +2008_005422 +2008_005423 +2008_005427 +2008_005429 +2008_005431 +2008_005433 +2008_005436 +2008_005439 +2008_005443 +2008_005444 +2008_005445 +2008_005446 +2008_005447 +2008_005449 +2008_005451 +2008_005455 +2008_005456 +2008_005460 +2008_005463 +2008_005465 +2008_005467 +2008_005469 +2008_005472 +2008_005473 +2008_005477 +2008_005480 +2008_005484 +2008_005485 +2008_005490 +2008_005491 +2008_005494 +2008_005496 +2008_005498 +2008_005500 +2008_005501 +2008_005502 +2008_005504 +2008_005505 +2008_005507 +2008_005510 +2008_005511 +2008_005512 +2008_005514 +2008_005517 +2008_005519 +2008_005521 +2008_005522 +2008_005523 +2008_005525 +2008_005526 +2008_005527 +2008_005530 +2008_005531 +2008_005534 +2008_005536 +2008_005538 +2008_005541 +2008_005544 +2008_005548 +2008_005549 +2008_005550 +2008_005552 +2008_005553 +2008_005558 +2008_005560 +2008_005561 +2008_005563 +2008_005564 +2008_005566 +2008_005567 +2008_005569 +2008_005570 +2008_005572 +2008_005573 +2008_005574 +2008_005582 +2008_005584 +2008_005588 +2008_005589 +2008_005591 +2008_005593 +2008_005599 +2008_005600 +2008_005601 +2008_005603 +2008_005608 +2008_005609 +2008_005610 +2008_005611 +2008_005612 +2008_005614 +2008_005616 +2008_005618 +2008_005623 +2008_005625 +2008_005626 +2008_005627 +2008_005628 +2008_005631 +2008_005633 +2008_005634 +2008_005635 +2008_005636 +2008_005637 +2008_005638 +2008_005639 +2008_005641 +2008_005642 +2008_005643 +2008_005646 +2008_005649 +2008_005650 +2008_005652 +2008_005653 +2008_005656 +2008_005657 +2008_005660 +2008_005663 +2008_005664 +2008_005668 +2008_005673 +2008_005675 +2008_005676 +2008_005677 +2008_005678 +2008_005679 +2008_005680 +2008_005681 +2008_005682 +2008_005683 +2008_005685 +2008_005686 +2008_005687 +2008_005691 +2008_005695 +2008_005698 +2008_005699 +2008_005701 +2008_005702 +2008_005703 +2008_005705 +2008_005706 +2008_005707 +2008_005713 +2008_005714 +2008_005716 +2008_005719 +2008_005720 +2008_005721 +2008_005724 +2008_005726 +2008_005727 +2008_005728 +2008_005732 +2008_005734 
+2008_005735 +2008_005736 +2008_005737 +2008_005738 +2008_005739 +2008_005742 +2008_005747 +2008_005748 +2008_005750 +2008_005752 +2008_005757 +2008_005758 +2008_005761 +2008_005763 +2008_005764 +2008_005767 +2008_005768 +2008_005770 +2008_005774 +2008_005777 +2008_005779 +2008_005780 +2008_005788 +2008_005790 +2008_005791 +2008_005792 +2008_005794 +2008_005796 +2008_005798 +2008_005800 +2008_005801 +2008_005803 +2008_005805 +2008_005808 +2008_005810 +2008_005812 +2008_005816 +2008_005817 +2008_005818 +2008_005821 +2008_005822 +2008_005823 +2008_005825 +2008_005831 +2008_005832 +2008_005834 +2008_005838 +2008_005839 +2008_005843 +2008_005845 +2008_005846 +2008_005847 +2008_005848 +2008_005850 +2008_005853 +2008_005855 +2008_005856 +2008_005857 +2008_005860 +2008_005863 +2008_005865 +2008_005867 +2008_005869 +2008_005871 +2008_005873 +2008_005874 +2008_005875 +2008_005877 +2008_005878 +2008_005881 +2008_005882 +2008_005883 +2008_005884 +2008_005889 +2008_005890 +2008_005891 +2008_005893 +2008_005895 +2008_005897 +2008_005898 +2008_005902 +2008_005903 +2008_005904 +2008_005905 +2008_005907 +2008_005914 +2008_005915 +2008_005916 +2008_005918 +2008_005921 +2008_005923 +2008_005924 +2008_005926 +2008_005928 +2008_005929 +2008_005933 +2008_005934 +2008_005935 +2008_005936 +2008_005937 +2008_005938 +2008_005939 +2008_005943 +2008_005945 +2008_005954 +2008_005956 +2008_005957 +2008_005959 +2008_005960 +2008_005962 +2008_005964 +2008_005967 +2008_005968 +2008_005970 +2008_005972 +2008_005975 +2008_005976 +2008_005977 +2008_005978 +2008_005979 +2008_005980 +2008_005982 +2008_005984 +2008_005987 +2008_005989 +2008_005991 +2008_005997 +2008_006000 +2008_006002 +2008_006004 +2008_006007 +2008_006008 +2008_006010 +2008_006014 +2008_006017 +2008_006020 +2008_006021 +2008_006024 +2008_006027 +2008_006028 +2008_006031 +2008_006032 +2008_006034 +2008_006036 +2008_006037 +2008_006038 +2008_006039 +2008_006041 +2008_006042 +2008_006045 +2008_006046 +2008_006047 +2008_006049 +2008_006050 +2008_006052 +2008_006055 +2008_006058 +2008_006059 +2008_006062 +2008_006063 +2008_006064 +2008_006065 +2008_006067 +2008_006068 +2008_006070 +2008_006071 +2008_006072 +2008_006074 +2008_006076 +2008_006078 +2008_006081 +2008_006082 +2008_006085 +2008_006087 +2008_006088 +2008_006090 +2008_006092 +2008_006094 +2008_006096 +2008_006099 +2008_006100 +2008_006102 +2008_006104 +2008_006108 +2008_006109 +2008_006111 +2008_006112 +2008_006113 +2008_006117 +2008_006119 +2008_006120 +2008_006121 +2008_006124 +2008_006128 +2008_006129 +2008_006130 +2008_006133 +2008_006135 +2008_006136 +2008_006140 +2008_006143 +2008_006144 +2008_006145 +2008_006147 +2008_006148 +2008_006151 +2008_006152 +2008_006154 +2008_006158 +2008_006159 +2008_006163 +2008_006164 +2008_006166 +2008_006169 +2008_006170 +2008_006175 +2008_006178 +2008_006179 +2008_006181 +2008_006182 +2008_006185 +2008_006186 +2008_006188 +2008_006190 +2008_006192 +2008_006194 +2008_006195 +2008_006200 +2008_006203 +2008_006205 +2008_006207 +2008_006210 +2008_006211 +2008_006213 +2008_006215 +2008_006216 +2008_006218 +2008_006219 +2008_006220 +2008_006221 +2008_006222 +2008_006224 +2008_006225 +2008_006227 +2008_006229 +2008_006232 +2008_006233 +2008_006234 +2008_006235 +2008_006239 +2008_006240 +2008_006242 +2008_006244 +2008_006249 +2008_006250 +2008_006253 +2008_006254 +2008_006256 +2008_006257 +2008_006258 +2008_006262 +2008_006265 +2008_006267 +2008_006269 +2008_006271 +2008_006272 +2008_006273 +2008_006275 +2008_006276 +2008_006280 +2008_006281 +2008_006282 +2008_006285 
+2008_006288 +2008_006289 +2008_006290 +2008_006294 +2008_006295 +2008_006298 +2008_006300 +2008_006303 +2008_006307 +2008_006310 +2008_006311 +2008_006315 +2008_006316 +2008_006317 +2008_006320 +2008_006323 +2008_006325 +2008_006327 +2008_006329 +2008_006330 +2008_006331 +2008_006335 +2008_006336 +2008_006337 +2008_006339 +2008_006341 +2008_006345 +2008_006347 +2008_006349 +2008_006350 +2008_006351 +2008_006353 +2008_006355 +2008_006356 +2008_006359 +2008_006361 +2008_006362 +2008_006364 +2008_006365 +2008_006366 +2008_006368 +2008_006369 +2008_006370 +2008_006373 +2008_006376 +2008_006377 +2008_006382 +2008_006384 +2008_006386 +2008_006387 +2008_006389 +2008_006390 +2008_006392 +2008_006394 +2008_006397 +2008_006400 +2008_006401 +2008_006403 +2008_006404 +2008_006407 +2008_006408 +2008_006409 +2008_006410 +2008_006416 +2008_006417 +2008_006419 +2008_006421 +2008_006424 +2008_006425 +2008_006427 +2008_006429 +2008_006430 +2008_006432 +2008_006433 +2008_006434 +2008_006436 +2008_006438 +2008_006441 +2008_006447 +2008_006448 +2008_006449 +2008_006452 +2008_006458 +2008_006461 +2008_006462 +2008_006463 +2008_006467 +2008_006470 +2008_006474 +2008_006475 +2008_006477 +2008_006480 +2008_006481 +2008_006482 +2008_006483 +2008_006487 +2008_006488 +2008_006489 +2008_006490 +2008_006491 +2008_006496 +2008_006497 +2008_006500 +2008_006502 +2008_006503 +2008_006506 +2008_006509 +2008_006511 +2008_006512 +2008_006517 +2008_006519 +2008_006520 +2008_006522 +2008_006523 +2008_006524 +2008_006526 +2008_006528 +2008_006530 +2008_006534 +2008_006538 +2008_006540 +2008_006543 +2008_006546 +2008_006547 +2008_006548 +2008_006549 +2008_006553 +2008_006554 +2008_006558 +2008_006561 +2008_006562 +2008_006564 +2008_006566 +2008_006567 +2008_006568 +2008_006570 +2008_006576 +2008_006578 +2008_006579 +2008_006585 +2008_006586 +2008_006587 +2008_006588 +2008_006591 +2008_006598 +2008_006599 +2008_006600 +2008_006602 +2008_006604 +2008_006605 +2008_006606 +2008_006609 +2008_006610 +2008_006611 +2008_006613 +2008_006614 +2008_006616 +2008_006617 +2008_006619 +2008_006621 +2008_006623 +2008_006624 +2008_006625 +2008_006626 +2008_006629 +2008_006631 +2008_006634 +2008_006635 +2008_006637 +2008_006638 +2008_006641 +2008_006642 +2008_006645 +2008_006646 +2008_006649 +2008_006650 +2008_006654 +2008_006655 +2008_006656 +2008_006657 +2008_006660 +2008_006662 +2008_006663 +2008_006665 +2008_006667 +2008_006668 +2008_006671 +2008_006677 +2008_006682 +2008_006684 +2008_006686 +2008_006690 +2008_006691 +2008_006692 +2008_006694 +2008_006696 +2008_006700 +2008_006701 +2008_006703 +2008_006705 +2008_006708 +2008_006710 +2008_006712 +2008_006714 +2008_006715 +2008_006716 +2008_006717 +2008_006718 +2008_006719 +2008_006720 +2008_006722 +2008_006724 +2008_006728 +2008_006730 +2008_006731 +2008_006732 +2008_006733 +2008_006737 +2008_006743 +2008_006746 +2008_006747 +2008_006748 +2008_006750 +2008_006751 +2008_006752 +2008_006753 +2008_006758 +2008_006761 +2008_006762 +2008_006764 +2008_006765 +2008_006767 +2008_006773 +2008_006774 +2008_006776 +2008_006777 +2008_006778 +2008_006779 +2008_006781 +2008_006784 +2008_006785 +2008_006792 +2008_006793 +2008_006796 +2008_006797 +2008_006798 +2008_006800 +2008_006802 +2008_006807 +2008_006808 +2008_006810 +2008_006811 +2008_006813 +2008_006815 +2008_006816 +2008_006817 +2008_006818 +2008_006819 +2008_006820 +2008_006824 +2008_006825 +2008_006827 +2008_006828 +2008_006831 +2008_006832 +2008_006833 +2008_006834 +2008_006835 +2008_006837 +2008_006839 +2008_006841 +2008_006843 +2008_006844 
+2008_006847 +2008_006849 +2008_006855 +2008_006857 +2008_006863 +2008_006864 +2008_006865 +2008_006868 +2008_006870 +2008_006872 +2008_006873 +2008_006874 +2008_006877 +2008_006879 +2008_006880 +2008_006881 +2008_006882 +2008_006885 +2008_006887 +2008_006889 +2008_006890 +2008_006892 +2008_006896 +2008_006898 +2008_006900 +2008_006902 +2008_006903 +2008_006904 +2008_006907 +2008_006908 +2008_006909 +2008_006910 +2008_006912 +2008_006919 +2008_006920 +2008_006921 +2008_006923 +2008_006924 +2008_006925 +2008_006926 +2008_006933 +2008_006936 +2008_006939 +2008_006941 +2008_006944 +2008_006946 +2008_006948 +2008_006949 +2008_006950 +2008_006951 +2008_006952 +2008_006953 +2008_006954 +2008_006956 +2008_006959 +2008_006960 +2008_006961 +2008_006962 +2008_006965 +2008_006967 +2008_006968 +2008_006969 +2008_006973 +2008_006979 +2008_006980 +2008_006981 +2008_006986 +2008_006987 +2008_006989 +2008_006991 +2008_006992 +2008_006997 +2008_006998 +2008_006999 +2008_007003 +2008_007004 +2008_007006 +2008_007009 +2008_007010 +2008_007011 +2008_007012 +2008_007014 +2008_007019 +2008_007021 +2008_007022 +2008_007025 +2008_007026 +2008_007028 +2008_007030 +2008_007031 +2008_007032 +2008_007034 +2008_007038 +2008_007039 +2008_007042 +2008_007043 +2008_007045 +2008_007048 +2008_007050 +2008_007054 +2008_007056 +2008_007057 +2008_007058 +2008_007059 +2008_007060 +2008_007061 +2008_007064 +2008_007067 +2008_007069 +2008_007070 +2008_007073 +2008_007075 +2008_007076 +2008_007081 +2008_007082 +2008_007084 +2008_007085 +2008_007086 +2008_007090 +2008_007091 +2008_007095 +2008_007096 +2008_007097 +2008_007098 +2008_007101 +2008_007103 +2008_007105 +2008_007106 +2008_007108 +2008_007112 +2008_007114 +2008_007115 +2008_007118 +2008_007119 +2008_007120 +2008_007123 +2008_007124 +2008_007129 +2008_007130 +2008_007131 +2008_007133 +2008_007134 +2008_007138 +2008_007142 +2008_007143 +2008_007145 +2008_007146 +2008_007147 +2008_007151 +2008_007156 +2008_007161 +2008_007163 +2008_007164 +2008_007165 +2008_007166 +2008_007167 +2008_007168 +2008_007169 +2008_007171 +2008_007176 +2008_007179 +2008_007181 +2008_007182 +2008_007184 +2008_007185 +2008_007187 +2008_007188 +2008_007189 +2008_007190 +2008_007194 +2008_007195 +2008_007196 +2008_007197 +2008_007201 +2008_007205 +2008_007207 +2008_007208 +2008_007211 +2008_007214 +2008_007216 +2008_007217 +2008_007218 +2008_007219 +2008_007221 +2008_007222 +2008_007223 +2008_007225 +2008_007226 +2008_007227 +2008_007229 +2008_007231 +2008_007236 +2008_007237 +2008_007239 +2008_007241 +2008_007242 +2008_007245 +2008_007246 +2008_007247 +2008_007250 +2008_007252 +2008_007254 +2008_007256 +2008_007260 +2008_007261 +2008_007264 +2008_007265 +2008_007266 +2008_007269 +2008_007273 +2008_007274 +2008_007277 +2008_007279 +2008_007280 +2008_007281 +2008_007282 +2008_007285 +2008_007286 +2008_007287 +2008_007289 +2008_007291 +2008_007293 +2008_007295 +2008_007298 +2008_007305 +2008_007307 +2008_007311 +2008_007312 +2008_007313 +2008_007314 +2008_007317 +2008_007319 +2008_007320 +2008_007321 +2008_007323 +2008_007324 +2008_007325 +2008_007327 +2008_007332 +2008_007334 +2008_007335 +2008_007336 +2008_007339 +2008_007343 +2008_007344 +2008_007346 +2008_007348 +2008_007350 +2008_007352 +2008_007353 +2008_007356 +2008_007357 +2008_007358 +2008_007361 +2008_007363 +2008_007364 +2008_007374 +2008_007375 +2008_007378 +2008_007382 +2008_007383 +2008_007384 +2008_007388 +2008_007389 +2008_007390 +2008_007392 +2008_007393 +2008_007394 +2008_007397 +2008_007398 +2008_007402 +2008_007403 +2008_007404 
+2008_007409 +2008_007410 +2008_007415 +2008_007417 +2008_007421 +2008_007423 +2008_007424 +2008_007425 +2008_007428 +2008_007430 +2008_007431 +2008_007432 +2008_007433 +2008_007434 +2008_007435 +2008_007438 +2008_007441 +2008_007442 +2008_007443 +2008_007444 +2008_007446 +2008_007448 +2008_007452 +2008_007455 +2008_007456 +2008_007458 +2008_007459 +2008_007461 +2008_007465 +2008_007466 +2008_007469 +2008_007470 +2008_007471 +2008_007472 +2008_007473 +2008_007476 +2008_007477 +2008_007478 +2008_007480 +2008_007485 +2008_007486 +2008_007488 +2008_007491 +2008_007494 +2008_007496 +2008_007497 +2008_007498 +2008_007500 +2008_007501 +2008_007504 +2008_007507 +2008_007509 +2008_007510 +2008_007511 +2008_007513 +2008_007514 +2008_007515 +2008_007519 +2008_007521 +2008_007524 +2008_007525 +2008_007527 +2008_007528 +2008_007529 +2008_007531 +2008_007533 +2008_007534 +2008_007536 +2008_007537 +2008_007538 +2008_007544 +2008_007546 +2008_007548 +2008_007556 +2008_007558 +2008_007559 +2008_007561 +2008_007565 +2008_007567 +2008_007573 +2008_007574 +2008_007576 +2008_007579 +2008_007581 +2008_007583 +2008_007584 +2008_007585 +2008_007586 +2008_007587 +2008_007588 +2008_007589 +2008_007591 +2008_007593 +2008_007594 +2008_007595 +2008_007596 +2008_007597 +2008_007599 +2008_007604 +2008_007608 +2008_007610 +2008_007611 +2008_007612 +2008_007613 +2008_007617 +2008_007618 +2008_007621 +2008_007623 +2008_007625 +2008_007629 +2008_007630 +2008_007632 +2008_007635 +2008_007640 +2008_007641 +2008_007643 +2008_007646 +2008_007648 +2008_007649 +2008_007653 +2008_007656 +2008_007660 +2008_007661 +2008_007662 +2008_007664 +2008_007665 +2008_007666 +2008_007668 +2008_007669 +2008_007673 +2008_007675 +2008_007676 +2008_007677 +2008_007682 +2008_007683 +2008_007685 +2008_007688 +2008_007690 +2008_007691 +2008_007692 +2008_007693 +2008_007694 +2008_007696 +2008_007697 +2008_007698 +2008_007701 +2008_007702 +2008_007704 +2008_007706 +2008_007709 +2008_007710 +2008_007714 +2008_007716 +2008_007717 +2008_007719 +2008_007724 +2008_007726 +2008_007729 +2008_007730 +2008_007733 +2008_007735 +2008_007736 +2008_007737 +2008_007738 +2008_007739 +2008_007741 +2008_007742 +2008_007745 +2008_007746 +2008_007748 +2008_007749 +2008_007750 +2008_007752 +2008_007755 +2008_007757 +2008_007758 +2008_007759 +2008_007760 +2008_007761 +2008_007764 +2008_007766 +2008_007768 +2008_007770 +2008_007777 +2008_007779 +2008_007780 +2008_007781 +2008_007786 +2008_007787 +2008_007788 +2008_007789 +2008_007791 +2008_007793 +2008_007794 +2008_007797 +2008_007798 +2008_007804 +2008_007805 +2008_007806 +2008_007811 +2008_007812 +2008_007814 +2008_007816 +2008_007817 +2008_007819 +2008_007823 +2008_007825 +2008_007827 +2008_007828 +2008_007829 +2008_007831 +2008_007833 +2008_007835 +2008_007836 +2008_007837 +2008_007839 +2008_007840 +2008_007841 +2008_007842 +2008_007843 +2008_007848 +2008_007850 +2008_007852 +2008_007853 +2008_007854 +2008_007855 +2008_007858 +2008_007861 +2008_007864 +2008_007869 +2008_007870 +2008_007871 +2008_007872 +2008_007873 +2008_007875 +2008_007877 +2008_007879 +2008_007882 +2008_007883 +2008_007884 +2008_007887 +2008_007888 +2008_007890 +2008_007891 +2008_007893 +2008_007895 +2008_007897 +2008_007902 +2008_007904 +2008_007907 +2008_007909 +2008_007912 +2008_007913 +2008_007914 +2008_007915 +2008_007916 +2008_007917 +2008_007918 +2008_007922 +2008_007923 +2008_007928 +2008_007931 +2008_007932 +2008_007933 +2008_007935 +2008_007936 +2008_007937 +2008_007938 +2008_007940 +2008_007941 +2008_007942 +2008_007945 +2008_007947 
+2008_007948 +2008_007949 +2008_007950 +2008_007953 +2008_007954 +2008_007955 +2008_007962 +2008_007964 +2008_007966 +2008_007969 +2008_007970 +2008_007973 +2008_007975 +2008_007977 +2008_007981 +2008_007985 +2008_007986 +2008_007987 +2008_007988 +2008_007989 +2008_007990 +2008_007993 +2008_007994 +2008_007997 +2008_007998 +2008_007999 +2008_008001 +2008_008002 +2008_008004 +2008_008007 +2008_008011 +2008_008012 +2008_008018 +2008_008020 +2008_008021 +2008_008022 +2008_008024 +2008_008025 +2008_008028 +2008_008029 +2008_008031 +2008_008034 +2008_008037 +2008_008040 +2008_008043 +2008_008044 +2008_008048 +2008_008050 +2008_008052 +2008_008053 +2008_008055 +2008_008057 +2008_008058 +2008_008064 +2008_008066 +2008_008069 +2008_008070 +2008_008072 +2008_008073 +2008_008074 +2008_008075 +2008_008080 +2008_008083 +2008_008084 +2008_008086 +2008_008091 +2008_008092 +2008_008093 +2008_008095 +2008_008096 +2008_008097 +2008_008098 +2008_008103 +2008_008105 +2008_008106 +2008_008109 +2008_008112 +2008_008113 +2008_008115 +2008_008116 +2008_008120 +2008_008121 +2008_008122 +2008_008123 +2008_008125 +2008_008127 +2008_008130 +2008_008131 +2008_008132 +2008_008134 +2008_008141 +2008_008145 +2008_008146 +2008_008147 +2008_008148 +2008_008150 +2008_008152 +2008_008154 +2008_008155 +2008_008162 +2008_008166 +2008_008169 +2008_008170 +2008_008175 +2008_008176 +2008_008177 +2008_008179 +2008_008180 +2008_008184 +2008_008185 +2008_008190 +2008_008191 +2008_008192 +2008_008193 +2008_008194 +2008_008197 +2008_008199 +2008_008200 +2008_008203 +2008_008206 +2008_008208 +2008_008210 +2008_008211 +2008_008212 +2008_008215 +2008_008217 +2008_008218 +2008_008220 +2008_008221 +2008_008223 +2008_008224 +2008_008227 +2008_008229 +2008_008231 +2008_008232 +2008_008233 +2008_008234 +2008_008235 +2008_008237 +2008_008241 +2008_008242 +2008_008246 +2008_008247 +2008_008252 +2008_008254 +2008_008257 +2008_008262 +2008_008263 +2008_008266 +2008_008268 +2008_008269 +2008_008271 +2008_008272 +2008_008274 +2008_008275 +2008_008276 +2008_008278 +2008_008279 +2008_008281 +2008_008284 +2008_008287 +2008_008288 +2008_008292 +2008_008294 +2008_008296 +2008_008297 +2008_008300 +2008_008301 +2008_008302 +2008_008307 +2008_008309 +2008_008310 +2008_008313 +2008_008314 +2008_008315 +2008_008318 +2008_008319 +2008_008320 +2008_008321 +2008_008322 +2008_008323 +2008_008324 +2008_008325 +2008_008330 +2008_008331 +2008_008335 +2008_008336 +2008_008337 +2008_008338 +2008_008341 +2008_008342 +2008_008343 +2008_008344 +2008_008345 +2008_008346 +2008_008347 +2008_008354 +2008_008356 +2008_008357 +2008_008359 +2008_008362 +2008_008363 +2008_008364 +2008_008365 +2008_008366 +2008_008368 +2008_008370 +2008_008373 +2008_008376 +2008_008377 +2008_008379 +2008_008380 +2008_008382 +2008_008384 +2008_008387 +2008_008388 +2008_008391 +2008_008392 +2008_008393 +2008_008395 +2008_008402 +2008_008403 +2008_008404 +2008_008406 +2008_008410 +2008_008411 +2008_008416 +2008_008421 +2008_008423 +2008_008424 +2008_008428 +2008_008429 +2008_008431 +2008_008432 +2008_008433 +2008_008434 +2008_008435 +2008_008437 +2008_008439 +2008_008440 +2008_008443 +2008_008444 +2008_008446 +2008_008447 +2008_008450 +2008_008453 +2008_008455 +2008_008461 +2008_008462 +2008_008464 +2008_008466 +2008_008467 +2008_008469 +2008_008470 +2008_008471 +2008_008474 +2008_008476 +2008_008479 +2008_008480 +2008_008482 +2008_008487 +2008_008488 +2008_008490 +2008_008496 +2008_008497 +2008_008500 +2008_008501 +2008_008506 +2008_008507 +2008_008508 +2008_008511 +2008_008512 +2008_008517 
+2008_008519 +2008_008521 +2008_008522 +2008_008523 +2008_008524 +2008_008525 +2008_008526 +2008_008527 +2008_008528 +2008_008530 +2008_008531 +2008_008533 +2008_008536 +2008_008537 +2008_008538 +2008_008541 +2008_008544 +2008_008545 +2008_008546 +2008_008547 +2008_008549 +2008_008550 +2008_008552 +2008_008554 +2008_008560 +2008_008564 +2008_008567 +2008_008570 +2008_008572 +2008_008574 +2008_008578 +2008_008579 +2008_008583 +2008_008585 +2008_008588 +2008_008589 +2008_008590 +2008_008591 +2008_008593 +2008_008595 +2008_008598 +2008_008600 +2008_008601 +2008_008606 +2008_008607 +2008_008608 +2008_008611 +2008_008613 +2008_008615 +2008_008616 +2008_008617 +2008_008618 +2008_008619 +2008_008621 +2008_008622 +2008_008623 +2008_008624 +2008_008627 +2008_008628 +2008_008629 +2008_008632 +2008_008635 +2008_008636 +2008_008637 +2008_008641 +2008_008642 +2008_008649 +2008_008652 +2008_008654 +2008_008658 +2008_008659 +2008_008662 +2008_008665 +2008_008666 +2008_008668 +2008_008671 +2008_008673 +2008_008674 +2008_008675 +2008_008676 +2008_008679 +2008_008681 +2008_008682 +2008_008683 +2008_008684 +2008_008685 +2008_008689 +2008_008690 +2008_008691 +2008_008694 +2008_008695 +2008_008696 +2008_008697 +2008_008700 +2008_008701 +2008_008705 +2008_008706 +2008_008707 +2008_008708 +2008_008711 +2008_008713 +2008_008714 +2008_008717 +2008_008718 +2008_008719 +2008_008724 +2008_008725 +2008_008726 +2008_008732 +2008_008735 +2008_008739 +2008_008744 +2008_008745 +2008_008746 +2008_008748 +2008_008749 +2008_008751 +2008_008753 +2008_008755 +2008_008757 +2008_008758 +2008_008765 +2008_008767 +2008_008770 +2008_008772 +2008_008773 +2009_000001 +2009_000002 +2009_000006 +2009_000009 +2009_000010 +2009_000011 +2009_000012 +2009_000013 +2009_000014 +2009_000015 +2009_000016 +2009_000017 +2009_000021 +2009_000022 +2009_000026 +2009_000027 +2009_000028 +2009_000029 +2009_000030 +2009_000032 +2009_000035 +2009_000037 +2009_000039 +2009_000040 +2009_000041 +2009_000042 +2009_000045 +2009_000051 +2009_000052 +2009_000054 +2009_000055 +2009_000056 +2009_000058 +2009_000059 +2009_000060 +2009_000063 +2009_000066 +2009_000067 +2009_000068 +2009_000072 +2009_000073 +2009_000074 +2009_000078 +2009_000080 +2009_000082 +2009_000084 +2009_000085 +2009_000087 +2009_000088 +2009_000089 +2009_000090 +2009_000091 +2009_000093 +2009_000096 +2009_000097 +2009_000100 +2009_000102 +2009_000103 +2009_000104 +2009_000105 +2009_000109 +2009_000119 +2009_000120 +2009_000121 +2009_000122 +2009_000124 +2009_000128 +2009_000130 +2009_000131 +2009_000132 +2009_000133 +2009_000135 +2009_000136 +2009_000137 +2009_000140 +2009_000141 +2009_000142 +2009_000145 +2009_000146 +2009_000149 +2009_000150 +2009_000151 +2009_000156 +2009_000157 +2009_000158 +2009_000159 +2009_000160 +2009_000161 +2009_000164 +2009_000165 +2009_000168 +2009_000169 +2009_000171 +2009_000176 +2009_000177 +2009_000181 +2009_000182 +2009_000183 +2009_000184 +2009_000188 +2009_000189 +2009_000192 +2009_000195 +2009_000197 +2009_000198 +2009_000199 +2009_000201 +2009_000203 +2009_000205 +2009_000206 +2009_000209 +2009_000212 +2009_000214 +2009_000216 +2009_000217 +2009_000218 +2009_000219 +2009_000223 +2009_000225 +2009_000227 +2009_000229 +2009_000232 +2009_000233 +2009_000237 +2009_000239 +2009_000242 +2009_000244 +2009_000247 +2009_000248 +2009_000249 +2009_000250 +2009_000251 +2009_000253 +2009_000254 +2009_000257 +2009_000260 +2009_000268 +2009_000276 +2009_000277 +2009_000280 +2009_000281 +2009_000282 +2009_000283 +2009_000284 +2009_000285 +2009_000286 +2009_000287 
+2009_000288 +2009_000289 +2009_000290 +2009_000291 +2009_000293 +2009_000297 +2009_000298 +2009_000300 +2009_000303 +2009_000304 +2009_000305 +2009_000308 +2009_000309 +2009_000312 +2009_000316 +2009_000317 +2009_000318 +2009_000320 +2009_000321 +2009_000322 +2009_000327 +2009_000328 +2009_000330 +2009_000335 +2009_000336 +2009_000337 +2009_000339 +2009_000340 +2009_000341 +2009_000342 +2009_000343 +2009_000344 +2009_000347 +2009_000350 +2009_000351 +2009_000354 +2009_000356 +2009_000366 +2009_000367 +2009_000370 +2009_000375 +2009_000377 +2009_000378 +2009_000379 +2009_000385 +2009_000387 +2009_000389 +2009_000390 +2009_000391 +2009_000393 +2009_000397 +2009_000398 +2009_000399 +2009_000400 +2009_000402 +2009_000405 +2009_000408 +2009_000409 +2009_000410 +2009_000411 +2009_000412 +2009_000414 +2009_000416 +2009_000417 +2009_000418 +2009_000419 +2009_000420 +2009_000421 +2009_000422 +2009_000426 +2009_000430 +2009_000435 +2009_000438 +2009_000439 +2009_000440 +2009_000443 +2009_000444 +2009_000445 +2009_000446 +2009_000449 +2009_000452 +2009_000453 +2009_000454 +2009_000455 +2009_000456 +2009_000457 +2009_000461 +2009_000463 +2009_000464 +2009_000466 +2009_000469 +2009_000471 +2009_000472 +2009_000474 +2009_000476 +2009_000477 +2009_000483 +2009_000486 +2009_000487 +2009_000488 +2009_000491 +2009_000493 +2009_000494 +2009_000496 +2009_000499 +2009_000500 +2009_000501 +2009_000502 +2009_000503 +2009_000504 +2009_000505 +2009_000511 +2009_000512 +2009_000513 +2009_000515 +2009_000516 +2009_000519 +2009_000522 +2009_000523 +2009_000525 +2009_000526 +2009_000527 +2009_000529 +2009_000532 +2009_000535 +2009_000536 +2009_000539 +2009_000542 +2009_000544 +2009_000545 +2009_000546 +2009_000547 +2009_000549 +2009_000550 +2009_000552 +2009_000553 +2009_000557 +2009_000558 +2009_000559 +2009_000560 +2009_000562 +2009_000563 +2009_000565 +2009_000566 +2009_000567 +2009_000568 +2009_000573 +2009_000574 +2009_000575 +2009_000576 +2009_000577 +2009_000579 +2009_000585 +2009_000586 +2009_000590 +2009_000591 +2009_000592 +2009_000593 +2009_000595 +2009_000597 +2009_000599 +2009_000600 +2009_000602 +2009_000603 +2009_000604 +2009_000606 +2009_000608 +2009_000611 +2009_000614 +2009_000615 +2009_000617 +2009_000619 +2009_000624 +2009_000625 +2009_000626 +2009_000628 +2009_000629 +2009_000631 +2009_000632 +2009_000634 +2009_000635 +2009_000636 +2009_000637 +2009_000638 +2009_000641 +2009_000642 +2009_000647 +2009_000648 +2009_000651 +2009_000653 +2009_000655 +2009_000658 +2009_000661 +2009_000662 +2009_000663 +2009_000664 +2009_000670 +2009_000672 +2009_000674 +2009_000675 +2009_000676 +2009_000677 +2009_000679 +2009_000681 +2009_000683 +2009_000684 +2009_000686 +2009_000689 +2009_000690 +2009_000691 +2009_000692 +2009_000694 +2009_000695 +2009_000696 +2009_000702 +2009_000704 +2009_000705 +2009_000708 +2009_000709 +2009_000712 +2009_000716 +2009_000718 +2009_000719 +2009_000720 +2009_000722 +2009_000723 +2009_000724 +2009_000725 +2009_000726 +2009_000727 +2009_000730 +2009_000731 +2009_000732 +2009_000734 +2009_000737 +2009_000741 +2009_000742 +2009_000744 +2009_000745 +2009_000746 +2009_000748 +2009_000750 +2009_000752 +2009_000755 +2009_000756 +2009_000757 +2009_000758 +2009_000759 +2009_000760 +2009_000762 +2009_000763 +2009_000768 +2009_000770 +2009_000771 +2009_000774 +2009_000777 +2009_000778 +2009_000779 +2009_000782 +2009_000783 +2009_000789 +2009_000790 +2009_000791 +2009_000793 +2009_000794 +2009_000796 +2009_000797 +2009_000801 +2009_000804 +2009_000805 +2009_000811 +2009_000812 +2009_000815 
+2009_000816 +2009_000817 +2009_000820 +2009_000821 +2009_000823 +2009_000824 +2009_000825 +2009_000828 +2009_000829 +2009_000830 +2009_000831 +2009_000833 +2009_000834 +2009_000837 +2009_000839 +2009_000840 +2009_000843 +2009_000845 +2009_000846 +2009_000848 +2009_000849 +2009_000851 +2009_000852 +2009_000854 +2009_000856 +2009_000858 +2009_000862 +2009_000865 +2009_000867 +2009_000869 +2009_000871 +2009_000874 +2009_000879 +2009_000882 +2009_000886 +2009_000887 +2009_000889 +2009_000890 +2009_000892 +2009_000894 +2009_000895 +2009_000896 +2009_000897 +2009_000898 +2009_000899 +2009_000901 +2009_000902 +2009_000904 +2009_000906 +2009_000909 +2009_000910 +2009_000915 +2009_000919 +2009_000920 +2009_000923 +2009_000924 +2009_000925 +2009_000926 +2009_000927 +2009_000928 +2009_000930 +2009_000931 +2009_000932 +2009_000934 +2009_000935 +2009_000937 +2009_000938 +2009_000939 +2009_000945 +2009_000948 +2009_000953 +2009_000954 +2009_000955 +2009_000958 +2009_000960 +2009_000961 +2009_000962 +2009_000964 +2009_000966 +2009_000967 +2009_000969 +2009_000970 +2009_000971 +2009_000973 +2009_000974 +2009_000975 +2009_000979 +2009_000980 +2009_000981 +2009_000985 +2009_000987 +2009_000989 +2009_000990 +2009_000991 +2009_000992 +2009_000995 +2009_000996 +2009_000998 +2009_001000 +2009_001002 +2009_001006 +2009_001007 +2009_001008 +2009_001009 +2009_001011 +2009_001012 +2009_001013 +2009_001016 +2009_001019 +2009_001021 +2009_001024 +2009_001026 +2009_001027 +2009_001028 +2009_001030 +2009_001036 +2009_001037 +2009_001038 +2009_001040 +2009_001042 +2009_001044 +2009_001052 +2009_001054 +2009_001055 +2009_001056 +2009_001057 +2009_001059 +2009_001061 +2009_001066 +2009_001068 +2009_001069 +2009_001070 +2009_001074 +2009_001075 +2009_001078 +2009_001079 +2009_001081 +2009_001082 +2009_001083 +2009_001084 +2009_001085 +2009_001090 +2009_001091 +2009_001094 +2009_001095 +2009_001096 +2009_001097 +2009_001098 +2009_001100 +2009_001102 +2009_001103 +2009_001104 +2009_001105 +2009_001106 +2009_001107 +2009_001108 +2009_001110 +2009_001111 +2009_001113 +2009_001117 +2009_001118 +2009_001120 +2009_001121 +2009_001124 +2009_001126 +2009_001128 +2009_001129 +2009_001133 +2009_001134 +2009_001135 +2009_001137 +2009_001138 +2009_001139 +2009_001140 +2009_001145 +2009_001146 +2009_001147 +2009_001148 +2009_001151 +2009_001152 +2009_001153 +2009_001154 +2009_001155 +2009_001159 +2009_001160 +2009_001163 +2009_001164 +2009_001166 +2009_001172 +2009_001177 +2009_001180 +2009_001181 +2009_001184 +2009_001188 +2009_001190 +2009_001192 +2009_001194 +2009_001195 +2009_001196 +2009_001197 +2009_001198 +2009_001199 +2009_001201 +2009_001203 +2009_001205 +2009_001206 +2009_001207 +2009_001208 +2009_001212 +2009_001215 +2009_001216 +2009_001217 +2009_001221 +2009_001224 +2009_001225 +2009_001227 +2009_001229 +2009_001230 +2009_001236 +2009_001237 +2009_001238 +2009_001240 +2009_001241 +2009_001242 +2009_001243 +2009_001245 +2009_001249 +2009_001251 +2009_001252 +2009_001253 +2009_001254 +2009_001255 +2009_001257 +2009_001259 +2009_001260 +2009_001263 +2009_001264 +2009_001266 +2009_001268 +2009_001270 +2009_001271 +2009_001278 +2009_001279 +2009_001282 +2009_001283 +2009_001285 +2009_001286 +2009_001288 +2009_001289 +2009_001291 +2009_001299 +2009_001300 +2009_001301 +2009_001303 +2009_001305 +2009_001306 +2009_001308 +2009_001309 +2009_001311 +2009_001312 +2009_001313 +2009_001314 +2009_001316 +2009_001319 +2009_001320 +2009_001321 +2009_001322 +2009_001323 +2009_001326 +2009_001327 +2009_001328 +2009_001329 +2009_001332 
+2009_001333 +2009_001339 +2009_001343 +2009_001344 +2009_001345 +2009_001348 +2009_001349 +2009_001350 +2009_001354 +2009_001355 +2009_001357 +2009_001359 +2009_001360 +2009_001361 +2009_001363 +2009_001364 +2009_001366 +2009_001367 +2009_001368 +2009_001369 +2009_001370 +2009_001371 +2009_001372 +2009_001374 +2009_001375 +2009_001376 +2009_001384 +2009_001385 +2009_001387 +2009_001388 +2009_001389 +2009_001390 +2009_001391 +2009_001393 +2009_001395 +2009_001397 +2009_001398 +2009_001403 +2009_001406 +2009_001407 +2009_001409 +2009_001411 +2009_001412 +2009_001413 +2009_001414 +2009_001417 +2009_001419 +2009_001422 +2009_001424 +2009_001426 +2009_001427 +2009_001431 +2009_001433 +2009_001434 +2009_001435 +2009_001437 +2009_001440 +2009_001443 +2009_001444 +2009_001446 +2009_001447 +2009_001448 +2009_001449 +2009_001450 +2009_001452 +2009_001453 +2009_001456 +2009_001457 +2009_001462 +2009_001463 +2009_001466 +2009_001468 +2009_001470 +2009_001472 +2009_001474 +2009_001475 +2009_001476 +2009_001479 +2009_001480 +2009_001481 +2009_001484 +2009_001490 +2009_001493 +2009_001494 +2009_001498 +2009_001500 +2009_001501 +2009_001502 +2009_001505 +2009_001507 +2009_001508 +2009_001509 +2009_001514 +2009_001516 +2009_001517 +2009_001518 +2009_001519 +2009_001521 +2009_001522 +2009_001526 +2009_001534 +2009_001535 +2009_001536 +2009_001537 +2009_001538 +2009_001539 +2009_001541 +2009_001542 +2009_001544 +2009_001546 +2009_001549 +2009_001550 +2009_001553 +2009_001554 +2009_001555 +2009_001558 +2009_001562 +2009_001565 +2009_001566 +2009_001567 +2009_001568 +2009_001570 +2009_001575 +2009_001577 +2009_001581 +2009_001585 +2009_001587 +2009_001589 +2009_001590 +2009_001591 +2009_001593 +2009_001594 +2009_001595 +2009_001598 +2009_001602 +2009_001605 +2009_001606 +2009_001607 +2009_001608 +2009_001611 +2009_001612 +2009_001614 +2009_001615 +2009_001617 +2009_001618 +2009_001621 +2009_001623 +2009_001625 +2009_001627 +2009_001631 +2009_001633 +2009_001635 +2009_001636 +2009_001638 +2009_001640 +2009_001642 +2009_001643 +2009_001644 +2009_001645 +2009_001646 +2009_001648 +2009_001651 +2009_001653 +2009_001657 +2009_001660 +2009_001663 +2009_001664 +2009_001667 +2009_001670 +2009_001671 +2009_001673 +2009_001674 +2009_001675 +2009_001676 +2009_001677 +2009_001678 +2009_001682 +2009_001683 +2009_001684 +2009_001687 +2009_001689 +2009_001690 +2009_001693 +2009_001695 +2009_001696 +2009_001699 +2009_001704 +2009_001705 +2009_001706 +2009_001707 +2009_001709 +2009_001713 +2009_001715 +2009_001718 +2009_001719 +2009_001720 +2009_001723 +2009_001724 +2009_001731 +2009_001732 +2009_001733 +2009_001734 +2009_001735 +2009_001738 +2009_001740 +2009_001741 +2009_001743 +2009_001744 +2009_001746 +2009_001747 +2009_001749 +2009_001750 +2009_001751 +2009_001752 +2009_001754 +2009_001755 +2009_001758 +2009_001759 +2009_001764 +2009_001765 +2009_001767 +2009_001768 +2009_001770 +2009_001774 +2009_001775 +2009_001778 +2009_001779 +2009_001780 +2009_001781 +2009_001782 +2009_001783 +2009_001784 +2009_001792 +2009_001794 +2009_001798 +2009_001799 +2009_001800 +2009_001801 +2009_001802 +2009_001804 +2009_001805 +2009_001806 +2009_001807 +2009_001809 +2009_001810 +2009_001811 +2009_001812 +2009_001816 +2009_001817 +2009_001818 +2009_001820 +2009_001822 +2009_001823 +2009_001825 +2009_001826 +2009_001827 +2009_001828 +2009_001830 +2009_001831 +2009_001833 +2009_001835 +2009_001837 +2009_001839 +2009_001840 +2009_001846 +2009_001847 +2009_001848 +2009_001850 +2009_001851 +2009_001852 +2009_001853 +2009_001854 +2009_001856 
+2009_001858 +2009_001861 +2009_001864 +2009_001865 +2009_001867 +2009_001868 +2009_001869 +2009_001871 +2009_001873 +2009_001874 +2009_001875 +2009_001881 +2009_001884 +2009_001885 +2009_001888 +2009_001890 +2009_001894 +2009_001897 +2009_001898 +2009_001902 +2009_001904 +2009_001905 +2009_001906 +2009_001907 +2009_001908 +2009_001909 +2009_001910 +2009_001911 +2009_001915 +2009_001916 +2009_001917 +2009_001922 +2009_001926 +2009_001927 +2009_001929 +2009_001931 +2009_001933 +2009_001934 +2009_001937 +2009_001940 +2009_001941 +2009_001945 +2009_001948 +2009_001949 +2009_001952 +2009_001959 +2009_001960 +2009_001961 +2009_001962 +2009_001964 +2009_001965 +2009_001967 +2009_001971 +2009_001972 +2009_001973 +2009_001975 +2009_001976 +2009_001977 +2009_001979 +2009_001980 +2009_001984 +2009_001988 +2009_001990 +2009_001991 +2009_001994 +2009_001997 +2009_001999 +2009_002000 +2009_002001 +2009_002002 +2009_002003 +2009_002008 +2009_002009 +2009_002010 +2009_002011 +2009_002012 +2009_002018 +2009_002019 +2009_002024 +2009_002031 +2009_002035 +2009_002037 +2009_002039 +2009_002040 +2009_002042 +2009_002044 +2009_002046 +2009_002047 +2009_002052 +2009_002053 +2009_002054 +2009_002055 +2009_002056 +2009_002057 +2009_002058 +2009_002060 +2009_002061 +2009_002064 +2009_002066 +2009_002072 +2009_002073 +2009_002077 +2009_002078 +2009_002082 +2009_002083 +2009_002086 +2009_002087 +2009_002088 +2009_002089 +2009_002093 +2009_002094 +2009_002096 +2009_002097 +2009_002098 +2009_002099 +2009_002103 +2009_002104 +2009_002105 +2009_002107 +2009_002110 +2009_002111 +2009_002112 +2009_002116 +2009_002117 +2009_002118 +2009_002119 +2009_002120 +2009_002122 +2009_002123 +2009_002126 +2009_002127 +2009_002128 +2009_002129 +2009_002131 +2009_002133 +2009_002136 +2009_002137 +2009_002139 +2009_002141 +2009_002144 +2009_002145 +2009_002146 +2009_002147 +2009_002149 +2009_002150 +2009_002151 +2009_002152 +2009_002153 +2009_002155 +2009_002164 +2009_002165 +2009_002169 +2009_002171 +2009_002173 +2009_002175 +2009_002176 +2009_002177 +2009_002180 +2009_002182 +2009_002185 +2009_002191 +2009_002192 +2009_002193 +2009_002194 +2009_002197 +2009_002198 +2009_002199 +2009_002202 +2009_002203 +2009_002204 +2009_002205 +2009_002208 +2009_002211 +2009_002212 +2009_002214 +2009_002215 +2009_002216 +2009_002219 +2009_002221 +2009_002222 +2009_002225 +2009_002226 +2009_002228 +2009_002229 +2009_002230 +2009_002231 +2009_002232 +2009_002235 +2009_002236 +2009_002238 +2009_002239 +2009_002240 +2009_002242 +2009_002245 +2009_002252 +2009_002253 +2009_002254 +2009_002256 +2009_002257 +2009_002258 +2009_002259 +2009_002262 +2009_002264 +2009_002265 +2009_002267 +2009_002268 +2009_002271 +2009_002272 +2009_002273 +2009_002274 +2009_002281 +2009_002282 +2009_002285 +2009_002286 +2009_002289 +2009_002291 +2009_002295 +2009_002297 +2009_002298 +2009_002299 +2009_002301 +2009_002302 +2009_002305 +2009_002306 +2009_002308 +2009_002311 +2009_002312 +2009_002314 +2009_002317 +2009_002319 +2009_002320 +2009_002324 +2009_002325 +2009_002326 +2009_002328 +2009_002331 +2009_002333 +2009_002335 +2009_002338 +2009_002339 +2009_002343 +2009_002346 +2009_002348 +2009_002349 +2009_002350 +2009_002352 +2009_002358 +2009_002360 +2009_002362 +2009_002363 +2009_002366 +2009_002370 +2009_002371 +2009_002372 +2009_002373 +2009_002374 +2009_002376 +2009_002377 +2009_002380 +2009_002381 +2009_002382 +2009_002386 +2009_002387 +2009_002388 +2009_002390 +2009_002391 +2009_002393 +2009_002397 +2009_002398 +2009_002399 +2009_002400 +2009_002401 +2009_002404 
+2009_002406 +2009_002407 +2009_002408 +2009_002409 +2009_002414 +2009_002415 +2009_002416 +2009_002419 +2009_002420 +2009_002422 +2009_002423 +2009_002424 +2009_002425 +2009_002429 +2009_002431 +2009_002432 +2009_002433 +2009_002434 +2009_002436 +2009_002438 +2009_002439 +2009_002441 +2009_002443 +2009_002444 +2009_002445 +2009_002448 +2009_002449 +2009_002452 +2009_002453 +2009_002456 +2009_002457 +2009_002460 +2009_002464 +2009_002465 +2009_002470 +2009_002471 +2009_002472 +2009_002474 +2009_002475 +2009_002476 +2009_002477 +2009_002487 +2009_002488 +2009_002499 +2009_002500 +2009_002504 +2009_002505 +2009_002506 +2009_002510 +2009_002512 +2009_002514 +2009_002515 +2009_002517 +2009_002518 +2009_002519 +2009_002521 +2009_002522 +2009_002523 +2009_002524 +2009_002525 +2009_002527 +2009_002530 +2009_002531 +2009_002532 +2009_002535 +2009_002536 +2009_002537 +2009_002539 +2009_002542 +2009_002543 +2009_002546 +2009_002549 +2009_002552 +2009_002553 +2009_002556 +2009_002557 +2009_002558 +2009_002559 +2009_002561 +2009_002562 +2009_002563 +2009_002565 +2009_002566 +2009_002567 +2009_002568 +2009_002569 +2009_002570 +2009_002571 +2009_002573 +2009_002577 +2009_002579 +2009_002580 +2009_002584 +2009_002585 +2009_002586 +2009_002588 +2009_002591 +2009_002592 +2009_002594 +2009_002595 +2009_002597 +2009_002599 +2009_002604 +2009_002605 +2009_002607 +2009_002608 +2009_002609 +2009_002611 +2009_002612 +2009_002613 +2009_002614 +2009_002615 +2009_002616 +2009_002618 +2009_002620 +2009_002621 +2009_002624 +2009_002625 +2009_002626 +2009_002628 +2009_002629 +2009_002632 +2009_002634 +2009_002635 +2009_002638 +2009_002645 +2009_002648 +2009_002649 +2009_002651 +2009_002652 +2009_002659 +2009_002662 +2009_002663 +2009_002665 +2009_002667 +2009_002668 +2009_002669 +2009_002670 +2009_002671 +2009_002672 +2009_002673 +2009_002674 +2009_002675 +2009_002676 +2009_002680 +2009_002681 +2009_002683 +2009_002684 +2009_002685 +2009_002687 +2009_002688 +2009_002689 +2009_002695 +2009_002697 +2009_002698 +2009_002703 +2009_002704 +2009_002705 +2009_002708 +2009_002710 +2009_002711 +2009_002712 +2009_002713 +2009_002714 +2009_002715 +2009_002717 +2009_002719 +2009_002725 +2009_002727 +2009_002728 +2009_002732 +2009_002733 +2009_002734 +2009_002739 +2009_002741 +2009_002743 +2009_002744 +2009_002746 +2009_002749 +2009_002750 +2009_002752 +2009_002753 +2009_002754 +2009_002755 +2009_002758 +2009_002759 +2009_002762 +2009_002763 +2009_002764 +2009_002765 +2009_002770 +2009_002771 +2009_002772 +2009_002774 +2009_002777 +2009_002778 +2009_002779 +2009_002780 +2009_002784 +2009_002785 +2009_002789 +2009_002790 +2009_002791 +2009_002792 +2009_002798 +2009_002799 +2009_002800 +2009_002803 +2009_002806 +2009_002807 +2009_002808 +2009_002809 +2009_002813 +2009_002814 +2009_002816 +2009_002817 +2009_002820 +2009_002824 +2009_002827 +2009_002830 +2009_002831 +2009_002833 +2009_002835 +2009_002836 +2009_002837 +2009_002838 +2009_002841 +2009_002842 +2009_002843 +2009_002844 +2009_002845 +2009_002847 +2009_002849 +2009_002850 +2009_002851 +2009_002853 +2009_002855 +2009_002856 +2009_002862 +2009_002865 +2009_002867 +2009_002869 +2009_002872 +2009_002876 +2009_002877 +2009_002879 +2009_002882 +2009_002883 +2009_002885 +2009_002887 +2009_002888 +2009_002890 +2009_002893 +2009_002894 +2009_002897 +2009_002898 +2009_002901 +2009_002902 +2009_002908 +2009_002910 +2009_002912 +2009_002914 +2009_002917 +2009_002918 +2009_002920 +2009_002921 +2009_002925 +2009_002928 +2009_002932 +2009_002933 +2009_002935 +2009_002936 +2009_002937 
+2009_002938 +2009_002940 +2009_002941 +2009_002946 +2009_002947 +2009_002952 +2009_002954 +2009_002955 +2009_002957 +2009_002958 +2009_002960 +2009_002961 +2009_002962 +2009_002967 +2009_002970 +2009_002971 +2009_002972 +2009_002975 +2009_002976 +2009_002977 +2009_002978 +2009_002980 +2009_002982 +2009_002983 +2009_002984 +2009_002985 +2009_002986 +2009_002988 +2009_002990 +2009_002993 +2009_002995 +2009_002998 +2009_002999 +2009_003000 +2009_003002 +2009_003003 +2009_003005 +2009_003006 +2009_003007 +2009_003010 +2009_003012 +2009_003013 +2009_003018 +2009_003019 +2009_003020 +2009_003022 +2009_003023 +2009_003031 +2009_003032 +2009_003033 +2009_003034 +2009_003035 +2009_003039 +2009_003042 +2009_003043 +2009_003044 +2009_003052 +2009_003053 +2009_003054 +2009_003056 +2009_003058 +2009_003059 +2009_003063 +2009_003064 +2009_003065 +2009_003066 +2009_003067 +2009_003068 +2009_003070 +2009_003071 +2009_003074 +2009_003075 +2009_003076 +2009_003077 +2009_003078 +2009_003080 +2009_003082 +2009_003083 +2009_003084 +2009_003087 +2009_003088 +2009_003089 +2009_003090 +2009_003091 +2009_003093 +2009_003095 +2009_003097 +2009_003098 +2009_003105 +2009_003107 +2009_003108 +2009_003109 +2009_003110 +2009_003114 +2009_003115 +2009_003116 +2009_003118 +2009_003122 +2009_003123 +2009_003125 +2009_003126 +2009_003127 +2009_003128 +2009_003129 +2009_003130 +2009_003132 +2009_003136 +2009_003138 +2009_003140 +2009_003142 +2009_003143 +2009_003144 +2009_003146 +2009_003147 +2009_003150 +2009_003151 +2009_003153 +2009_003154 +2009_003155 +2009_003156 +2009_003157 +2009_003164 +2009_003165 +2009_003166 +2009_003168 +2009_003172 +2009_003173 +2009_003175 +2009_003181 +2009_003183 +2009_003185 +2009_003187 +2009_003189 +2009_003191 +2009_003193 +2009_003194 +2009_003196 +2009_003198 +2009_003199 +2009_003200 +2009_003201 +2009_003204 +2009_003208 +2009_003209 +2009_003212 +2009_003214 +2009_003217 +2009_003218 +2009_003219 +2009_003222 +2009_003224 +2009_003225 +2009_003229 +2009_003230 +2009_003232 +2009_003233 +2009_003234 +2009_003238 +2009_003241 +2009_003247 +2009_003249 +2009_003251 +2009_003253 +2009_003254 +2009_003255 +2009_003257 +2009_003259 +2009_003261 +2009_003262 +2009_003265 +2009_003266 +2009_003267 +2009_003269 +2009_003271 +2009_003272 +2009_003273 +2009_003276 +2009_003277 +2009_003278 +2009_003282 +2009_003284 +2009_003285 +2009_003288 +2009_003290 +2009_003294 +2009_003297 +2009_003299 +2009_003300 +2009_003301 +2009_003304 +2009_003305 +2009_003309 +2009_003310 +2009_003311 +2009_003312 +2009_003315 +2009_003316 +2009_003317 +2009_003320 +2009_003323 +2009_003326 +2009_003327 +2009_003333 +2009_003338 +2009_003340 +2009_003343 +2009_003345 +2009_003346 +2009_003347 +2009_003348 +2009_003349 +2009_003350 +2009_003351 +2009_003352 +2009_003353 +2009_003360 +2009_003361 +2009_003363 +2009_003365 +2009_003367 +2009_003369 +2009_003372 +2009_003373 +2009_003375 +2009_003376 +2009_003377 +2009_003378 +2009_003379 +2009_003380 +2009_003381 +2009_003383 +2009_003384 +2009_003385 +2009_003386 +2009_003387 +2009_003394 +2009_003395 +2009_003396 +2009_003399 +2009_003400 +2009_003402 +2009_003406 +2009_003407 +2009_003409 +2009_003411 +2009_003415 +2009_003416 +2009_003417 +2009_003419 +2009_003422 +2009_003425 +2009_003430 +2009_003431 +2009_003433 +2009_003436 +2009_003440 +2009_003441 +2009_003443 +2009_003445 +2009_003446 +2009_003447 +2009_003450 +2009_003453 +2009_003454 +2009_003455 +2009_003456 +2009_003457 +2009_003458 +2009_003459 +2009_003460 +2009_003461 +2009_003462 +2009_003466 
+2009_003467 +2009_003468 +2009_003469 +2009_003476 +2009_003481 +2009_003482 +2009_003487 +2009_003488 +2009_003489 +2009_003490 +2009_003491 +2009_003492 +2009_003494 +2009_003497 +2009_003498 +2009_003499 +2009_003500 +2009_003504 +2009_003507 +2009_003508 +2009_003509 +2009_003510 +2009_003511 +2009_003513 +2009_003517 +2009_003519 +2009_003520 +2009_003521 +2009_003522 +2009_003523 +2009_003524 +2009_003528 +2009_003530 +2009_003531 +2009_003533 +2009_003534 +2009_003537 +2009_003538 +2009_003539 +2009_003540 +2009_003541 +2009_003542 +2009_003543 +2009_003544 +2009_003545 +2009_003546 +2009_003549 +2009_003551 +2009_003554 +2009_003555 +2009_003560 +2009_003562 +2009_003563 +2009_003564 +2009_003565 +2009_003566 +2009_003569 +2009_003571 +2009_003572 +2009_003576 +2009_003577 +2009_003581 +2009_003583 +2009_003588 +2009_003589 +2009_003592 +2009_003594 +2009_003598 +2009_003600 +2009_003601 +2009_003605 +2009_003606 +2009_003607 +2009_003608 +2009_003609 +2009_003612 +2009_003613 +2009_003614 +2009_003618 +2009_003624 +2009_003626 +2009_003627 +2009_003629 +2009_003633 +2009_003634 +2009_003635 +2009_003636 +2009_003637 +2009_003638 +2009_003639 +2009_003640 +2009_003642 +2009_003644 +2009_003646 +2009_003647 +2009_003650 +2009_003652 +2009_003654 +2009_003655 +2009_003656 +2009_003657 +2009_003660 +2009_003663 +2009_003664 +2009_003666 +2009_003667 +2009_003668 +2009_003669 +2009_003671 +2009_003677 +2009_003679 +2009_003683 +2009_003685 +2009_003686 +2009_003688 +2009_003689 +2009_003690 +2009_003694 +2009_003695 +2009_003696 +2009_003697 +2009_003698 +2009_003702 +2009_003703 +2009_003704 +2009_003705 +2009_003707 +2009_003708 +2009_003709 +2009_003710 +2009_003711 +2009_003713 +2009_003714 +2009_003717 +2009_003718 +2009_003720 +2009_003722 +2009_003725 +2009_003726 +2009_003732 +2009_003734 +2009_003735 +2009_003736 +2009_003738 +2009_003739 +2009_003743 +2009_003747 +2009_003751 +2009_003752 +2009_003753 +2009_003756 +2009_003757 +2009_003758 +2009_003759 +2009_003760 +2009_003765 +2009_003768 +2009_003771 +2009_003773 +2009_003775 +2009_003776 +2009_003781 +2009_003783 +2009_003784 +2009_003785 +2009_003786 +2009_003790 +2009_003793 +2009_003795 +2009_003799 +2009_003800 +2009_003801 +2009_003802 +2009_003804 +2009_003805 +2009_003806 +2009_003808 +2009_003810 +2009_003813 +2009_003814 +2009_003815 +2009_003816 +2009_003818 +2009_003819 +2009_003820 +2009_003821 +2009_003822 +2009_003825 +2009_003827 +2009_003829 +2009_003832 +2009_003835 +2009_003836 +2009_003837 +2009_003838 +2009_003840 +2009_003843 +2009_003846 +2009_003847 +2009_003848 +2009_003849 +2009_003852 +2009_003855 +2009_003857 +2009_003858 +2009_003860 +2009_003863 +2009_003865 +2009_003867 +2009_003870 +2009_003873 +2009_003874 +2009_003879 +2009_003883 +2009_003884 +2009_003888 +2009_003892 +2009_003895 +2009_003896 +2009_003897 +2009_003899 +2009_003900 +2009_003901 +2009_003902 +2009_003903 +2009_003904 +2009_003905 +2009_003908 +2009_003911 +2009_003912 +2009_003913 +2009_003914 +2009_003916 +2009_003920 +2009_003921 +2009_003922 +2009_003928 +2009_003929 +2009_003933 +2009_003936 +2009_003938 +2009_003942 +2009_003944 +2009_003947 +2009_003950 +2009_003951 +2009_003955 +2009_003956 +2009_003958 +2009_003961 +2009_003962 +2009_003965 +2009_003966 +2009_003969 +2009_003971 +2009_003973 +2009_003974 +2009_003975 +2009_003976 +2009_003977 +2009_003982 +2009_003985 +2009_003986 +2009_003991 +2009_003992 +2009_003993 +2009_003994 +2009_003995 +2009_004001 +2009_004002 +2009_004004 +2009_004005 +2009_004007 
+2009_004012 +2009_004016 +2009_004018 +2009_004019 +2009_004020 +2009_004021 +2009_004022 +2009_004023 +2009_004025 +2009_004031 +2009_004032 +2009_004033 +2009_004034 +2009_004037 +2009_004038 +2009_004040 +2009_004042 +2009_004043 +2009_004044 +2009_004050 +2009_004051 +2009_004052 +2009_004055 +2009_004058 +2009_004062 +2009_004069 +2009_004070 +2009_004072 +2009_004073 +2009_004074 +2009_004075 +2009_004076 +2009_004078 +2009_004082 +2009_004083 +2009_004084 +2009_004085 +2009_004088 +2009_004091 +2009_004092 +2009_004093 +2009_004094 +2009_004095 +2009_004096 +2009_004099 +2009_004100 +2009_004102 +2009_004103 +2009_004105 +2009_004108 +2009_004109 +2009_004111 +2009_004112 +2009_004113 +2009_004117 +2009_004118 +2009_004121 +2009_004122 +2009_004124 +2009_004125 +2009_004126 +2009_004128 +2009_004129 +2009_004131 +2009_004133 +2009_004134 +2009_004138 +2009_004139 +2009_004140 +2009_004141 +2009_004142 +2009_004148 +2009_004150 +2009_004152 +2009_004153 +2009_004154 +2009_004157 +2009_004159 +2009_004161 +2009_004162 +2009_004163 +2009_004164 +2009_004165 +2009_004166 +2009_004168 +2009_004169 +2009_004170 +2009_004171 +2009_004173 +2009_004174 +2009_004175 +2009_004176 +2009_004177 +2009_004178 +2009_004179 +2009_004180 +2009_004181 +2009_004183 +2009_004186 +2009_004187 +2009_004188 +2009_004191 +2009_004193 +2009_004197 +2009_004199 +2009_004200 +2009_004201 +2009_004202 +2009_004203 +2009_004205 +2009_004207 +2009_004210 +2009_004211 +2009_004212 +2009_004213 +2009_004217 +2009_004218 +2009_004221 +2009_004222 +2009_004224 +2009_004225 +2009_004227 +2009_004228 +2009_004229 +2009_004231 +2009_004232 +2009_004233 +2009_004234 +2009_004241 +2009_004242 +2009_004243 +2009_004244 +2009_004247 +2009_004248 +2009_004249 +2009_004255 +2009_004258 +2009_004261 +2009_004262 +2009_004263 +2009_004264 +2009_004271 +2009_004272 +2009_004273 +2009_004274 +2009_004276 +2009_004277 +2009_004278 +2009_004279 +2009_004283 +2009_004284 +2009_004285 +2009_004289 +2009_004290 +2009_004291 +2009_004295 +2009_004298 +2009_004300 +2009_004301 +2009_004303 +2009_004307 +2009_004308 +2009_004309 +2009_004312 +2009_004315 +2009_004316 +2009_004317 +2009_004319 +2009_004322 +2009_004323 +2009_004324 +2009_004327 +2009_004328 +2009_004329 +2009_004332 +2009_004334 +2009_004336 +2009_004338 +2009_004340 +2009_004341 +2009_004346 +2009_004347 +2009_004350 +2009_004351 +2009_004357 +2009_004358 +2009_004359 +2009_004361 +2009_004364 +2009_004366 +2009_004368 +2009_004369 +2009_004370 +2009_004371 +2009_004374 +2009_004375 +2009_004377 +2009_004382 +2009_004383 +2009_004390 +2009_004392 +2009_004394 +2009_004397 +2009_004399 +2009_004402 +2009_004403 +2009_004404 +2009_004406 +2009_004409 +2009_004410 +2009_004411 +2009_004414 +2009_004417 +2009_004419 +2009_004424 +2009_004425 +2009_004426 +2009_004429 +2009_004432 +2009_004434 +2009_004435 +2009_004436 +2009_004438 +2009_004440 +2009_004442 +2009_004444 +2009_004445 +2009_004446 +2009_004448 +2009_004449 +2009_004451 +2009_004452 +2009_004453 +2009_004454 +2009_004455 +2009_004456 +2009_004457 +2009_004464 +2009_004465 +2009_004468 +2009_004471 +2009_004475 +2009_004477 +2009_004478 +2009_004479 +2009_004483 +2009_004486 +2009_004492 +2009_004494 +2009_004496 +2009_004497 +2009_004499 +2009_004501 +2009_004502 +2009_004503 +2009_004504 +2009_004507 +2009_004508 +2009_004509 +2009_004511 +2009_004513 +2009_004514 +2009_004518 +2009_004519 +2009_004524 +2009_004525 +2009_004527 +2009_004529 +2009_004530 +2009_004532 +2009_004535 +2009_004536 +2009_004537 
+2009_004539 +2009_004540 +2009_004542 +2009_004543 +2009_004545 +2009_004547 +2009_004548 +2009_004551 +2009_004552 +2009_004554 +2009_004556 +2009_004557 +2009_004559 +2009_004560 +2009_004561 +2009_004562 +2009_004565 +2009_004567 +2009_004568 +2009_004570 +2009_004571 +2009_004572 +2009_004579 +2009_004580 +2009_004581 +2009_004582 +2009_004587 +2009_004588 +2009_004590 +2009_004592 +2009_004593 +2009_004594 +2009_004598 +2009_004601 +2009_004606 +2009_004607 +2009_004614 +2009_004616 +2009_004619 +2009_004620 +2009_004623 +2009_004624 +2009_004625 +2009_004626 +2009_004628 +2009_004629 +2009_004630 +2009_004631 +2009_004634 +2009_004635 +2009_004639 +2009_004642 +2009_004643 +2009_004645 +2009_004647 +2009_004648 +2009_004651 +2009_004652 +2009_004653 +2009_004655 +2009_004656 +2009_004661 +2009_004662 +2009_004664 +2009_004667 +2009_004669 +2009_004670 +2009_004671 +2009_004674 +2009_004677 +2009_004679 +2009_004681 +2009_004683 +2009_004684 +2009_004686 +2009_004687 +2009_004688 +2009_004694 +2009_004697 +2009_004701 +2009_004705 +2009_004706 +2009_004708 +2009_004709 +2009_004710 +2009_004713 +2009_004716 +2009_004718 +2009_004719 +2009_004720 +2009_004721 +2009_004723 +2009_004728 +2009_004730 +2009_004731 +2009_004732 +2009_004734 +2009_004737 +2009_004738 +2009_004744 +2009_004745 +2009_004746 +2009_004748 +2009_004749 +2009_004754 +2009_004756 +2009_004758 +2009_004759 +2009_004760 +2009_004761 +2009_004763 +2009_004764 +2009_004765 +2009_004766 +2009_004768 +2009_004769 +2009_004771 +2009_004772 +2009_004779 +2009_004780 +2009_004781 +2009_004782 +2009_004784 +2009_004786 +2009_004787 +2009_004789 +2009_004790 +2009_004794 +2009_004796 +2009_004797 +2009_004798 +2009_004799 +2009_004801 +2009_004804 +2009_004805 +2009_004806 +2009_004812 +2009_004813 +2009_004815 +2009_004817 +2009_004820 +2009_004822 +2009_004823 +2009_004824 +2009_004828 +2009_004829 +2009_004830 +2009_004831 +2009_004834 +2009_004836 +2009_004839 +2009_004841 +2009_004845 +2009_004846 +2009_004847 +2009_004848 +2009_004849 +2009_004855 +2009_004856 +2009_004857 +2009_004858 +2009_004859 +2009_004865 +2009_004867 +2009_004868 +2009_004869 +2009_004871 +2009_004872 +2009_004874 +2009_004876 +2009_004877 +2009_004880 +2009_004882 +2009_004885 +2009_004886 +2009_004887 +2009_004888 +2009_004889 +2009_004890 +2009_004895 +2009_004897 +2009_004898 +2009_004899 +2009_004901 +2009_004902 +2009_004903 +2009_004904 +2009_004905 +2009_004907 +2009_004913 +2009_004914 +2009_004917 +2009_004919 +2009_004921 +2009_004922 +2009_004926 +2009_004929 +2009_004930 +2009_004933 +2009_004934 +2009_004939 +2009_004940 +2009_004942 +2009_004943 +2009_004944 +2009_004945 +2009_004946 +2009_004947 +2009_004953 +2009_004956 +2009_004958 +2009_004959 +2009_004961 +2009_004962 +2009_004965 +2009_004969 +2009_004971 +2009_004972 +2009_004974 +2009_004975 +2009_004977 +2009_004979 +2009_004980 +2009_004982 +2009_004983 +2009_004984 +2009_004986 +2009_004987 +2009_004988 +2009_004990 +2009_004993 +2009_004994 +2009_004996 +2009_004999 +2009_005000 +2009_005001 +2009_005005 +2009_005006 +2009_005008 +2009_005015 +2009_005016 +2009_005019 +2009_005024 +2009_005025 +2009_005030 +2009_005031 +2009_005033 +2009_005035 +2009_005036 +2009_005037 +2009_005038 +2009_005040 +2009_005042 +2009_005044 +2009_005045 +2009_005051 +2009_005055 +2009_005056 +2009_005057 +2009_005060 +2009_005061 +2009_005062 +2009_005064 +2009_005068 +2009_005069 +2009_005070 +2009_005073 +2009_005075 +2009_005076 +2009_005078 +2009_005079 +2009_005080 +2009_005081 
+2009_005082 +2009_005083 +2009_005084 +2009_005085 +2009_005086 +2009_005087 +2009_005089 +2009_005094 +2009_005095 +2009_005098 +2009_005102 +2009_005103 +2009_005104 +2009_005107 +2009_005111 +2009_005114 +2009_005118 +2009_005119 +2009_005120 +2009_005126 +2009_005127 +2009_005128 +2009_005129 +2009_005130 +2009_005131 +2009_005133 +2009_005137 +2009_005140 +2009_005141 +2009_005142 +2009_005144 +2009_005145 +2009_005147 +2009_005148 +2009_005149 +2009_005150 +2009_005152 +2009_005153 +2009_005154 +2009_005155 +2009_005156 +2009_005158 +2009_005160 +2009_005161 +2009_005162 +2009_005163 +2009_005165 +2009_005168 +2009_005170 +2009_005171 +2009_005172 +2009_005177 +2009_005178 +2009_005181 +2009_005183 +2009_005185 +2009_005189 +2009_005190 +2009_005191 +2009_005193 +2009_005194 +2009_005198 +2009_005201 +2009_005202 +2009_005203 +2009_005204 +2009_005205 +2009_005210 +2009_005211 +2009_005215 +2009_005216 +2009_005217 +2009_005218 +2009_005219 +2009_005220 +2009_005221 +2009_005222 +2009_005225 +2009_005229 +2009_005231 +2009_005232 +2009_005234 +2009_005236 +2009_005239 +2009_005240 +2009_005242 +2009_005246 +2009_005247 +2009_005251 +2009_005256 +2009_005257 +2009_005260 +2009_005262 +2009_005263 +2009_005265 +2009_005267 +2009_005268 +2009_005269 +2009_005272 +2009_005278 +2009_005279 +2009_005282 +2009_005286 +2009_005287 +2009_005288 +2009_005292 +2009_005293 +2009_005294 +2009_005297 +2009_005299 +2009_005300 +2009_005302 +2009_005303 +2009_005307 +2009_005308 +2009_005309 +2009_005310 +2009_005311 +2010_000001 +2010_000002 +2010_000003 +2010_000009 +2010_000014 +2010_000015 +2010_000018 +2010_000020 +2010_000023 +2010_000024 +2010_000026 +2010_000027 +2010_000031 +2010_000033 +2010_000035 +2010_000036 +2010_000038 +2010_000043 +2010_000045 +2010_000048 +2010_000050 +2010_000052 +2010_000053 +2010_000054 +2010_000055 +2010_000056 +2010_000061 +2010_000063 +2010_000065 +2010_000067 +2010_000069 +2010_000071 +2010_000072 +2010_000073 +2010_000074 +2010_000075 +2010_000076 +2010_000079 +2010_000080 +2010_000082 +2010_000083 +2010_000084 +2010_000085 +2010_000087 +2010_000088 +2010_000089 +2010_000090 +2010_000091 +2010_000095 +2010_000097 +2010_000098 +2010_000099 +2010_000103 +2010_000109 +2010_000110 +2010_000111 +2010_000113 +2010_000114 +2010_000117 +2010_000118 +2010_000120 +2010_000124 +2010_000127 +2010_000131 +2010_000132 +2010_000133 +2010_000136 +2010_000137 +2010_000138 +2010_000139 +2010_000140 +2010_000141 +2010_000145 +2010_000148 +2010_000151 +2010_000152 +2010_000157 +2010_000159 +2010_000160 +2010_000162 +2010_000163 +2010_000165 +2010_000169 +2010_000170 +2010_000172 +2010_000174 +2010_000175 +2010_000177 +2010_000178 +2010_000182 +2010_000183 +2010_000184 +2010_000187 +2010_000189 +2010_000190 +2010_000193 +2010_000194 +2010_000195 +2010_000196 +2010_000197 +2010_000198 +2010_000199 +2010_000202 +2010_000203 +2010_000204 +2010_000209 +2010_000211 +2010_000213 +2010_000216 +2010_000218 +2010_000222 +2010_000224 +2010_000227 +2010_000229 +2010_000233 +2010_000234 +2010_000238 +2010_000241 +2010_000244 +2010_000245 +2010_000246 +2010_000247 +2010_000248 +2010_000249 +2010_000250 +2010_000254 +2010_000255 +2010_000256 +2010_000260 +2010_000261 +2010_000262 +2010_000263 +2010_000264 +2010_000266 +2010_000269 +2010_000270 +2010_000272 +2010_000273 +2010_000276 +2010_000279 +2010_000283 +2010_000284 +2010_000285 +2010_000286 +2010_000291 +2010_000293 +2010_000295 +2010_000296 +2010_000299 +2010_000302 +2010_000303 +2010_000307 +2010_000308 +2010_000309 +2010_000310 
+2010_000312 +2010_000313 +2010_000317 +2010_000318 +2010_000320 +2010_000321 +2010_000323 +2010_000324 +2010_000325 +2010_000327 +2010_000329 +2010_000330 +2010_000335 +2010_000336 +2010_000337 +2010_000342 +2010_000344 +2010_000347 +2010_000349 +2010_000351 +2010_000352 +2010_000356 +2010_000358 +2010_000361 +2010_000362 +2010_000370 +2010_000371 +2010_000372 +2010_000374 +2010_000375 +2010_000376 +2010_000377 +2010_000379 +2010_000381 +2010_000382 +2010_000384 +2010_000386 +2010_000388 +2010_000389 +2010_000390 +2010_000392 +2010_000393 +2010_000394 +2010_000395 +2010_000399 +2010_000401 +2010_000404 +2010_000406 +2010_000409 +2010_000413 +2010_000415 +2010_000418 +2010_000419 +2010_000420 +2010_000422 +2010_000426 +2010_000427 +2010_000431 +2010_000432 +2010_000433 +2010_000435 +2010_000436 +2010_000437 +2010_000439 +2010_000442 +2010_000444 +2010_000446 +2010_000447 +2010_000448 +2010_000449 +2010_000453 +2010_000456 +2010_000458 +2010_000459 +2010_000461 +2010_000462 +2010_000463 +2010_000465 +2010_000466 +2010_000468 +2010_000469 +2010_000470 +2010_000473 +2010_000474 +2010_000475 +2010_000477 +2010_000480 +2010_000483 +2010_000484 +2010_000485 +2010_000488 +2010_000490 +2010_000492 +2010_000493 +2010_000495 +2010_000497 +2010_000498 +2010_000500 +2010_000502 +2010_000503 +2010_000506 +2010_000508 +2010_000510 +2010_000511 +2010_000513 +2010_000515 +2010_000519 +2010_000522 +2010_000524 +2010_000526 +2010_000527 +2010_000530 +2010_000534 +2010_000536 +2010_000537 +2010_000538 +2010_000541 +2010_000545 +2010_000547 +2010_000548 +2010_000549 +2010_000552 +2010_000553 +2010_000556 +2010_000557 +2010_000559 +2010_000561 +2010_000562 +2010_000564 +2010_000567 +2010_000568 +2010_000571 +2010_000572 +2010_000573 +2010_000574 +2010_000576 +2010_000577 +2010_000578 +2010_000581 +2010_000582 +2010_000583 +2010_000586 +2010_000588 +2010_000590 +2010_000591 +2010_000601 +2010_000602 +2010_000603 +2010_000604 +2010_000608 +2010_000613 +2010_000616 +2010_000617 +2010_000621 +2010_000622 +2010_000624 +2010_000626 +2010_000628 +2010_000630 +2010_000632 +2010_000633 +2010_000635 +2010_000639 +2010_000641 +2010_000644 +2010_000645 +2010_000646 +2010_000647 +2010_000648 +2010_000651 +2010_000655 +2010_000658 +2010_000661 +2010_000664 +2010_000665 +2010_000666 +2010_000667 +2010_000669 +2010_000671 +2010_000674 +2010_000675 +2010_000678 +2010_000679 +2010_000681 +2010_000682 +2010_000683 +2010_000685 +2010_000687 +2010_000688 +2010_000689 +2010_000690 +2010_000691 +2010_000692 +2010_000694 +2010_000695 +2010_000697 +2010_000702 +2010_000705 +2010_000707 +2010_000710 +2010_000711 +2010_000712 +2010_000715 +2010_000716 +2010_000717 +2010_000721 +2010_000722 +2010_000723 +2010_000724 +2010_000726 +2010_000727 +2010_000729 +2010_000731 +2010_000735 +2010_000737 +2010_000738 +2010_000739 +2010_000740 +2010_000743 +2010_000744 +2010_000746 +2010_000747 +2010_000748 +2010_000749 +2010_000750 +2010_000754 +2010_000759 +2010_000760 +2010_000761 +2010_000764 +2010_000765 +2010_000769 +2010_000770 +2010_000771 +2010_000772 +2010_000773 +2010_000778 +2010_000782 +2010_000785 +2010_000786 +2010_000787 +2010_000788 +2010_000791 +2010_000792 +2010_000797 +2010_000799 +2010_000800 +2010_000802 +2010_000803 +2010_000805 +2010_000806 +2010_000807 +2010_000808 +2010_000810 +2010_000811 +2010_000814 +2010_000815 +2010_000821 +2010_000822 +2010_000827 +2010_000828 +2010_000829 +2010_000830 +2010_000831 +2010_000836 +2010_000837 +2010_000838 +2010_000842 +2010_000846 +2010_000847 +2010_000849 +2010_000855 +2010_000857 
+2010_000860 +2010_000862 +2010_000863 +2010_000865 +2010_000866 +2010_000870 +2010_000871 +2010_000872 +2010_000874 +2010_000875 +2010_000876 +2010_000879 +2010_000883 +2010_000885 +2010_000887 +2010_000889 +2010_000891 +2010_000893 +2010_000897 +2010_000898 +2010_000899 +2010_000904 +2010_000906 +2010_000907 +2010_000908 +2010_000910 +2010_000912 +2010_000914 +2010_000915 +2010_000918 +2010_000920 +2010_000922 +2010_000923 +2010_000926 +2010_000927 +2010_000928 +2010_000929 +2010_000931 +2010_000938 +2010_000939 +2010_000941 +2010_000942 +2010_000944 +2010_000945 +2010_000947 +2010_000948 +2010_000952 +2010_000954 +2010_000955 +2010_000956 +2010_000959 +2010_000961 +2010_000968 +2010_000970 +2010_000971 +2010_000973 +2010_000974 +2010_000975 +2010_000978 +2010_000979 +2010_000981 +2010_000983 +2010_000984 +2010_000986 +2010_000989 +2010_000991 +2010_000993 +2010_000994 +2010_000995 +2010_000996 +2010_001000 +2010_001002 +2010_001006 +2010_001008 +2010_001009 +2010_001010 +2010_001011 +2010_001012 +2010_001013 +2010_001016 +2010_001017 +2010_001020 +2010_001021 +2010_001023 +2010_001024 +2010_001025 +2010_001030 +2010_001032 +2010_001036 +2010_001039 +2010_001042 +2010_001043 +2010_001044 +2010_001049 +2010_001051 +2010_001052 +2010_001054 +2010_001057 +2010_001061 +2010_001063 +2010_001066 +2010_001069 +2010_001070 +2010_001074 +2010_001076 +2010_001077 +2010_001079 +2010_001080 +2010_001082 +2010_001085 +2010_001087 +2010_001089 +2010_001092 +2010_001094 +2010_001098 +2010_001099 +2010_001100 +2010_001103 +2010_001104 +2010_001105 +2010_001106 +2010_001107 +2010_001109 +2010_001110 +2010_001111 +2010_001112 +2010_001113 +2010_001117 +2010_001118 +2010_001119 +2010_001120 +2010_001121 +2010_001123 +2010_001124 +2010_001125 +2010_001126 +2010_001127 +2010_001130 +2010_001131 +2010_001134 +2010_001139 +2010_001140 +2010_001142 +2010_001143 +2010_001147 +2010_001148 +2010_001149 +2010_001151 +2010_001152 +2010_001154 +2010_001158 +2010_001159 +2010_001160 +2010_001163 +2010_001164 +2010_001172 +2010_001174 +2010_001175 +2010_001177 +2010_001179 +2010_001181 +2010_001183 +2010_001184 +2010_001185 +2010_001188 +2010_001189 +2010_001192 +2010_001193 +2010_001195 +2010_001199 +2010_001201 +2010_001204 +2010_001205 +2010_001206 +2010_001210 +2010_001211 +2010_001212 +2010_001214 +2010_001215 +2010_001216 +2010_001218 +2010_001219 +2010_001220 +2010_001224 +2010_001225 +2010_001229 +2010_001234 +2010_001237 +2010_001240 +2010_001241 +2010_001242 +2010_001245 +2010_001246 +2010_001247 +2010_001250 +2010_001251 +2010_001253 +2010_001254 +2010_001256 +2010_001257 +2010_001261 +2010_001263 +2010_001264 +2010_001270 +2010_001271 +2010_001272 +2010_001273 +2010_001274 +2010_001275 +2010_001277 +2010_001279 +2010_001282 +2010_001286 +2010_001287 +2010_001288 +2010_001289 +2010_001291 +2010_001292 +2010_001293 +2010_001294 +2010_001299 +2010_001301 +2010_001305 +2010_001310 +2010_001311 +2010_001312 +2010_001313 +2010_001315 +2010_001317 +2010_001320 +2010_001321 +2010_001325 +2010_001326 +2010_001327 +2010_001328 +2010_001329 +2010_001331 +2010_001333 +2010_001337 +2010_001338 +2010_001339 +2010_001343 +2010_001344 +2010_001347 +2010_001351 +2010_001355 +2010_001356 +2010_001357 +2010_001360 +2010_001361 +2010_001363 +2010_001364 +2010_001366 +2010_001367 +2010_001370 +2010_001372 +2010_001374 +2010_001376 +2010_001382 +2010_001383 +2010_001385 +2010_001386 +2010_001390 +2010_001394 +2010_001395 +2010_001397 +2010_001399 +2010_001401 +2010_001402 +2010_001403 +2010_001405 +2010_001406 +2010_001407 
+2010_001408 +2010_001410 +2010_001411 +2010_001412 +2010_001413 +2010_001417 +2010_001418 +2010_001421 +2010_001422 +2010_001425 +2010_001426 +2010_001430 +2010_001431 +2010_001432 +2010_001433 +2010_001434 +2010_001435 +2010_001439 +2010_001441 +2010_001448 +2010_001449 +2010_001450 +2010_001451 +2010_001452 +2010_001453 +2010_001455 +2010_001456 +2010_001457 +2010_001458 +2010_001461 +2010_001463 +2010_001464 +2010_001465 +2010_001468 +2010_001472 +2010_001473 +2010_001478 +2010_001479 +2010_001480 +2010_001481 +2010_001486 +2010_001487 +2010_001489 +2010_001497 +2010_001499 +2010_001501 +2010_001502 +2010_001503 +2010_001505 +2010_001511 +2010_001514 +2010_001515 +2010_001516 +2010_001518 +2010_001520 +2010_001522 +2010_001525 +2010_001528 +2010_001529 +2010_001533 +2010_001534 +2010_001535 +2010_001536 +2010_001537 +2010_001539 +2010_001540 +2010_001543 +2010_001544 +2010_001547 +2010_001548 +2010_001550 +2010_001551 +2010_001552 +2010_001553 +2010_001555 +2010_001557 +2010_001560 +2010_001561 +2010_001562 +2010_001563 +2010_001569 +2010_001571 +2010_001572 +2010_001574 +2010_001576 +2010_001577 +2010_001579 +2010_001580 +2010_001583 +2010_001584 +2010_001586 +2010_001587 +2010_001590 +2010_001592 +2010_001594 +2010_001595 +2010_001596 +2010_001599 +2010_001601 +2010_001602 +2010_001603 +2010_001606 +2010_001607 +2010_001608 +2010_001614 +2010_001618 +2010_001619 +2010_001625 +2010_001626 +2010_001630 +2010_001633 +2010_001635 +2010_001636 +2010_001637 +2010_001638 +2010_001640 +2010_001644 +2010_001645 +2010_001646 +2010_001647 +2010_001649 +2010_001650 +2010_001652 +2010_001656 +2010_001659 +2010_001660 +2010_001665 +2010_001668 +2010_001669 +2010_001671 +2010_001674 +2010_001675 +2010_001676 +2010_001679 +2010_001680 +2010_001682 +2010_001685 +2010_001687 +2010_001689 +2010_001690 +2010_001691 +2010_001692 +2010_001694 +2010_001697 +2010_001698 +2010_001699 +2010_001700 +2010_001705 +2010_001706 +2010_001709 +2010_001710 +2010_001712 +2010_001715 +2010_001717 +2010_001718 +2010_001719 +2010_001720 +2010_001726 +2010_001729 +2010_001731 +2010_001732 +2010_001734 +2010_001737 +2010_001739 +2010_001743 +2010_001744 +2010_001746 +2010_001747 +2010_001748 +2010_001749 +2010_001752 +2010_001753 +2010_001754 +2010_001756 +2010_001757 +2010_001759 +2010_001760 +2010_001762 +2010_001763 +2010_001767 +2010_001768 +2010_001770 +2010_001771 +2010_001773 +2010_001776 +2010_001777 +2010_001780 +2010_001783 +2010_001784 +2010_001785 +2010_001787 +2010_001788 +2010_001794 +2010_001795 +2010_001796 +2010_001797 +2010_001801 +2010_001803 +2010_001806 +2010_001807 +2010_001808 +2010_001810 +2010_001814 +2010_001817 +2010_001819 +2010_001820 +2010_001821 +2010_001823 +2010_001827 +2010_001828 +2010_001829 +2010_001830 +2010_001836 +2010_001837 +2010_001838 +2010_001841 +2010_001842 +2010_001843 +2010_001845 +2010_001846 +2010_001849 +2010_001850 +2010_001851 +2010_001852 +2010_001853 +2010_001856 +2010_001857 +2010_001858 +2010_001860 +2010_001862 +2010_001863 +2010_001864 +2010_001868 +2010_001869 +2010_001870 +2010_001877 +2010_001881 +2010_001884 +2010_001885 +2010_001891 +2010_001892 +2010_001893 +2010_001896 +2010_001899 +2010_001904 +2010_001907 +2010_001908 +2010_001911 +2010_001913 +2010_001916 +2010_001918 +2010_001919 +2010_001921 +2010_001922 +2010_001923 +2010_001924 +2010_001927 +2010_001929 +2010_001931 +2010_001933 +2010_001934 +2010_001937 +2010_001938 +2010_001939 +2010_001940 +2010_001941 +2010_001944 +2010_001948 +2010_001950 +2010_001951 +2010_001954 +2010_001956 +2010_001957 
+2010_001960 +2010_001962 +2010_001966 +2010_001967 +2010_001968 +2010_001970 +2010_001973 +2010_001974 +2010_001976 +2010_001978 +2010_001979 +2010_001980 +2010_001981 +2010_001982 +2010_001986 +2010_001987 +2010_001988 +2010_001992 +2010_001993 +2010_001994 +2010_001995 +2010_001998 +2010_002000 +2010_002002 +2010_002005 +2010_002006 +2010_002015 +2010_002017 +2010_002018 +2010_002019 +2010_002020 +2010_002022 +2010_002023 +2010_002025 +2010_002026 +2010_002029 +2010_002030 +2010_002032 +2010_002037 +2010_002039 +2010_002040 +2010_002041 +2010_002042 +2010_002044 +2010_002045 +2010_002046 +2010_002047 +2010_002048 +2010_002050 +2010_002054 +2010_002055 +2010_002057 +2010_002058 +2010_002060 +2010_002065 +2010_002067 +2010_002068 +2010_002070 +2010_002073 +2010_002080 +2010_002085 +2010_002086 +2010_002089 +2010_002094 +2010_002095 +2010_002096 +2010_002097 +2010_002098 +2010_002100 +2010_002102 +2010_002104 +2010_002105 +2010_002106 +2010_002107 +2010_002113 +2010_002117 +2010_002118 +2010_002121 +2010_002124 +2010_002127 +2010_002128 +2010_002129 +2010_002130 +2010_002132 +2010_002133 +2010_002136 +2010_002137 +2010_002138 +2010_002139 +2010_002141 +2010_002142 +2010_002143 +2010_002146 +2010_002147 +2010_002149 +2010_002150 +2010_002152 +2010_002154 +2010_002161 +2010_002166 +2010_002167 +2010_002168 +2010_002172 +2010_002175 +2010_002176 +2010_002177 +2010_002179 +2010_002180 +2010_002181 +2010_002182 +2010_002183 +2010_002185 +2010_002187 +2010_002191 +2010_002192 +2010_002193 +2010_002194 +2010_002195 +2010_002199 +2010_002200 +2010_002203 +2010_002204 +2010_002207 +2010_002208 +2010_002211 +2010_002213 +2010_002215 +2010_002216 +2010_002218 +2010_002219 +2010_002220 +2010_002221 +2010_002223 +2010_002224 +2010_002226 +2010_002227 +2010_002228 +2010_002229 +2010_002232 +2010_002236 +2010_002242 +2010_002243 +2010_002244 +2010_002245 +2010_002247 +2010_002248 +2010_002251 +2010_002254 +2010_002255 +2010_002261 +2010_002263 +2010_002267 +2010_002269 +2010_002271 +2010_002274 +2010_002276 +2010_002278 +2010_002279 +2010_002283 +2010_002286 +2010_002287 +2010_002289 +2010_002294 +2010_002295 +2010_002299 +2010_002301 +2010_002303 +2010_002305 +2010_002307 +2010_002309 +2010_002310 +2010_002312 +2010_002313 +2010_002315 +2010_002316 +2010_002318 +2010_002319 +2010_002320 +2010_002321 +2010_002326 +2010_002327 +2010_002332 +2010_002333 +2010_002336 +2010_002337 +2010_002338 +2010_002340 +2010_002346 +2010_002348 +2010_002349 +2010_002353 +2010_002354 +2010_002356 +2010_002357 +2010_002361 +2010_002363 +2010_002364 +2010_002365 +2010_002366 +2010_002368 +2010_002369 +2010_002370 +2010_002371 +2010_002372 +2010_002373 +2010_002374 +2010_002378 +2010_002379 +2010_002382 +2010_002383 +2010_002387 +2010_002388 +2010_002390 +2010_002391 +2010_002392 +2010_002393 +2010_002396 +2010_002398 +2010_002399 +2010_002400 +2010_002402 +2010_002405 +2010_002406 +2010_002408 +2010_002409 +2010_002410 +2010_002413 +2010_002418 +2010_002420 +2010_002422 +2010_002424 +2010_002425 +2010_002427 +2010_002429 +2010_002431 +2010_002435 +2010_002436 +2010_002438 +2010_002439 +2010_002440 +2010_002445 +2010_002446 +2010_002448 +2010_002449 +2010_002450 +2010_002452 +2010_002455 +2010_002456 +2010_002457 +2010_002458 +2010_002459 +2010_002460 +2010_002461 +2010_002462 +2010_002468 +2010_002469 +2010_002472 +2010_002475 +2010_002479 +2010_002480 +2010_002482 +2010_002484 +2010_002485 +2010_002487 +2010_002492 +2010_002496 +2010_002497 +2010_002498 +2010_002499 +2010_002501 +2010_002504 +2010_002507 +2010_002509 
+2010_002510 +2010_002512 +2010_002513 +2010_002516 +2010_002518 +2010_002520 +2010_002526 +2010_002527 +2010_002529 +2010_002531 +2010_002532 +2010_002533 +2010_002534 +2010_002536 +2010_002537 +2010_002538 +2010_002539 +2010_002542 +2010_002543 +2010_002546 +2010_002547 +2010_002551 +2010_002552 +2010_002553 +2010_002556 +2010_002561 +2010_002562 +2010_002565 +2010_002567 +2010_002569 +2010_002570 +2010_002573 +2010_002575 +2010_002577 +2010_002578 +2010_002579 +2010_002580 +2010_002582 +2010_002583 +2010_002586 +2010_002587 +2010_002589 +2010_002592 +2010_002594 +2010_002597 +2010_002598 +2010_002601 +2010_002602 +2010_002603 +2010_002605 +2010_002614 +2010_002615 +2010_002616 +2010_002618 +2010_002620 +2010_002621 +2010_002623 +2010_002624 +2010_002625 +2010_002626 +2010_002628 +2010_002629 +2010_002631 +2010_002632 +2010_002638 +2010_002639 +2010_002642 +2010_002644 +2010_002645 +2010_002647 +2010_002652 +2010_002653 +2010_002654 +2010_002656 +2010_002659 +2010_002660 +2010_002661 +2010_002662 +2010_002665 +2010_002666 +2010_002667 +2010_002668 +2010_002674 +2010_002675 +2010_002676 +2010_002678 +2010_002679 +2010_002682 +2010_002684 +2010_002686 +2010_002688 +2010_002691 +2010_002692 +2010_002693 +2010_002695 +2010_002696 +2010_002697 +2010_002701 +2010_002702 +2010_002704 +2010_002705 +2010_002708 +2010_002710 +2010_002713 +2010_002714 +2010_002716 +2010_002720 +2010_002721 +2010_002722 +2010_002723 +2010_002725 +2010_002728 +2010_002729 +2010_002733 +2010_002734 +2010_002736 +2010_002737 +2010_002740 +2010_002741 +2010_002742 +2010_002746 +2010_002747 +2010_002750 +2010_002752 +2010_002754 +2010_002758 +2010_002759 +2010_002760 +2010_002763 +2010_002767 +2010_002770 +2010_002771 +2010_002772 +2010_002774 +2010_002775 +2010_002778 +2010_002779 +2010_002780 +2010_002781 +2010_002783 +2010_002786 +2010_002789 +2010_002790 +2010_002791 +2010_002792 +2010_002793 +2010_002794 +2010_002797 +2010_002801 +2010_002803 +2010_002805 +2010_002807 +2010_002808 +2010_002811 +2010_002813 +2010_002814 +2010_002815 +2010_002816 +2010_002817 +2010_002820 +2010_002821 +2010_002822 +2010_002824 +2010_002827 +2010_002830 +2010_002831 +2010_002834 +2010_002838 +2010_002839 +2010_002840 +2010_002841 +2010_002842 +2010_002843 +2010_002844 +2010_002845 +2010_002851 +2010_002853 +2010_002854 +2010_002855 +2010_002856 +2010_002857 +2010_002858 +2010_002860 +2010_002864 +2010_002865 +2010_002868 +2010_002870 +2010_002871 +2010_002873 +2010_002876 +2010_002877 +2010_002879 +2010_002880 +2010_002881 +2010_002884 +2010_002887 +2010_002891 +2010_002892 +2010_002896 +2010_002899 +2010_002900 +2010_002901 +2010_002902 +2010_002903 +2010_002905 +2010_002907 +2010_002909 +2010_002914 +2010_002915 +2010_002917 +2010_002921 +2010_002924 +2010_002927 +2010_002929 +2010_002930 +2010_002931 +2010_002935 +2010_002937 +2010_002938 +2010_002939 +2010_002940 +2010_002941 +2010_002946 +2010_002947 +2010_002948 +2010_002954 +2010_002955 +2010_002956 +2010_002958 +2010_002960 +2010_002962 +2010_002963 +2010_002965 +2010_002972 +2010_002973 +2010_002976 +2010_002978 +2010_002979 +2010_002980 +2010_002982 +2010_002985 +2010_002987 +2010_002988 +2010_002990 +2010_002991 +2010_002993 +2010_002995 +2010_003002 +2010_003003 +2010_003007 +2010_003010 +2010_003011 +2010_003013 +2010_003014 +2010_003015 +2010_003016 +2010_003017 +2010_003019 +2010_003024 +2010_003025 +2010_003027 +2010_003028 +2010_003032 +2010_003034 +2010_003035 +2010_003037 +2010_003040 +2010_003043 +2010_003044 +2010_003047 +2010_003050 +2010_003051 +2010_003053 
+2010_003054 +2010_003055 +2010_003056 +2010_003057 +2010_003060 +2010_003062 +2010_003067 +2010_003071 +2010_003072 +2010_003074 +2010_003077 +2010_003078 +2010_003081 +2010_003082 +2010_003084 +2010_003086 +2010_003088 +2010_003091 +2010_003092 +2010_003093 +2010_003094 +2010_003097 +2010_003098 +2010_003101 +2010_003102 +2010_003103 +2010_003106 +2010_003107 +2010_003108 +2010_003112 +2010_003114 +2010_003115 +2010_003117 +2010_003119 +2010_003120 +2010_003122 +2010_003123 +2010_003127 +2010_003129 +2010_003132 +2010_003133 +2010_003135 +2010_003137 +2010_003138 +2010_003139 +2010_003143 +2010_003146 +2010_003147 +2010_003148 +2010_003149 +2010_003151 +2010_003153 +2010_003154 +2010_003156 +2010_003157 +2010_003159 +2010_003160 +2010_003162 +2010_003168 +2010_003169 +2010_003170 +2010_003173 +2010_003174 +2010_003176 +2010_003179 +2010_003183 +2010_003185 +2010_003186 +2010_003187 +2010_003190 +2010_003191 +2010_003192 +2010_003197 +2010_003199 +2010_003200 +2010_003201 +2010_003203 +2010_003204 +2010_003206 +2010_003207 +2010_003212 +2010_003214 +2010_003218 +2010_003219 +2010_003220 +2010_003222 +2010_003223 +2010_003227 +2010_003230 +2010_003231 +2010_003232 +2010_003233 +2010_003236 +2010_003238 +2010_003239 +2010_003240 +2010_003241 +2010_003244 +2010_003248 +2010_003249 +2010_003250 +2010_003251 +2010_003252 +2010_003253 +2010_003255 +2010_003256 +2010_003257 +2010_003259 +2010_003260 +2010_003263 +2010_003264 +2010_003269 +2010_003270 +2010_003274 +2010_003275 +2010_003276 +2010_003278 +2010_003279 +2010_003280 +2010_003283 +2010_003285 +2010_003287 +2010_003290 +2010_003291 +2010_003293 +2010_003297 +2010_003299 +2010_003300 +2010_003301 +2010_003302 +2010_003303 +2010_003304 +2010_003305 +2010_003309 +2010_003314 +2010_003316 +2010_003321 +2010_003325 +2010_003326 +2010_003329 +2010_003331 +2010_003332 +2010_003333 +2010_003335 +2010_003337 +2010_003341 +2010_003342 +2010_003343 +2010_003344 +2010_003345 +2010_003350 +2010_003351 +2010_003353 +2010_003355 +2010_003358 +2010_003361 +2010_003362 +2010_003365 +2010_003366 +2010_003367 +2010_003368 +2010_003370 +2010_003371 +2010_003372 +2010_003374 +2010_003375 +2010_003376 +2010_003379 +2010_003380 +2010_003381 +2010_003383 +2010_003384 +2010_003385 +2010_003390 +2010_003391 +2010_003395 +2010_003397 +2010_003398 +2010_003400 +2010_003401 +2010_003402 +2010_003405 +2010_003406 +2010_003409 +2010_003411 +2010_003415 +2010_003418 +2010_003419 +2010_003421 +2010_003427 +2010_003429 +2010_003432 +2010_003435 +2010_003436 +2010_003437 +2010_003439 +2010_003446 +2010_003450 +2010_003451 +2010_003453 +2010_003458 +2010_003461 +2010_003465 +2010_003467 +2010_003468 +2010_003469 +2010_003470 +2010_003473 +2010_003474 +2010_003477 +2010_003478 +2010_003479 +2010_003481 +2010_003482 +2010_003483 +2010_003488 +2010_003490 +2010_003491 +2010_003493 +2010_003495 +2010_003496 +2010_003497 +2010_003503 +2010_003506 +2010_003507 +2010_003508 +2010_003509 +2010_003512 +2010_003513 +2010_003514 +2010_003520 +2010_003522 +2010_003526 +2010_003527 +2010_003529 +2010_003531 +2010_003532 +2010_003534 +2010_003535 +2010_003537 +2010_003538 +2010_003539 +2010_003540 +2010_003541 +2010_003546 +2010_003547 +2010_003549 +2010_003551 +2010_003554 +2010_003556 +2010_003559 +2010_003560 +2010_003561 +2010_003562 +2010_003563 +2010_003567 +2010_003568 +2010_003569 +2010_003573 +2010_003574 +2010_003576 +2010_003579 +2010_003582 +2010_003585 +2010_003588 +2010_003592 +2010_003594 +2010_003597 +2010_003598 +2010_003599 +2010_003601 +2010_003603 +2010_003604 
+2010_003605 +2010_003608 +2010_003609 +2010_003610 +2010_003612 +2010_003613 +2010_003618 +2010_003625 +2010_003628 +2010_003629 +2010_003630 +2010_003632 +2010_003634 +2010_003635 +2010_003640 +2010_003641 +2010_003643 +2010_003644 +2010_003645 +2010_003648 +2010_003649 +2010_003651 +2010_003653 +2010_003655 +2010_003656 +2010_003659 +2010_003664 +2010_003665 +2010_003667 +2010_003670 +2010_003671 +2010_003672 +2010_003673 +2010_003674 +2010_003675 +2010_003677 +2010_003679 +2010_003680 +2010_003686 +2010_003687 +2010_003688 +2010_003689 +2010_003690 +2010_003695 +2010_003696 +2010_003701 +2010_003703 +2010_003708 +2010_003709 +2010_003714 +2010_003716 +2010_003717 +2010_003719 +2010_003721 +2010_003723 +2010_003724 +2010_003725 +2010_003728 +2010_003729 +2010_003730 +2010_003731 +2010_003734 +2010_003735 +2010_003736 +2010_003737 +2010_003742 +2010_003743 +2010_003744 +2010_003745 +2010_003746 +2010_003747 +2010_003752 +2010_003754 +2010_003755 +2010_003757 +2010_003758 +2010_003761 +2010_003762 +2010_003764 +2010_003768 +2010_003770 +2010_003771 +2010_003772 +2010_003773 +2010_003774 +2010_003779 +2010_003781 +2010_003784 +2010_003788 +2010_003789 +2010_003791 +2010_003792 +2010_003798 +2010_003799 +2010_003800 +2010_003801 +2010_003804 +2010_003805 +2010_003806 +2010_003807 +2010_003811 +2010_003813 +2010_003815 +2010_003816 +2010_003818 +2010_003820 +2010_003821 +2010_003822 +2010_003823 +2010_003825 +2010_003826 +2010_003828 +2010_003837 +2010_003844 +2010_003845 +2010_003847 +2010_003848 +2010_003852 +2010_003854 +2010_003855 +2010_003856 +2010_003857 +2010_003859 +2010_003860 +2010_003861 +2010_003863 +2010_003864 +2010_003865 +2010_003871 +2010_003874 +2010_003875 +2010_003877 +2010_003878 +2010_003879 +2010_003884 +2010_003887 +2010_003890 +2010_003891 +2010_003892 +2010_003893 +2010_003894 +2010_003897 +2010_003898 +2010_003899 +2010_003900 +2010_003906 +2010_003910 +2010_003911 +2010_003912 +2010_003914 +2010_003915 +2010_003919 +2010_003920 +2010_003925 +2010_003928 +2010_003929 +2010_003931 +2010_003933 +2010_003936 +2010_003937 +2010_003938 +2010_003939 +2010_003942 +2010_003943 +2010_003944 +2010_003945 +2010_003947 +2010_003949 +2010_003950 +2010_003954 +2010_003955 +2010_003956 +2010_003957 +2010_003958 +2010_003961 +2010_003966 +2010_003970 +2010_003971 +2010_003974 +2010_003976 +2010_003980 +2010_003981 +2010_003982 +2010_003983 +2010_003987 +2010_003988 +2010_003994 +2010_003995 +2010_003996 +2010_003999 +2010_004002 +2010_004005 +2010_004006 +2010_004007 +2010_004008 +2010_004009 +2010_004010 +2010_004011 +2010_004014 +2010_004017 +2010_004021 +2010_004023 +2010_004025 +2010_004026 +2010_004027 +2010_004028 +2010_004029 +2010_004030 +2010_004031 +2010_004033 +2010_004036 +2010_004037 +2010_004041 +2010_004042 +2010_004043 +2010_004045 +2010_004048 +2010_004050 +2010_004052 +2010_004053 +2010_004054 +2010_004056 +2010_004059 +2010_004060 +2010_004061 +2010_004062 +2010_004063 +2010_004064 +2010_004065 +2010_004066 +2010_004067 +2010_004069 +2010_004071 +2010_004072 +2010_004073 +2010_004074 +2010_004075 +2010_004081 +2010_004084 +2010_004088 +2010_004089 +2010_004092 +2010_004094 +2010_004095 +2010_004096 +2010_004102 +2010_004104 +2010_004105 +2010_004107 +2010_004108 +2010_004109 +2010_004111 +2010_004116 +2010_004118 +2010_004119 +2010_004120 +2010_004121 +2010_004123 +2010_004124 +2010_004125 +2010_004129 +2010_004130 +2010_004133 +2010_004137 +2010_004138 +2010_004139 +2010_004140 +2010_004141 +2010_004143 +2010_004144 +2010_004145 +2010_004148 +2010_004149 
+2010_004154 +2010_004157 +2010_004160 +2010_004161 +2010_004162 +2010_004163 +2010_004165 +2010_004168 +2010_004171 +2010_004172 +2010_004173 +2010_004175 +2010_004178 +2010_004179 +2010_004180 +2010_004182 +2010_004184 +2010_004186 +2010_004187 +2010_004188 +2010_004191 +2010_004192 +2010_004193 +2010_004197 +2010_004198 +2010_004201 +2010_004204 +2010_004207 +2010_004208 +2010_004209 +2010_004210 +2010_004211 +2010_004216 +2010_004219 +2010_004222 +2010_004223 +2010_004224 +2010_004225 +2010_004226 +2010_004227 +2010_004228 +2010_004229 +2010_004230 +2010_004231 +2010_004238 +2010_004239 +2010_004242 +2010_004244 +2010_004247 +2010_004248 +2010_004249 +2010_004252 +2010_004253 +2010_004254 +2010_004256 +2010_004257 +2010_004258 +2010_004259 +2010_004263 +2010_004264 +2010_004271 +2010_004275 +2010_004276 +2010_004278 +2010_004279 +2010_004280 +2010_004282 +2010_004283 +2010_004286 +2010_004288 +2010_004289 +2010_004290 +2010_004291 +2010_004295 +2010_004296 +2010_004297 +2010_004301 +2010_004304 +2010_004306 +2010_004307 +2010_004311 +2010_004312 +2010_004313 +2010_004314 +2010_004318 +2010_004320 +2010_004322 +2010_004325 +2010_004327 +2010_004332 +2010_004333 +2010_004335 +2010_004336 +2010_004337 +2010_004339 +2010_004341 +2010_004344 +2010_004345 +2010_004346 +2010_004348 +2010_004349 +2010_004350 +2010_004351 +2010_004352 +2010_004355 +2010_004357 +2010_004358 +2010_004360 +2010_004361 +2010_004362 +2010_004363 +2010_004365 +2010_004366 +2010_004367 +2010_004368 +2010_004369 +2010_004370 +2010_004371 +2010_004373 +2010_004374 +2010_004380 +2010_004382 +2010_004385 +2010_004387 +2010_004390 +2010_004391 +2010_004400 +2010_004402 +2010_004404 +2010_004409 +2010_004412 +2010_004415 +2010_004417 +2010_004419 +2010_004420 +2010_004422 +2010_004423 +2010_004425 +2010_004428 +2010_004429 +2010_004431 +2010_004432 +2010_004436 +2010_004439 +2010_004441 +2010_004445 +2010_004447 +2010_004448 +2010_004450 +2010_004451 +2010_004455 +2010_004456 +2010_004457 +2010_004459 +2010_004460 +2010_004461 +2010_004466 +2010_004467 +2010_004469 +2010_004472 +2010_004475 +2010_004476 +2010_004477 +2010_004478 +2010_004479 +2010_004481 +2010_004483 +2010_004484 +2010_004486 +2010_004488 +2010_004491 +2010_004492 +2010_004493 +2010_004499 +2010_004501 +2010_004503 +2010_004505 +2010_004506 +2010_004509 +2010_004511 +2010_004514 +2010_004515 +2010_004517 +2010_004518 +2010_004519 +2010_004520 +2010_004521 +2010_004523 +2010_004529 +2010_004533 +2010_004536 +2010_004537 +2010_004540 +2010_004542 +2010_004543 +2010_004545 +2010_004546 +2010_004550 +2010_004551 +2010_004553 +2010_004554 +2010_004556 +2010_004557 +2010_004558 +2010_004559 +2010_004560 +2010_004561 +2010_004567 +2010_004569 +2010_004570 +2010_004573 +2010_004575 +2010_004576 +2010_004577 +2010_004581 +2010_004584 +2010_004585 +2010_004586 +2010_004588 +2010_004591 +2010_004592 +2010_004594 +2010_004596 +2010_004597 +2010_004598 +2010_004600 +2010_004601 +2010_004604 +2010_004608 +2010_004609 +2010_004616 +2010_004618 +2010_004620 +2010_004621 +2010_004624 +2010_004625 +2010_004627 +2010_004628 +2010_004629 +2010_004631 +2010_004634 +2010_004635 +2010_004637 +2010_004638 +2010_004642 +2010_004646 +2010_004654 +2010_004655 +2010_004656 +2010_004657 +2010_004659 +2010_004660 +2010_004661 +2010_004662 +2010_004665 +2010_004666 +2010_004667 +2010_004669 +2010_004670 +2010_004672 +2010_004676 +2010_004677 +2010_004679 +2010_004680 +2010_004681 +2010_004683 +2010_004686 +2010_004690 +2010_004691 +2010_004692 +2010_004694 +2010_004696 +2010_004697 
+2010_004698 +2010_004703 +2010_004704 +2010_004708 +2010_004710 +2010_004712 +2010_004714 +2010_004717 +2010_004721 +2010_004722 +2010_004726 +2010_004728 +2010_004729 +2010_004730 +2010_004733 +2010_004735 +2010_004738 +2010_004741 +2010_004743 +2010_004747 +2010_004748 +2010_004749 +2010_004750 +2010_004751 +2010_004753 +2010_004756 +2010_004757 +2010_004760 +2010_004763 +2010_004765 +2010_004766 +2010_004768 +2010_004770 +2010_004772 +2010_004773 +2010_004775 +2010_004777 +2010_004778 +2010_004779 +2010_004782 +2010_004783 +2010_004785 +2010_004786 +2010_004789 +2010_004791 +2010_004792 +2010_004793 +2010_004795 +2010_004797 +2010_004804 +2010_004805 +2010_004806 +2010_004807 +2010_004808 +2010_004809 +2010_004812 +2010_004813 +2010_004815 +2010_004816 +2010_004817 +2010_004821 +2010_004822 +2010_004824 +2010_004825 +2010_004826 +2010_004828 +2010_004829 +2010_004830 +2010_004831 +2010_004832 +2010_004836 +2010_004838 +2010_004841 +2010_004844 +2010_004847 +2010_004848 +2010_004849 +2010_004852 +2010_004854 +2010_004855 +2010_004856 +2010_004857 +2010_004861 +2010_004865 +2010_004866 +2010_004868 +2010_004871 +2010_004874 +2010_004877 +2010_004878 +2010_004879 +2010_004888 +2010_004889 +2010_004890 +2010_004891 +2010_004894 +2010_004896 +2010_004900 +2010_004901 +2010_004903 +2010_004906 +2010_004908 +2010_004909 +2010_004910 +2010_004913 +2010_004916 +2010_004917 +2010_004918 +2010_004919 +2010_004921 +2010_004922 +2010_004928 +2010_004930 +2010_004931 +2010_004933 +2010_004937 +2010_004938 +2010_004941 +2010_004942 +2010_004943 +2010_004944 +2010_004945 +2010_004946 +2010_004948 +2010_004950 +2010_004951 +2010_004952 +2010_004953 +2010_004954 +2010_004957 +2010_004959 +2010_004960 +2010_004962 +2010_004963 +2010_004966 +2010_004967 +2010_004968 +2010_004970 +2010_004971 +2010_004973 +2010_004974 +2010_004980 +2010_004982 +2010_004983 +2010_004987 +2010_004989 +2010_004991 +2010_004992 +2010_004994 +2010_004995 +2010_004997 +2010_004998 +2010_005000 +2010_005002 +2010_005005 +2010_005006 +2010_005008 +2010_005011 +2010_005013 +2010_005016 +2010_005017 +2010_005018 +2010_005019 +2010_005021 +2010_005022 +2010_005023 +2010_005026 +2010_005028 +2010_005031 +2010_005033 +2010_005035 +2010_005041 +2010_005042 +2010_005044 +2010_005046 +2010_005048 +2010_005049 +2010_005052 +2010_005053 +2010_005054 +2010_005055 +2010_005059 +2010_005060 +2010_005061 +2010_005062 +2010_005063 +2010_005064 +2010_005066 +2010_005068 +2010_005071 +2010_005072 +2010_005075 +2010_005079 +2010_005080 +2010_005082 +2010_005083 +2010_005087 +2010_005090 +2010_005093 +2010_005094 +2010_005096 +2010_005098 +2010_005099 +2010_005100 +2010_005101 +2010_005106 +2010_005107 +2010_005108 +2010_005109 +2010_005110 +2010_005111 +2010_005115 +2010_005116 +2010_005118 +2010_005119 +2010_005120 +2010_005123 +2010_005127 +2010_005128 +2010_005129 +2010_005130 +2010_005133 +2010_005134 +2010_005136 +2010_005138 +2010_005141 +2010_005143 +2010_005147 +2010_005148 +2010_005149 +2010_005152 +2010_005155 +2010_005158 +2010_005159 +2010_005160 +2010_005161 +2010_005164 +2010_005166 +2010_005167 +2010_005169 +2010_005170 +2010_005174 +2010_005180 +2010_005182 +2010_005183 +2010_005184 +2010_005185 +2010_005187 +2010_005188 +2010_005190 +2010_005192 +2010_005193 +2010_005198 +2010_005199 +2010_005201 +2010_005202 +2010_005206 +2010_005208 +2010_005211 +2010_005213 +2010_005215 +2010_005216 +2010_005217 +2010_005222 +2010_005223 +2010_005224 +2010_005226 +2010_005229 +2010_005230 +2010_005232 +2010_005236 +2010_005238 +2010_005239 
+2010_005241 +2010_005242 +2010_005243 +2010_005245 +2010_005246 +2010_005250 +2010_005252 +2010_005253 +2010_005257 +2010_005258 +2010_005260 +2010_005261 +2010_005264 +2010_005266 +2010_005268 +2010_005270 +2010_005272 +2010_005273 +2010_005274 +2010_005275 +2010_005276 +2010_005277 +2010_005279 +2010_005284 +2010_005285 +2010_005287 +2010_005292 +2010_005293 +2010_005297 +2010_005299 +2010_005301 +2010_005303 +2010_005305 +2010_005306 +2010_005308 +2010_005309 +2010_005310 +2010_005312 +2010_005314 +2010_005317 +2010_005318 +2010_005320 +2010_005323 +2010_005327 +2010_005330 +2010_005331 +2010_005332 +2010_005338 +2010_005340 +2010_005344 +2010_005345 +2010_005346 +2010_005349 +2010_005350 +2010_005352 +2010_005353 +2010_005359 +2010_005361 +2010_005364 +2010_005365 +2010_005366 +2010_005369 +2010_005371 +2010_005372 +2010_005374 +2010_005375 +2010_005376 +2010_005377 +2010_005379 +2010_005382 +2010_005384 +2010_005385 +2010_005386 +2010_005388 +2010_005389 +2010_005391 +2010_005393 +2010_005394 +2010_005398 +2010_005401 +2010_005402 +2010_005403 +2010_005405 +2010_005406 +2010_005408 +2010_005409 +2010_005410 +2010_005414 +2010_005415 +2010_005416 +2010_005417 +2010_005419 +2010_005421 +2010_005424 +2010_005425 +2010_005426 +2010_005428 +2010_005429 +2010_005432 +2010_005433 +2010_005434 +2010_005437 +2010_005441 +2010_005442 +2010_005448 +2010_005450 +2010_005452 +2010_005455 +2010_005456 +2010_005457 +2010_005458 +2010_005462 +2010_005463 +2010_005466 +2010_005467 +2010_005468 +2010_005471 +2010_005472 +2010_005474 +2010_005475 +2010_005480 +2010_005482 +2010_005483 +2010_005484 +2010_005489 +2010_005491 +2010_005492 +2010_005493 +2010_005494 +2010_005496 +2010_005497 +2010_005498 +2010_005500 +2010_005501 +2010_005502 +2010_005505 +2010_005506 +2010_005508 +2010_005511 +2010_005512 +2010_005513 +2010_005514 +2010_005515 +2010_005516 +2010_005518 +2010_005519 +2010_005522 +2010_005527 +2010_005531 +2010_005532 +2010_005534 +2010_005535 +2010_005536 +2010_005538 +2010_005540 +2010_005542 +2010_005543 +2010_005546 +2010_005548 +2010_005551 +2010_005556 +2010_005557 +2010_005559 +2010_005561 +2010_005562 +2010_005565 +2010_005566 +2010_005567 +2010_005570 +2010_005571 +2010_005572 +2010_005573 +2010_005575 +2010_005576 +2010_005578 +2010_005582 +2010_005584 +2010_005585 +2010_005586 +2010_005587 +2010_005588 +2010_005591 +2010_005592 +2010_005593 +2010_005594 +2010_005595 +2010_005596 +2010_005597 +2010_005601 +2010_005603 +2010_005604 +2010_005606 +2010_005608 +2010_005610 +2010_005612 +2010_005614 +2010_005615 +2010_005616 +2010_005619 +2010_005620 +2010_005625 +2010_005626 +2010_005627 +2010_005628 +2010_005629 +2010_005632 +2010_005635 +2010_005636 +2010_005637 +2010_005640 +2010_005643 +2010_005644 +2010_005646 +2010_005647 +2010_005651 +2010_005652 +2010_005654 +2010_005657 +2010_005658 +2010_005663 +2010_005664 +2010_005665 +2010_005666 +2010_005668 +2010_005669 +2010_005670 +2010_005671 +2010_005672 +2010_005676 +2010_005678 +2010_005681 +2010_005683 +2010_005684 +2010_005688 +2010_005692 +2010_005696 +2010_005697 +2010_005700 +2010_005705 +2010_005706 +2010_005709 +2010_005712 +2010_005715 +2010_005716 +2010_005718 +2010_005719 +2010_005721 +2010_005723 +2010_005725 +2010_005727 +2010_005731 +2010_005732 +2010_005733 +2010_005734 +2010_005735 +2010_005736 +2010_005738 +2010_005740 +2010_005744 +2010_005746 +2010_005747 +2010_005748 +2010_005750 +2010_005752 +2010_005753 +2010_005754 +2010_005755 +2010_005756 +2010_005758 +2010_005761 +2010_005762 +2010_005763 +2010_005764 
+2010_005767 +2010_005768 +2010_005770 +2010_005775 +2010_005776 +2010_005777 +2010_005780 +2010_005782 +2010_005784 +2010_005785 +2010_005788 +2010_005791 +2010_005794 +2010_005796 +2010_005800 +2010_005804 +2010_005805 +2010_005806 +2010_005807 +2010_005810 +2010_005815 +2010_005816 +2010_005817 +2010_005820 +2010_005821 +2010_005823 +2010_005824 +2010_005825 +2010_005826 +2010_005827 +2010_005830 +2010_005833 +2010_005835 +2010_005836 +2010_005837 +2010_005838 +2010_005840 +2010_005841 +2010_005843 +2010_005845 +2010_005847 +2010_005848 +2010_005849 +2010_005853 +2010_005854 +2010_005855 +2010_005860 +2010_005865 +2010_005867 +2010_005868 +2010_005870 +2010_005871 +2010_005874 +2010_005875 +2010_005876 +2010_005877 +2010_005882 +2010_005883 +2010_005884 +2010_005885 +2010_005886 +2010_005888 +2010_005891 +2010_005892 +2010_005894 +2010_005896 +2010_005897 +2010_005898 +2010_005899 +2010_005901 +2010_005903 +2010_005904 +2010_005906 +2010_005907 +2010_005909 +2010_005914 +2010_005919 +2010_005921 +2010_005922 +2010_005927 +2010_005928 +2010_005929 +2010_005930 +2010_005932 +2010_005934 +2010_005935 +2010_005936 +2010_005937 +2010_005938 +2010_005942 +2010_005943 +2010_005948 +2010_005949 +2010_005951 +2010_005952 +2010_005953 +2010_005954 +2010_005957 +2010_005958 +2010_005959 +2010_005960 +2010_005967 +2010_005968 +2010_005972 +2010_005973 +2010_005974 +2010_005975 +2010_005976 +2010_005978 +2010_005980 +2010_005981 +2010_005982 +2010_005984 +2010_005985 +2010_005986 +2010_005987 +2010_005991 +2010_005992 +2010_005993 +2010_005995 +2010_005996 +2010_005997 +2010_005998 +2010_006000 +2010_006003 +2010_006004 +2010_006009 +2010_006010 +2010_006011 +2010_006012 +2010_006015 +2010_006021 +2010_006023 +2010_006025 +2010_006026 +2010_006028 +2010_006031 +2010_006032 +2010_006033 +2010_006034 +2010_006035 +2010_006037 +2010_006040 +2010_006041 +2010_006042 +2010_006050 +2010_006051 +2010_006054 +2010_006056 +2010_006057 +2010_006058 +2010_006061 +2010_006062 +2010_006063 +2010_006066 +2010_006067 +2010_006070 +2010_006073 +2010_006076 +2010_006078 +2010_006079 +2010_006082 +2010_006084 +2010_006086 +2011_000002 +2011_000003 +2011_000006 +2011_000007 +2011_000009 +2011_000010 +2011_000012 +2011_000016 +2011_000017 +2011_000022 +2011_000025 +2011_000027 +2011_000028 +2011_000030 +2011_000034 +2011_000036 +2011_000037 +2011_000038 +2011_000041 +2011_000043 +2011_000044 +2011_000045 +2011_000048 +2011_000051 +2011_000052 +2011_000053 +2011_000054 +2011_000057 +2011_000058 +2011_000060 +2011_000061 +2011_000065 +2011_000066 +2011_000068 +2011_000069 +2011_000070 +2011_000071 +2011_000072 +2011_000076 +2011_000077 +2011_000082 +2011_000083 +2011_000084 +2011_000086 +2011_000087 +2011_000090 +2011_000094 +2011_000095 +2011_000096 +2011_000098 +2011_000102 +2011_000103 +2011_000105 +2011_000108 +2011_000109 +2011_000112 +2011_000114 +2011_000116 +2011_000122 +2011_000124 +2011_000128 +2011_000129 +2011_000130 +2011_000137 +2011_000138 +2011_000142 +2011_000145 +2011_000146 +2011_000147 +2011_000149 +2011_000152 +2011_000161 +2011_000162 +2011_000163 +2011_000165 +2011_000166 +2011_000173 +2011_000176 +2011_000178 +2011_000180 +2011_000181 +2011_000182 +2011_000185 +2011_000192 +2011_000194 +2011_000195 +2011_000196 +2011_000197 +2011_000202 +2011_000206 +2011_000208 +2011_000210 +2011_000213 +2011_000214 +2011_000216 +2011_000219 +2011_000220 +2011_000221 +2011_000222 +2011_000224 +2011_000226 +2011_000228 +2011_000229 +2011_000232 +2011_000233 +2011_000234 +2011_000238 +2011_000239 +2011_000241 
+2011_000243 +2011_000246 +2011_000248 +2011_000249 +2011_000250 +2011_000252 +2011_000253 +2011_000257 +2011_000258 +2011_000267 +2011_000268 +2011_000269 +2011_000273 +2011_000276 +2011_000277 +2011_000278 +2011_000282 +2011_000283 +2011_000285 +2011_000286 +2011_000288 +2011_000290 +2011_000291 +2011_000293 +2011_000297 +2011_000299 +2011_000304 +2011_000305 +2011_000307 +2011_000309 +2011_000310 +2011_000312 +2011_000314 +2011_000315 +2011_000317 +2011_000319 +2011_000320 +2011_000321 +2011_000322 +2011_000324 +2011_000329 +2011_000332 +2011_000338 +2011_000342 +2011_000343 +2011_000344 +2011_000345 +2011_000346 +2011_000347 +2011_000359 +2011_000361 +2011_000362 +2011_000364 +2011_000369 +2011_000370 +2011_000374 +2011_000375 +2011_000376 +2011_000379 +2011_000382 +2011_000383 +2011_000385 +2011_000386 +2011_000388 +2011_000391 +2011_000392 +2011_000396 +2011_000397 +2011_000398 +2011_000399 +2011_000400 +2011_000404 +2011_000408 +2011_000412 +2011_000413 +2011_000416 +2011_000418 +2011_000419 +2011_000420 +2011_000426 +2011_000427 +2011_000428 +2011_000430 +2011_000432 +2011_000434 +2011_000435 +2011_000436 +2011_000438 +2011_000442 +2011_000444 +2011_000445 +2011_000449 +2011_000450 +2011_000453 +2011_000454 +2011_000455 +2011_000456 +2011_000457 +2011_000461 +2011_000465 +2011_000468 +2011_000469 +2011_000471 +2011_000472 +2011_000474 +2011_000475 +2011_000477 +2011_000479 +2011_000481 +2011_000482 +2011_000485 +2011_000487 +2011_000491 +2011_000492 +2011_000494 +2011_000496 +2011_000498 +2011_000499 +2011_000502 +2011_000503 +2011_000505 +2011_000509 +2011_000511 +2011_000512 +2011_000513 +2011_000514 +2011_000518 +2011_000519 +2011_000520 +2011_000521 +2011_000526 +2011_000530 +2011_000531 +2011_000532 +2011_000534 +2011_000536 +2011_000538 +2011_000541 +2011_000542 +2011_000548 +2011_000550 +2011_000551 +2011_000554 +2011_000556 +2011_000557 +2011_000558 +2011_000559 +2011_000560 +2011_000565 +2011_000566 +2011_000567 +2011_000569 +2011_000572 +2011_000573 +2011_000575 +2011_000577 +2011_000578 +2011_000579 +2011_000585 +2011_000586 +2011_000589 +2011_000592 +2011_000594 +2011_000596 +2011_000598 +2011_000600 +2011_000607 +2011_000608 +2011_000609 +2011_000612 +2011_000618 +2011_000621 +2011_000622 +2011_000627 +2011_000628 +2011_000629 +2011_000630 +2011_000631 +2011_000634 +2011_000637 +2011_000638 +2011_000641 +2011_000642 +2011_000646 +2011_000651 +2011_000652 +2011_000655 +2011_000656 +2011_000657 +2011_000658 +2011_000661 +2011_000666 +2011_000669 +2011_000673 +2011_000675 +2011_000679 +2011_000682 +2011_000683 +2011_000684 +2011_000685 +2011_000688 +2011_000689 +2011_000690 +2011_000692 +2011_000698 +2011_000701 +2011_000703 +2011_000704 +2011_000709 +2011_000711 +2011_000713 +2011_000718 +2011_000724 +2011_000725 +2011_000730 +2011_000731 +2011_000734 +2011_000743 +2011_000744 +2011_000745 +2011_000747 +2011_000748 +2011_000749 +2011_000753 +2011_000755 +2011_000757 +2011_000758 +2011_000759 +2011_000763 +2011_000765 +2011_000767 +2011_000768 +2011_000769 +2011_000770 +2011_000771 +2011_000772 +2011_000774 +2011_000778 +2011_000780 +2011_000784 +2011_000785 +2011_000788 +2011_000789 +2011_000790 +2011_000791 +2011_000793 +2011_000800 +2011_000804 +2011_000806 +2011_000807 +2011_000809 +2011_000813 +2011_000815 +2011_000819 +2011_000820 +2011_000823 +2011_000824 +2011_000827 +2011_000828 +2011_000829 +2011_000830 +2011_000831 +2011_000834 +2011_000837 +2011_000839 +2011_000840 +2011_000843 +2011_000845 +2011_000847 +2011_000848 +2011_000850 +2011_000851 +2011_000853 
+2011_000855 +2011_000858 +2011_000859 +2011_000872 +2011_000874 +2011_000875 +2011_000882 +2011_000885 +2011_000887 +2011_000888 +2011_000893 +2011_000895 +2011_000897 +2011_000898 +2011_000899 +2011_000900 +2011_000901 +2011_000908 +2011_000909 +2011_000912 +2011_000917 +2011_000919 +2011_000920 +2011_000922 +2011_000927 +2011_000930 +2011_000932 +2011_000933 +2011_000934 +2011_000940 +2011_000944 +2011_000947 +2011_000950 +2011_000951 +2011_000953 +2011_000954 +2011_000957 +2011_000961 +2011_000965 +2011_000969 +2011_000971 +2011_000973 +2011_000975 +2011_000977 +2011_000979 +2011_000981 +2011_000982 +2011_000983 +2011_000986 +2011_000987 +2011_000990 +2011_000991 +2011_000996 +2011_000997 +2011_000999 +2011_001001 +2011_001004 +2011_001005 +2011_001008 +2011_001009 +2011_001010 +2011_001011 +2011_001014 +2011_001015 +2011_001016 +2011_001019 +2011_001020 +2011_001022 +2011_001023 +2011_001025 +2011_001027 +2011_001028 +2011_001029 +2011_001030 +2011_001031 +2011_001032 +2011_001033 +2011_001034 +2011_001036 +2011_001040 +2011_001044 +2011_001047 +2011_001052 +2011_001054 +2011_001055 +2011_001056 +2011_001058 +2011_001060 +2011_001062 +2011_001064 +2011_001066 +2011_001069 +2011_001071 +2011_001073 +2011_001079 +2011_001080 +2011_001081 +2011_001082 +2011_001084 +2011_001086 +2011_001091 +2011_001093 +2011_001097 +2011_001100 +2011_001105 +2011_001106 +2011_001107 +2011_001110 +2011_001111 +2011_001114 +2011_001116 +2011_001117 +2011_001123 +2011_001124 +2011_001126 +2011_001127 +2011_001128 +2011_001133 +2011_001134 +2011_001135 +2011_001136 +2011_001137 +2011_001138 +2011_001139 +2011_001144 +2011_001146 +2011_001149 +2011_001150 +2011_001152 +2011_001153 +2011_001158 +2011_001159 +2011_001160 +2011_001161 +2011_001163 +2011_001166 +2011_001167 +2011_001168 +2011_001169 +2011_001173 +2011_001175 +2011_001176 +2011_001188 +2011_001189 +2011_001190 +2011_001192 +2011_001193 +2011_001198 +2011_001201 +2011_001203 +2011_001208 +2011_001211 +2011_001213 +2011_001215 +2011_001216 +2011_001217 +2011_001220 +2011_001221 +2011_001223 +2011_001226 +2011_001227 +2011_001229 +2011_001232 +2011_001238 +2011_001240 +2011_001245 +2011_001246 +2011_001251 +2011_001252 +2011_001253 +2011_001254 +2011_001255 +2011_001257 +2011_001259 +2011_001260 +2011_001261 +2011_001263 +2011_001264 +2011_001266 +2011_001270 +2011_001271 +2011_001272 +2011_001276 +2011_001277 +2011_001281 +2011_001282 +2011_001283 +2011_001284 +2011_001285 +2011_001286 +2011_001287 +2011_001288 +2011_001290 +2011_001292 +2011_001295 +2011_001302 +2011_001304 +2011_001305 +2011_001310 +2011_001311 +2011_001313 +2011_001315 +2011_001318 +2011_001319 +2011_001320 +2011_001323 +2011_001326 +2011_001327 +2011_001329 +2011_001330 +2011_001333 +2011_001335 +2011_001336 +2011_001337 +2011_001341 +2011_001344 +2011_001346 +2011_001350 +2011_001354 +2011_001355 +2011_001357 +2011_001360 +2011_001366 +2011_001369 +2011_001370 +2011_001373 +2011_001375 +2011_001381 +2011_001382 +2011_001384 +2011_001387 +2011_001388 +2011_001389 +2011_001390 +2011_001394 +2011_001399 +2011_001400 +2011_001402 +2011_001404 +2011_001406 +2011_001407 +2011_001411 +2011_001412 +2011_001414 +2011_001416 +2011_001421 +2011_001422 +2011_001424 +2011_001432 +2011_001434 +2011_001440 +2011_001441 +2011_001447 +2011_001449 +2011_001451 +2011_001455 +2011_001456 +2011_001463 +2011_001464 +2011_001466 +2011_001467 +2011_001471 +2011_001475 +2011_001476 +2011_001479 +2011_001480 +2011_001489 +2011_001498 +2011_001501 +2011_001503 +2011_001505 +2011_001507 +2011_001508 
+2011_001510 +2011_001514 +2011_001518 +2011_001519 +2011_001521 +2011_001524 +2011_001525 +2011_001526 +2011_001529 +2011_001530 +2011_001531 +2011_001532 +2011_001534 +2011_001535 +2011_001536 +2011_001537 +2011_001538 +2011_001541 +2011_001542 +2011_001543 +2011_001544 +2011_001546 +2011_001547 +2011_001549 +2011_001557 +2011_001558 +2011_001560 +2011_001566 +2011_001567 +2011_001568 +2011_001571 +2011_001572 +2011_001573 +2011_001582 +2011_001586 +2011_001589 +2011_001591 +2011_001592 +2011_001596 +2011_001597 +2011_001599 +2011_001600 +2011_001601 +2011_001602 +2011_001605 +2011_001606 +2011_001607 +2011_001608 +2011_001611 +2011_001612 +2011_001613 +2011_001614 +2011_001616 +2011_001618 +2011_001619 +2011_001620 +2011_001621 +2011_001622 +2011_001624 +2011_001625 +2011_001628 +2011_001629 +2011_001632 +2011_001641 +2011_001642 +2011_001643 +2011_001647 +2011_001649 +2011_001650 +2011_001652 +2011_001653 +2011_001655 +2011_001656 +2011_001662 +2011_001663 +2011_001665 +2011_001666 +2011_001669 +2011_001671 +2011_001673 +2011_001674 +2011_001678 +2011_001679 +2011_001689 +2011_001691 +2011_001693 +2011_001694 +2011_001695 +2011_001698 +2011_001699 +2011_001700 +2011_001705 +2011_001707 +2011_001708 +2011_001710 +2011_001712 +2011_001713 +2011_001714 +2011_001715 +2011_001716 +2011_001719 +2011_001720 +2011_001722 +2011_001726 +2011_001727 +2011_001730 +2011_001732 +2011_001733 +2011_001739 +2011_001740 +2011_001741 +2011_001745 +2011_001747 +2011_001748 +2011_001751 +2011_001753 +2011_001754 +2011_001755 +2011_001757 +2011_001764 +2011_001765 +2011_001766 +2011_001769 +2011_001770 +2011_001771 +2011_001775 +2011_001776 +2011_001779 +2011_001782 +2011_001785 +2011_001789 +2011_001790 +2011_001791 +2011_001793 +2011_001794 +2011_001796 +2011_001799 +2011_001800 +2011_001801 +2011_001805 +2011_001806 +2011_001810 +2011_001811 +2011_001812 +2011_001815 +2011_001819 +2011_001820 +2011_001822 +2011_001824 +2011_001825 +2011_001826 +2011_001827 +2011_001833 +2011_001834 +2011_001837 +2011_001840 +2011_001841 +2011_001842 +2011_001845 +2011_001847 +2011_001854 +2011_001855 +2011_001856 +2011_001858 +2011_001862 +2011_001863 +2011_001866 +2011_001868 +2011_001870 +2011_001871 +2011_001872 +2011_001873 +2011_001875 +2011_001876 +2011_001877 +2011_001880 +2011_001884 +2011_001885 +2011_001886 +2011_001889 +2011_001891 +2011_001893 +2011_001895 +2011_001896 +2011_001900 +2011_001901 +2011_001902 +2011_001904 +2011_001906 +2011_001910 +2011_001911 +2011_001914 +2011_001919 +2011_001920 +2011_001922 +2011_001924 +2011_001926 +2011_001927 +2011_001928 +2011_001929 +2011_001930 +2011_001932 +2011_001937 +2011_001938 +2011_001941 +2011_001942 +2011_001944 +2011_001945 +2011_001946 +2011_001949 +2011_001950 +2011_001951 +2011_001952 +2011_001956 +2011_001959 +2011_001961 +2011_001962 +2011_001964 +2011_001966 +2011_001967 +2011_001971 +2011_001972 +2011_001974 +2011_001975 +2011_001977 +2011_001980 +2011_001982 +2011_001984 +2011_001986 +2011_001987 +2011_001988 +2011_001989 +2011_001991 +2011_002002 +2011_002003 +2011_002004 +2011_002005 +2011_002006 +2011_002012 +2011_002016 +2011_002018 +2011_002019 +2011_002021 +2011_002022 +2011_002027 +2011_002031 +2011_002033 +2011_002034 +2011_002036 +2011_002038 +2011_002039 +2011_002040 +2011_002041 +2011_002042 +2011_002044 +2011_002045 +2011_002046 +2011_002047 +2011_002049 +2011_002050 +2011_002053 +2011_002055 +2011_002062 +2011_002063 +2011_002064 +2011_002073 +2011_002074 +2011_002075 +2011_002079 +2011_002085 +2011_002088 +2011_002091 +2011_002093 
+2011_002096 +2011_002097 +2011_002098 +2011_002100 +2011_002102 +2011_002105 +2011_002106 +2011_002107 +2011_002108 +2011_002109 +2011_002110 +2011_002111 +2011_002113 +2011_002114 +2011_002116 +2011_002119 +2011_002121 +2011_002124 +2011_002128 +2011_002131 +2011_002132 +2011_002134 +2011_002135 +2011_002137 +2011_002142 +2011_002143 +2011_002144 +2011_002147 +2011_002148 +2011_002149 +2011_002150 +2011_002154 +2011_002156 +2011_002158 +2011_002159 +2011_002160 +2011_002163 +2011_002167 +2011_002169 +2011_002173 +2011_002174 +2011_002177 +2011_002178 +2011_002179 +2011_002184 +2011_002185 +2011_002186 +2011_002189 +2011_002192 +2011_002193 +2011_002200 +2011_002211 +2011_002215 +2011_002218 +2011_002221 +2011_002222 +2011_002223 +2011_002224 +2011_002227 +2011_002228 +2011_002230 +2011_002234 +2011_002236 +2011_002237 +2011_002239 +2011_002241 +2011_002244 +2011_002245 +2011_002246 +2011_002247 +2011_002248 +2011_002251 +2011_002252 +2011_002253 +2011_002260 +2011_002265 +2011_002268 +2011_002269 +2011_002270 +2011_002272 +2011_002273 +2011_002276 +2011_002278 +2011_002279 +2011_002280 +2011_002281 +2011_002284 +2011_002291 +2011_002292 +2011_002294 +2011_002295 +2011_002298 +2011_002300 +2011_002301 +2011_002303 +2011_002308 +2011_002312 +2011_002317 +2011_002318 +2011_002322 +2011_002324 +2011_002325 +2011_002327 +2011_002330 +2011_002335 +2011_002341 +2011_002343 +2011_002346 +2011_002347 +2011_002348 +2011_002350 +2011_002357 +2011_002358 +2011_002359 +2011_002362 +2011_002365 +2011_002366 +2011_002371 +2011_002379 +2011_002380 +2011_002381 +2011_002384 +2011_002385 +2011_002386 +2011_002387 +2011_002388 +2011_002389 +2011_002391 +2011_002393 +2011_002394 +2011_002395 +2011_002396 +2011_002397 +2011_002398 +2011_002402 +2011_002406 +2011_002407 +2011_002409 +2011_002410 +2011_002413 +2011_002414 +2011_002418 +2011_002419 +2011_002420 +2011_002421 +2011_002422 +2011_002429 +2011_002433 +2011_002435 +2011_002436 +2011_002443 +2011_002447 +2011_002448 +2011_002453 +2011_002455 +2011_002457 +2011_002458 +2011_002459 +2011_002460 +2011_002461 +2011_002462 +2011_002463 +2011_002464 +2011_002470 +2011_002474 +2011_002476 +2011_002479 +2011_002482 +2011_002484 +2011_002488 +2011_002490 +2011_002491 +2011_002492 +2011_002494 +2011_002495 +2011_002498 +2011_002503 +2011_002504 +2011_002505 +2011_002507 +2011_002509 +2011_002511 +2011_002514 +2011_002515 +2011_002516 +2011_002519 +2011_002520 +2011_002526 +2011_002528 +2011_002531 +2011_002532 +2011_002533 +2011_002535 +2011_002536 +2011_002542 +2011_002543 +2011_002548 +2011_002551 +2011_002552 +2011_002553 +2011_002554 +2011_002555 +2011_002556 +2011_002558 +2011_002559 +2011_002560 +2011_002561 +2011_002566 +2011_002567 +2011_002568 +2011_002571 +2011_002575 +2011_002578 +2011_002579 +2011_002582 +2011_002583 +2011_002584 +2011_002585 +2011_002588 +2011_002589 +2011_002590 +2011_002592 +2011_002594 +2011_002598 +2011_002601 +2011_002605 +2011_002606 +2011_002609 +2011_002610 +2011_002612 +2011_002614 +2011_002616 +2011_002617 +2011_002618 +2011_002620 +2011_002623 +2011_002624 +2011_002629 +2011_002631 +2011_002636 +2011_002638 +2011_002639 +2011_002640 +2011_002641 +2011_002644 +2011_002649 +2011_002650 +2011_002652 +2011_002656 +2011_002657 +2011_002658 +2011_002661 +2011_002662 +2011_002664 +2011_002673 +2011_002674 +2011_002675 +2011_002676 +2011_002677 +2011_002678 +2011_002685 +2011_002687 +2011_002694 +2011_002697 +2011_002699 +2011_002706 +2011_002709 +2011_002713 +2011_002714 +2011_002715 +2011_002717 +2011_002719 +2011_002724 
+2011_002725 +2011_002726 +2011_002730 +2011_002738 +2011_002740 +2011_002742 +2011_002746 +2011_002748 +2011_002750 +2011_002751 +2011_002752 +2011_002754 +2011_002756 +2011_002760 +2011_002765 +2011_002766 +2011_002767 +2011_002770 +2011_002772 +2011_002775 +2011_002776 +2011_002779 +2011_002780 +2011_002782 +2011_002784 +2011_002786 +2011_002790 +2011_002795 +2011_002796 +2011_002798 +2011_002802 +2011_002803 +2011_002805 +2011_002808 +2011_002810 +2011_002811 +2011_002812 +2011_002814 +2011_002817 +2011_002818 +2011_002821 +2011_002823 +2011_002826 +2011_002830 +2011_002831 +2011_002833 +2011_002834 +2011_002838 +2011_002841 +2011_002842 +2011_002851 +2011_002852 +2011_002854 +2011_002863 +2011_002864 +2011_002867 +2011_002868 +2011_002870 +2011_002871 +2011_002872 +2011_002873 +2011_002879 +2011_002880 +2011_002881 +2011_002883 +2011_002884 +2011_002885 +2011_002887 +2011_002889 +2011_002890 +2011_002897 +2011_002900 +2011_002908 +2011_002911 +2011_002912 +2011_002913 +2011_002916 +2011_002917 +2011_002920 +2011_002921 +2011_002924 +2011_002925 +2011_002927 +2011_002929 +2011_002930 +2011_002932 +2011_002933 +2011_002935 +2011_002937 +2011_002938 +2011_002940 +2011_002942 +2011_002943 +2011_002944 +2011_002947 +2011_002949 +2011_002951 +2011_002953 +2011_002956 +2011_002958 +2011_002962 +2011_002965 +2011_002966 +2011_002967 +2011_002969 +2011_002970 +2011_002971 +2011_002974 +2011_002975 +2011_002978 +2011_002979 +2011_002983 +2011_002985 +2011_002987 +2011_002988 +2011_002992 +2011_002993 +2011_002994 +2011_002997 +2011_002999 +2011_003002 +2011_003003 +2011_003005 +2011_003010 +2011_003011 +2011_003012 +2011_003013 +2011_003016 +2011_003019 +2011_003020 +2011_003023 +2011_003025 +2011_003027 +2011_003028 +2011_003029 +2011_003030 +2011_003034 +2011_003038 +2011_003039 +2011_003041 +2011_003043 +2011_003044 +2011_003047 +2011_003048 +2011_003049 +2011_003050 +2011_003054 +2011_003055 +2011_003057 +2011_003059 +2011_003063 +2011_003065 +2011_003066 +2011_003073 +2011_003074 +2011_003076 +2011_003078 +2011_003079 +2011_003081 +2011_003085 +2011_003086 +2011_003089 +2011_003091 +2011_003097 +2011_003098 +2011_003103 +2011_003109 +2011_003111 +2011_003114 +2011_003115 +2011_003121 +2011_003124 +2011_003132 +2011_003134 +2011_003138 +2011_003141 +2011_003145 +2011_003146 +2011_003148 +2011_003149 +2011_003150 +2011_003151 +2011_003152 +2011_003154 +2011_003158 +2011_003159 +2011_003162 +2011_003163 +2011_003166 +2011_003167 +2011_003168 +2011_003169 +2011_003171 +2011_003176 +2011_003177 +2011_003182 +2011_003183 +2011_003184 +2011_003185 +2011_003187 +2011_003188 +2011_003192 +2011_003194 +2011_003197 +2011_003201 +2011_003205 +2011_003207 +2011_003211 +2011_003212 +2011_003213 +2011_003216 +2011_003220 +2011_003223 +2011_003228 +2011_003230 +2011_003232 +2011_003236 +2011_003238 +2011_003240 +2011_003242 +2011_003244 +2011_003246 +2011_003247 +2011_003253 +2011_003254 +2011_003255 +2011_003256 +2011_003259 +2011_003260 +2011_003261 +2011_003262 +2011_003269 +2011_003271 +2011_003274 +2011_003275 +2011_003276 diff --git a/ImageSets/Main/val.txt b/ImageSets/Main/val.txt new file mode 100644 index 0000000..0349a31 --- /dev/null +++ b/ImageSets/Main/val.txt @@ -0,0 +1,8333 @@ +000005 +000007 +000009 +000016 +000019 +000020 +000021 +000024 +000030 +000039 +000041 +000046 +000050 +000051 +000052 +000060 +000063 +000065 +000072 +000081 +000093 +000095 +000099 +000101 +000102 +000107 +000109 +000110 +000113 +000117 +000118 +000120 +000121 +000123 +000125 +000130 +000131 +000132 +000142 +000143 
+000146 +000150 +000156 +000158 +000165 +000169 +000170 +000177 +000180 +000184 +000190 +000203 +000208 +000210 +000211 +000214 +000215 +000218 +000221 +000224 +000229 +000232 +000233 +000236 +000241 +000244 +000245 +000246 +000249 +000251 +000257 +000266 +000268 +000269 +000270 +000275 +000285 +000289 +000298 +000302 +000303 +000304 +000305 +000308 +000318 +000321 +000322 +000323 +000328 +000329 +000332 +000336 +000338 +000340 +000343 +000352 +000354 +000363 +000373 +000374 +000380 +000381 +000396 +000403 +000408 +000417 +000419 +000420 +000424 +000427 +000428 +000433 +000435 +000439 +000443 +000448 +000459 +000460 +000461 +000462 +000464 +000480 +000482 +000483 +000486 +000491 +000492 +000494 +000498 +000499 +000500 +000501 +000509 +000513 +000514 +000515 +000520 +000523 +000530 +000531 +000540 +000543 +000545 +000563 +000564 +000579 +000581 +000582 +000588 +000591 +000598 +000599 +000601 +000608 +000610 +000613 +000619 +000626 +000628 +000637 +000645 +000647 +000653 +000656 +000660 +000661 +000663 +000667 +000675 +000676 +000677 +000682 +000684 +000686 +000690 +000694 +000702 +000705 +000707 +000712 +000713 +000714 +000717 +000720 +000728 +000730 +000738 +000742 +000746 +000748 +000750 +000752 +000755 +000756 +000760 +000763 +000771 +000772 +000776 +000777 +000780 +000782 +000786 +000787 +000791 +000794 +000797 +000799 +000800 +000802 +000806 +000808 +000814 +000815 +000816 +000826 +000831 +000832 +000834 +000842 +000843 +000847 +000848 +000854 +000855 +000857 +000862 +000863 +000868 +000872 +000874 +000876 +000878 +000879 +000880 +000882 +000885 +000895 +000896 +000903 +000911 +000917 +000918 +000920 +000921 +000923 +000926 +000931 +000934 +000935 +000937 +000946 +000947 +000948 +000949 +000971 +000972 +000973 +000982 +001004 +001009 +001012 +001017 +001018 +001027 +001028 +001041 +001042 +001045 +001052 +001053 +001056 +001061 +001062 +001066 +001069 +001072 +001074 +001083 +001084 +001091 +001092 +001093 +001097 +001102 +001104 +001107 +001109 +001110 +001121 +001124 +001125 +001136 +001137 +001142 +001143 +001144 +001145 +001148 +001149 +001154 +001160 +001161 +001164 +001166 +001170 +001175 +001176 +001184 +001185 +001186 +001187 +001192 +001199 +001200 +001201 +001203 +001206 +001211 +001215 +001221 +001224 +001225 +001231 +001233 +001236 +001241 +001247 +001250 +001254 +001259 +001260 +001265 +001266 +001272 +001274 +001277 +001281 +001284 +001286 +001288 +001289 +001290 +001292 +001293 +001298 +001310 +001311 +001316 +001324 +001330 +001337 +001341 +001343 +001350 +001352 +001360 +001361 +001362 +001371 +001375 +001383 +001386 +001387 +001397 +001400 +001413 +001430 +001432 +001439 +001441 +001443 +001444 +001445 +001460 +001463 +001464 +001465 +001466 +001467 +001472 +001475 +001481 +001484 +001490 +001493 +001497 +001509 +001510 +001514 +001522 +001523 +001531 +001536 +001537 +001541 +001543 +001544 +001545 +001553 +001554 +001561 +001565 +001571 +001577 +001582 +001588 +001595 +001598 +001603 +001608 +001614 +001617 +001618 +001628 +001632 +001638 +001640 +001642 +001647 +001653 +001675 +001677 +001678 +001682 +001685 +001686 +001689 +001691 +001693 +001718 +001724 +001725 +001726 +001727 +001730 +001746 +001747 +001749 +001755 +001756 +001771 +001772 +001775 +001778 +001782 +001784 +001785 +001793 +001795 +001797 +001799 +001801 +001807 +001816 +001818 +001827 +001830 +001833 +001837 +001842 +001847 +001849 +001855 +001860 +001862 +001872 +001875 +001877 +001878 +001882 +001887 +001888 +001899 +001901 +001907 +001911 +001918 +001920 +001927 +001931 +001932 +001933 +001934 
+001936 +001940 +001944 +001948 +001958 +001962 +001964 +001970 +001972 +001976 +001982 +002000 +002011 +002019 +002021 +002022 +002023 +002024 +002030 +002036 +002045 +002054 +002058 +002063 +002064 +002067 +002070 +002082 +002083 +002086 +002088 +002090 +002091 +002094 +002098 +002099 +002101 +002102 +002109 +002112 +002114 +002124 +002125 +002129 +002135 +002136 +002140 +002142 +002145 +002146 +002152 +002163 +002165 +002169 +002171 +002174 +002181 +002183 +002184 +002190 +002201 +002202 +002209 +002213 +002214 +002218 +002220 +002226 +002228 +002233 +002244 +002248 +002251 +002257 +002259 +002261 +002263 +002266 +002267 +002268 +002270 +002272 +002273 +002276 +002278 +002281 +002285 +002288 +002290 +002300 +002302 +002305 +002308 +002324 +002328 +002329 +002330 +002332 +002333 +002337 +002340 +002343 +002345 +002348 +002352 +002361 +002364 +002366 +002367 +002369 +002371 +002372 +002374 +002375 +002376 +002377 +002378 +002382 +002385 +002387 +002391 +002393 +002404 +002407 +002415 +002417 +002425 +002427 +002435 +002437 +002441 +002444 +002450 +002452 +002454 +002456 +002459 +002460 +002462 +002470 +002476 +002477 +002479 +002491 +002492 +002493 +002497 +002504 +002505 +002508 +002513 +002520 +002523 +002524 +002525 +002529 +002537 +002540 +002542 +002546 +002549 +002561 +002563 +002565 +002566 +002567 +002578 +002584 +002585 +002586 +002589 +002593 +002598 +002600 +002605 +002606 +002613 +002615 +002618 +002621 +002632 +002633 +002636 +002637 +002641 +002643 +002646 +002649 +002657 +002658 +002659 +002667 +002668 +002670 +002675 +002677 +002678 +002689 +002690 +002693 +002695 +002696 +002699 +002706 +002709 +002714 +002717 +002718 +002721 +002723 +002727 +002732 +002734 +002741 +002747 +002751 +002760 +002762 +002767 +002772 +002775 +002776 +002784 +002785 +002786 +002794 +002798 +002800 +002803 +002810 +002812 +002815 +002827 +002833 +002835 +002836 +002838 +002842 +002847 +002854 +002859 +002875 +002879 +002880 +002884 +002886 +002889 +002891 +002893 +002896 +002901 +002910 +002912 +002913 +002915 +002916 +002917 +002924 +002932 +002933 +002935 +002938 +002940 +002941 +002942 +002943 +002944 +002946 +002947 +002952 +002954 +002960 +002963 +002965 +002966 +002967 +002977 +002978 +002984 +002986 +002994 +003000 +003004 +003005 +003008 +003009 +003015 +003017 +003021 +003023 +003028 +003031 +003032 +003038 +003039 +003044 +003045 +003054 +003056 +003057 +003058 +003064 +003065 +003072 +003078 +003082 +003086 +003089 +003090 +003093 +003094 +003098 +003102 +003112 +003117 +003118 +003120 +003121 +003126 +003127 +003129 +003137 +003142 +003154 +003162 +003164 +003170 +003176 +003177 +003178 +003186 +003189 +003194 +003195 +003199 +003200 +003207 +003210 +003213 +003216 +003218 +003219 +003223 +003228 +003239 +003243 +003250 +003255 +003256 +003258 +003262 +003271 +003272 +003274 +003285 +003293 +003294 +003296 +003299 +003300 +003301 +003307 +003311 +003313 +003316 +003325 +003327 +003335 +003344 +003351 +003360 +003362 +003370 +003376 +003377 +003386 +003390 +003391 +003397 +003398 +003403 +003404 +003407 +003410 +003415 +003419 +003422 +003425 +003429 +003435 +003443 +003444 +003449 +003451 +003453 +003455 +003458 +003461 +003462 +003464 +003465 +003468 +003469 +003470 +003492 +003516 +003518 +003519 +003521 +003528 +003530 +003536 +003537 +003546 +003554 +003556 +003566 +003567 +003580 +003587 +003589 +003593 +003594 +003597 +003606 +003611 +003618 +003620 +003623 +003632 +003636 +003638 +003639 +003640 +003648 +003651 +003654 +003655 +003657 +003660 +003667 +003669 +003673 +003674 
+003675 +003684 +003685 +003690 +003691 +003696 +003703 +003706 +003708 +003709 +003711 +003717 +003721 +003722 +003727 +003729 +003750 +003753 +003754 +003760 +003772 +003774 +003780 +003783 +003791 +003793 +003796 +003798 +003803 +003808 +003809 +003814 +003820 +003821 +003826 +003837 +003838 +003844 +003845 +003846 +003848 +003855 +003857 +003863 +003868 +003869 +003871 +003872 +003876 +003877 +003885 +003886 +003891 +003895 +003905 +003911 +003915 +003918 +003919 +003923 +003924 +003926 +003937 +003941 +003946 +003947 +003948 +003954 +003957 +003960 +003963 +003965 +003966 +003973 +003979 +003984 +003986 +003990 +003992 +003994 +003996 +004003 +004010 +004011 +004015 +004020 +004025 +004031 +004039 +004047 +004051 +004057 +004060 +004066 +004069 +004073 +004075 +004076 +004077 +004082 +004085 +004087 +004089 +004102 +004105 +004108 +004110 +004113 +004117 +004122 +004135 +004141 +004142 +004143 +004145 +004148 +004150 +004174 +004178 +004185 +004186 +004191 +004192 +004193 +004194 +004195 +004203 +004204 +004205 +004212 +004229 +004230 +004239 +004246 +004257 +004258 +004259 +004264 +004265 +004274 +004275 +004279 +004284 +004286 +004293 +004295 +004298 +004304 +004310 +004312 +004321 +004323 +004326 +004329 +004331 +004341 +004346 +004349 +004351 +004352 +004354 +004356 +004364 +004368 +004369 +004380 +004384 +004390 +004396 +004397 +004405 +004409 +004411 +004421 +004423 +004424 +004429 +004430 +004432 +004433 +004437 +004438 +004446 +004450 +004455 +004457 +004459 +004463 +004464 +004466 +004468 +004474 +004487 +004488 +004490 +004493 +004494 +004495 +004498 +004499 +004507 +004509 +004512 +004518 +004527 +004528 +004530 +004532 +004535 +004539 +004542 +004552 +004555 +004558 +004574 +004581 +004585 +004588 +004592 +004600 +004601 +004606 +004609 +004618 +004626 +004630 +004632 +004647 +004649 +004652 +004653 +004654 +004655 +004660 +004662 +004672 +004673 +004674 +004676 +004682 +004689 +004692 +004699 +004707 +004708 +004719 +004722 +004727 +004732 +004746 +004750 +004761 +004768 +004770 +004777 +004785 +004786 +004788 +004789 +004796 +004805 +004812 +004814 +004816 +004818 +004825 +004826 +004831 +004834 +004839 +004840 +004850 +004852 +004856 +004859 +004863 +004866 +004867 +004868 +004872 +004878 +004886 +004890 +004895 +004896 +004903 +004912 +004916 +004926 +004928 +004931 +004935 +004936 +004938 +004939 +004943 +004948 +004950 +004953 +004954 +004956 +004960 +004963 +004967 +004977 +004982 +004983 +004985 +004986 +004994 +004997 +004998 +004999 +005003 +005014 +005028 +005036 +005037 +005039 +005042 +005054 +005055 +005056 +005062 +005063 +005064 +005067 +005072 +005077 +005079 +005081 +005085 +005102 +005104 +005110 +005111 +005116 +005128 +005131 +005135 +005136 +005144 +005145 +005146 +005150 +005159 +005160 +005161 +005175 +005176 +005179 +005185 +005195 +005199 +005209 +005210 +005212 +005214 +005220 +005222 +005224 +005229 +005230 +005239 +005242 +005248 +005253 +005254 +005263 +005264 +005267 +005268 +005270 +005274 +005278 +005281 +005293 +005298 +005305 +005306 +005312 +005314 +005315 +005319 +005320 +005325 +005326 +005328 +005331 +005340 +005343 +005346 +005348 +005349 +005350 +005352 +005355 +005365 +005367 +005370 +005371 +005378 +005379 +005380 +005383 +005384 +005385 +005393 +005395 +005397 +005398 +005407 +005416 +005418 +005419 +005421 +005423 +005429 +005430 +005431 +005434 +005436 +005438 +005439 +005441 +005454 +005461 +005465 +005469 +005470 +005471 +005475 +005481 +005485 +005486 +005497 +005507 +005510 +005517 +005518 +005521 +005522 +005530 +005531 
+005535 +005539 +005549 +005550 +005552 +005554 +005559 +005573 +005576 +005577 +005583 +005584 +005586 +005588 +005590 +005593 +005606 +005608 +005613 +005614 +005615 +005618 +005620 +005629 +005640 +005641 +005645 +005647 +005652 +005653 +005655 +005657 +005660 +005662 +005664 +005672 +005674 +005676 +005679 +005682 +005685 +005687 +005693 +005696 +005701 +005702 +005714 +005716 +005719 +005723 +005729 +005732 +005736 +005741 +005743 +005747 +005749 +005755 +005760 +005761 +005762 +005768 +005773 +005779 +005781 +005788 +005790 +005791 +005794 +005799 +005811 +005812 +005815 +005818 +005819 +005825 +005828 +005829 +005830 +005839 +005841 +005845 +005852 +005853 +005854 +005856 +005863 +005868 +005874 +005875 +005877 +005878 +005879 +005894 +005897 +005906 +005912 +005914 +005917 +005919 +005928 +005940 +005952 +005954 +005956 +005963 +005968 +005970 +005975 +005979 +005981 +005985 +005988 +005989 +005991 +005995 +005996 +005998 +006000 +006001 +006005 +006012 +006018 +006026 +006027 +006028 +006029 +006035 +006041 +006042 +006045 +006046 +006055 +006058 +006062 +006069 +006071 +006084 +006089 +006097 +006098 +006107 +006108 +006111 +006117 +006120 +006124 +006125 +006129 +006133 +006136 +006139 +006146 +006148 +006150 +006151 +006153 +006159 +006161 +006163 +006184 +006185 +006188 +006190 +006198 +006201 +006202 +006203 +006206 +006209 +006212 +006214 +006215 +006216 +006218 +006219 +006220 +006222 +006233 +006234 +006235 +006240 +006241 +006249 +006252 +006254 +006258 +006259 +006260 +006269 +006276 +006277 +006281 +006282 +006284 +006286 +006295 +006296 +006300 +006301 +006306 +006309 +006314 +006318 +006319 +006321 +006323 +006325 +006330 +006335 +006337 +006338 +006339 +006346 +006348 +006350 +006351 +006355 +006357 +006377 +006385 +006387 +006391 +006392 +006396 +006398 +006404 +006409 +006421 +006424 +006425 +006428 +006430 +006437 +006440 +006443 +006444 +006445 +006449 +006450 +006456 +006463 +006465 +006468 +006473 +006480 +006484 +006488 +006492 +006497 +006507 +006509 +006512 +006519 +006520 +006529 +006530 +006532 +006534 +006538 +006542 +006543 +006553 +006562 +006565 +006570 +006572 +006575 +006576 +006578 +006583 +006584 +006585 +006587 +006588 +006593 +006599 +006603 +006606 +006611 +006617 +006618 +006619 +006621 +006625 +006628 +006631 +006632 +006643 +006645 +006647 +006657 +006661 +006664 +006666 +006667 +006668 +006670 +006671 +006673 +006677 +006678 +006679 +006681 +006682 +006687 +006690 +006696 +006699 +006702 +006709 +006718 +006719 +006722 +006725 +006730 +006739 +006747 +006751 +006759 +006760 +006761 +006762 +006765 +006768 +006769 +006772 +006783 +006786 +006789 +006797 +006799 +006800 +006802 +006803 +006808 +006813 +006814 +006819 +006821 +006827 +006828 +006829 +006835 +006838 +006841 +006842 +006850 +006855 +006859 +006860 +006862 +006865 +006867 +006876 +006878 +006880 +006884 +006886 +006892 +006903 +006908 +006918 +006922 +006924 +006932 +006933 +006934 +006935 +006940 +006944 +006945 +006949 +006952 +006953 +006956 +006962 +006963 +006965 +006966 +006972 +006981 +006987 +006988 +006989 +006990 +006994 +006995 +007004 +007008 +007009 +007020 +007021 +007022 +007031 +007035 +007038 +007042 +007046 +007048 +007049 +007052 +007054 +007056 +007058 +007059 +007065 +007068 +007070 +007071 +007074 +007077 +007084 +007086 +007097 +007100 +007101 +007104 +007109 +007114 +007117 +007122 +007123 +007132 +007139 +007140 +007141 +007144 +007146 +007147 +007148 +007149 +007153 +007162 +007165 +007167 +007172 +007174 +007187 +007189 +007191 +007200 +007204 +007208 
+007210 +007211 +007212 +007215 +007216 +007217 +007224 +007227 +007230 +007236 +007244 +007245 +007247 +007249 +007258 +007259 +007260 +007266 +007270 +007274 +007275 +007276 +007280 +007283 +007284 +007292 +007294 +007296 +007297 +007299 +007300 +007302 +007311 +007314 +007318 +007329 +007330 +007343 +007344 +007346 +007350 +007356 +007359 +007363 +007372 +007374 +007376 +007383 +007388 +007390 +007408 +007414 +007416 +007422 +007424 +007427 +007432 +007433 +007435 +007436 +007438 +007439 +007443 +007445 +007448 +007449 +007451 +007457 +007460 +007461 +007465 +007470 +007475 +007480 +007482 +007484 +007486 +007489 +007498 +007506 +007511 +007517 +007523 +007525 +007527 +007528 +007533 +007537 +007543 +007546 +007547 +007551 +007555 +007559 +007563 +007568 +007571 +007576 +007579 +007585 +007592 +007603 +007605 +007612 +007614 +007615 +007618 +007622 +007624 +007626 +007639 +007640 +007642 +007647 +007649 +007650 +007656 +007657 +007662 +007664 +007666 +007668 +007670 +007671 +007672 +007673 +007675 +007677 +007678 +007679 +007680 +007682 +007687 +007688 +007691 +007694 +007702 +007705 +007709 +007712 +007715 +007720 +007723 +007724 +007727 +007732 +007742 +007743 +007745 +007746 +007754 +007758 +007760 +007763 +007765 +007768 +007772 +007773 +007776 +007779 +007786 +007793 +007798 +007799 +007812 +007813 +007815 +007824 +007826 +007833 +007834 +007841 +007843 +007845 +007855 +007856 +007857 +007865 +007868 +007869 +007873 +007886 +007889 +007890 +007897 +007899 +007902 +007909 +007916 +007919 +007920 +007921 +007924 +007928 +007931 +007933 +007935 +007943 +007946 +007947 +007950 +007954 +007956 +007958 +007970 +007971 +007979 +007984 +007987 +007997 +007998 +007999 +008002 +008009 +008023 +008024 +008029 +008031 +008032 +008033 +008036 +008048 +008057 +008060 +008061 +008068 +008069 +008085 +008086 +008087 +008091 +008100 +008101 +008103 +008105 +008107 +008112 +008115 +008122 +008125 +008132 +008138 +008140 +008141 +008144 +008151 +008159 +008160 +008168 +008171 +008173 +008175 +008177 +008180 +008189 +008190 +008191 +008200 +008208 +008209 +008220 +008222 +008224 +008225 +008229 +008236 +008241 +008244 +008251 +008258 +008268 +008275 +008279 +008281 +008284 +008285 +008292 +008293 +008294 +008295 +008297 +008299 +008300 +008306 +008307 +008318 +008319 +008320 +008323 +008326 +008327 +008329 +008335 +008345 +008349 +008355 +008359 +008364 +008365 +008368 +008370 +008376 +008386 +008387 +008390 +008410 +008413 +008415 +008416 +008423 +008424 +008429 +008430 +008433 +008434 +008438 +008444 +008450 +008454 +008461 +008472 +008484 +008485 +008492 +008494 +008498 +008499 +008502 +008503 +008509 +008512 +008513 +008514 +008518 +008519 +008521 +008522 +008524 +008526 +008534 +008535 +008541 +008542 +008553 +008556 +008557 +008562 +008564 +008572 +008573 +008576 +008582 +008584 +008586 +008592 +008601 +008604 +008606 +008607 +008608 +008612 +008620 +008621 +008624 +008635 +008636 +008638 +008639 +008644 +008647 +008653 +008654 +008667 +008680 +008683 +008687 +008692 +008695 +008698 +008701 +008709 +008713 +008716 +008717 +008718 +008722 +008728 +008730 +008733 +008739 +008742 +008747 +008749 +008752 +008753 +008759 +008766 +008769 +008772 +008773 +008775 +008793 +008796 +008799 +008801 +008805 +008810 +008817 +008822 +008823 +008826 +008831 +008833 +008835 +008836 +008837 +008843 +008848 +008849 +008854 +008858 +008859 +008867 +008871 +008873 +008874 +008876 +008880 +008884 +008888 +008890 +008892 +008911 +008913 +008914 +008917 +008919 +008921 +008927 +008931 +008940 +008942 +008943 +008951 
+008953 +008955 +008965 +008976 +008982 +008983 +008997 +009002 +009006 +009007 +009015 +009019 +009022 +009024 +009034 +009035 +009037 +009039 +009048 +009051 +009053 +009060 +009064 +009072 +009079 +009085 +009087 +009089 +009091 +009094 +009105 +009112 +009113 +009116 +009126 +009128 +009129 +009131 +009133 +009138 +009141 +009147 +009150 +009151 +009155 +009157 +009159 +009162 +009163 +009168 +009174 +009177 +009178 +009179 +009180 +009186 +009187 +009189 +009192 +009193 +009194 +009195 +009202 +009212 +009213 +009221 +009224 +009236 +009239 +009244 +009246 +009247 +009249 +009250 +009254 +009268 +009273 +009278 +009279 +009281 +009282 +009286 +009291 +009303 +009309 +009312 +009315 +009323 +009326 +009330 +009331 +009334 +009337 +009347 +009348 +009349 +009350 +009351 +009354 +009368 +009371 +009373 +009374 +009375 +009378 +009382 +009401 +009405 +009408 +009412 +009414 +009433 +009437 +009438 +009439 +009440 +009443 +009445 +009448 +009454 +009455 +009456 +009457 +009459 +009461 +009464 +009468 +009470 +009472 +009477 +009479 +009480 +009481 +009484 +009494 +009500 +009502 +009507 +009517 +009519 +009527 +009531 +009532 +009533 +009540 +009543 +009546 +009550 +009558 +009560 +009565 +009567 +009568 +009571 +009580 +009586 +009588 +009591 +009597 +009598 +009603 +009611 +009617 +009619 +009620 +009627 +009636 +009641 +009647 +009649 +009655 +009658 +009667 +009670 +009676 +009678 +009681 +009685 +009686 +009687 +009692 +009695 +009698 +009699 +009700 +009706 +009710 +009711 +009712 +009719 +009724 +009726 +009732 +009737 +009738 +009743 +009745 +009746 +009747 +009748 +009754 +009758 +009761 +009764 +009767 +009772 +009773 +009778 +009780 +009781 +009785 +009794 +009796 +009801 +009809 +009816 +009819 +009822 +009823 +009831 +009833 +009836 +009841 +009858 +009862 +009863 +009865 +009870 +009880 +009881 +009886 +009894 +009897 +009898 +009900 +009902 +009905 +009908 +009913 +009917 +009923 +009932 +009935 +009939 +009946 +009947 +009950 +009954 +009955 +009958 +2008_000002 +2008_000003 +2008_000007 +2008_000009 +2008_000016 +2008_000021 +2008_000026 +2008_000027 +2008_000032 +2008_000034 +2008_000042 +2008_000043 +2008_000050 +2008_000051 +2008_000052 +2008_000054 +2008_000056 +2008_000059 +2008_000062 +2008_000064 +2008_000067 +2008_000073 +2008_000075 +2008_000076 +2008_000078 +2008_000080 +2008_000082 +2008_000084 +2008_000090 +2008_000107 +2008_000115 +2008_000116 +2008_000119 +2008_000120 +2008_000123 +2008_000133 +2008_000134 +2008_000138 +2008_000140 +2008_000145 +2008_000149 +2008_000163 +2008_000174 +2008_000177 +2008_000182 +2008_000183 +2008_000190 +2008_000194 +2008_000195 +2008_000203 +2008_000204 +2008_000213 +2008_000215 +2008_000219 +2008_000222 +2008_000223 +2008_000233 +2008_000234 +2008_000239 +2008_000243 +2008_000244 +2008_000246 +2008_000251 +2008_000253 +2008_000254 +2008_000257 +2008_000261 +2008_000264 +2008_000268 +2008_000270 +2008_000271 +2008_000272 +2008_000274 +2008_000277 +2008_000278 +2008_000281 +2008_000298 +2008_000304 +2008_000305 +2008_000306 +2008_000307 +2008_000321 +2008_000328 +2008_000339 +2008_000340 +2008_000345 +2008_000354 +2008_000358 +2008_000359 +2008_000367 +2008_000373 +2008_000376 +2008_000378 +2008_000381 +2008_000382 +2008_000383 +2008_000391 +2008_000398 +2008_000401 +2008_000403 +2008_000406 +2008_000407 +2008_000408 +2008_000413 +2008_000414 +2008_000418 +2008_000419 +2008_000423 +2008_000424 +2008_000446 +2008_000452 +2008_000457 +2008_000464 +2008_000465 +2008_000466 +2008_000469 +2008_000472 +2008_000473 +2008_000474 
+2008_000475 +2008_000481 +2008_000489 +2008_000492 +2008_000496 +2008_000498 +2008_000501 +2008_000510 +2008_000511 +2008_000516 +2008_000519 +2008_000522 +2008_000532 +2008_000533 +2008_000535 +2008_000536 +2008_000541 +2008_000547 +2008_000553 +2008_000558 +2008_000562 +2008_000564 +2008_000566 +2008_000568 +2008_000569 +2008_000573 +2008_000579 +2008_000581 +2008_000589 +2008_000599 +2008_000602 +2008_000605 +2008_000609 +2008_000614 +2008_000620 +2008_000622 +2008_000623 +2008_000629 +2008_000630 +2008_000634 +2008_000640 +2008_000647 +2008_000652 +2008_000656 +2008_000657 +2008_000659 +2008_000660 +2008_000661 +2008_000662 +2008_000666 +2008_000670 +2008_000673 +2008_000677 +2008_000690 +2008_000691 +2008_000695 +2008_000697 +2008_000699 +2008_000700 +2008_000705 +2008_000706 +2008_000714 +2008_000725 +2008_000727 +2008_000731 +2008_000734 +2008_000737 +2008_000740 +2008_000745 +2008_000748 +2008_000765 +2008_000769 +2008_000776 +2008_000780 +2008_000782 +2008_000783 +2008_000788 +2008_000793 +2008_000795 +2008_000796 +2008_000803 +2008_000804 +2008_000805 +2008_000806 +2008_000811 +2008_000817 +2008_000825 +2008_000828 +2008_000834 +2008_000835 +2008_000837 +2008_000839 +2008_000848 +2008_000853 +2008_000857 +2008_000858 +2008_000863 +2008_000864 +2008_000868 +2008_000876 +2008_000878 +2008_000880 +2008_000884 +2008_000885 +2008_000897 +2008_000904 +2008_000910 +2008_000911 +2008_000916 +2008_000917 +2008_000919 +2008_000922 +2008_000931 +2008_000936 +2008_000939 +2008_000940 +2008_000942 +2008_000943 +2008_000950 +2008_000952 +2008_000956 +2008_000957 +2008_000960 +2008_000964 +2008_000965 +2008_000971 +2008_000972 +2008_000976 +2008_000982 +2008_000984 +2008_000992 +2008_000993 +2008_001004 +2008_001007 +2008_001009 +2008_001012 +2008_001013 +2008_001024 +2008_001028 +2008_001034 +2008_001040 +2008_001041 +2008_001046 +2008_001055 +2008_001060 +2008_001062 +2008_001063 +2008_001066 +2008_001068 +2008_001070 +2008_001074 +2008_001075 +2008_001076 +2008_001077 +2008_001078 +2008_001080 +2008_001089 +2008_001090 +2008_001092 +2008_001098 +2008_001099 +2008_001111 +2008_001113 +2008_001114 +2008_001120 +2008_001121 +2008_001122 +2008_001135 +2008_001136 +2008_001139 +2008_001140 +2008_001142 +2008_001150 +2008_001154 +2008_001155 +2008_001160 +2008_001166 +2008_001167 +2008_001168 +2008_001170 +2008_001177 +2008_001183 +2008_001185 +2008_001192 +2008_001194 +2008_001199 +2008_001205 +2008_001210 +2008_001218 +2008_001220 +2008_001221 +2008_001225 +2008_001226 +2008_001227 +2008_001231 +2008_001236 +2008_001241 +2008_001248 +2008_001249 +2008_001255 +2008_001257 +2008_001260 +2008_001262 +2008_001264 +2008_001271 +2008_001275 +2008_001283 +2008_001284 +2008_001290 +2008_001296 +2008_001301 +2008_001304 +2008_001306 +2008_001308 +2008_001314 +2008_001318 +2008_001320 +2008_001322 +2008_001333 +2008_001334 +2008_001338 +2008_001340 +2008_001344 +2008_001349 +2008_001350 +2008_001353 +2008_001356 +2008_001366 +2008_001367 +2008_001369 +2008_001374 +2008_001376 +2008_001379 +2008_001380 +2008_001388 +2008_001391 +2008_001395 +2008_001401 +2008_001404 +2008_001406 +2008_001410 +2008_001415 +2008_001427 +2008_001428 +2008_001429 +2008_001430 +2008_001432 +2008_001433 +2008_001436 +2008_001437 +2008_001439 +2008_001445 +2008_001451 +2008_001456 +2008_001466 +2008_001468 +2008_001470 +2008_001475 +2008_001478 +2008_001481 +2008_001486 +2008_001491 +2008_001494 +2008_001503 +2008_001504 +2008_001513 +2008_001514 +2008_001516 +2008_001520 +2008_001522 +2008_001527 +2008_001531 +2008_001534 
+2008_001536 +2008_001539 +2008_001540 +2008_001542 +2008_001543 +2008_001544 +2008_001546 +2008_001547 +2008_001549 +2008_001551 +2008_001553 +2008_001564 +2008_001574 +2008_001575 +2008_001580 +2008_001586 +2008_001589 +2008_001590 +2008_001593 +2008_001594 +2008_001596 +2008_001598 +2008_001602 +2008_001605 +2008_001607 +2008_001613 +2008_001619 +2008_001622 +2008_001624 +2008_001625 +2008_001629 +2008_001636 +2008_001638 +2008_001640 +2008_001648 +2008_001649 +2008_001655 +2008_001659 +2008_001660 +2008_001663 +2008_001667 +2008_001668 +2008_001669 +2008_001676 +2008_001680 +2008_001681 +2008_001682 +2008_001688 +2008_001697 +2008_001702 +2008_001712 +2008_001714 +2008_001715 +2008_001717 +2008_001722 +2008_001723 +2008_001727 +2008_001730 +2008_001731 +2008_001736 +2008_001742 +2008_001745 +2008_001750 +2008_001757 +2008_001763 +2008_001764 +2008_001765 +2008_001769 +2008_001772 +2008_001773 +2008_001774 +2008_001782 +2008_001784 +2008_001792 +2008_001799 +2008_001802 +2008_001805 +2008_001806 +2008_001808 +2008_001810 +2008_001812 +2008_001814 +2008_001815 +2008_001816 +2008_001820 +2008_001821 +2008_001823 +2008_001825 +2008_001830 +2008_001838 +2008_001841 +2008_001843 +2008_001850 +2008_001858 +2008_001862 +2008_001863 +2008_001867 +2008_001869 +2008_001871 +2008_001874 +2008_001885 +2008_001895 +2008_001899 +2008_001905 +2008_001907 +2008_001908 +2008_001909 +2008_001910 +2008_001914 +2008_001919 +2008_001920 +2008_001928 +2008_001930 +2008_001932 +2008_001934 +2008_001945 +2008_001946 +2008_001951 +2008_001958 +2008_001961 +2008_001965 +2008_001966 +2008_001969 +2008_001971 +2008_001978 +2008_001979 +2008_001985 +2008_001987 +2008_001989 +2008_001992 +2008_001998 +2008_002003 +2008_002004 +2008_002007 +2008_002011 +2008_002013 +2008_002017 +2008_002021 +2008_002026 +2008_002031 +2008_002035 +2008_002036 +2008_002037 +2008_002039 +2008_002042 +2008_002043 +2008_002045 +2008_002046 +2008_002047 +2008_002052 +2008_002069 +2008_002071 +2008_002082 +2008_002084 +2008_002086 +2008_002088 +2008_002092 +2008_002098 +2008_002099 +2008_002107 +2008_002113 +2008_002114 +2008_002115 +2008_002124 +2008_002132 +2008_002138 +2008_002140 +2008_002144 +2008_002146 +2008_002151 +2008_002152 +2008_002153 +2008_002155 +2008_002158 +2008_002167 +2008_002169 +2008_002172 +2008_002176 +2008_002179 +2008_002185 +2008_002191 +2008_002193 +2008_002194 +2008_002198 +2008_002199 +2008_002201 +2008_002205 +2008_002207 +2008_002209 +2008_002212 +2008_002222 +2008_002223 +2008_002231 +2008_002234 +2008_002239 +2008_002240 +2008_002241 +2008_002250 +2008_002259 +2008_002267 +2008_002269 +2008_002272 +2008_002273 +2008_002283 +2008_002292 +2008_002293 +2008_002298 +2008_002305 +2008_002312 +2008_002314 +2008_002321 +2008_002322 +2008_002324 +2008_002328 +2008_002330 +2008_002347 +2008_002349 +2008_002356 +2008_002358 +2008_002359 +2008_002366 +2008_002372 +2008_002374 +2008_002378 +2008_002379 +2008_002383 +2008_002384 +2008_002395 +2008_002401 +2008_002403 +2008_002404 +2008_002408 +2008_002410 +2008_002412 +2008_002414 +2008_002419 +2008_002424 +2008_002428 +2008_002429 +2008_002430 +2008_002436 +2008_002438 +2008_002439 +2008_002444 +2008_002445 +2008_002446 +2008_002451 +2008_002452 +2008_002454 +2008_002456 +2008_002464 +2008_002467 +2008_002470 +2008_002477 +2008_002481 +2008_002483 +2008_002485 +2008_002492 +2008_002494 +2008_002495 +2008_002499 +2008_002502 +2008_002504 +2008_002508 +2008_002509 +2008_002510 +2008_002512 +2008_002516 +2008_002521 +2008_002523 +2008_002526 +2008_002527 +2008_002536 
+2008_002540 +2008_002542 +2008_002558 +2008_002564 +2008_002567 +2008_002576 +2008_002579 +2008_002588 +2008_002589 +2008_002590 +2008_002597 +2008_002598 +2008_002599 +2008_002603 +2008_002606 +2008_002610 +2008_002616 +2008_002623 +2008_002624 +2008_002631 +2008_002639 +2008_002640 +2008_002643 +2008_002645 +2008_002652 +2008_002653 +2008_002670 +2008_002673 +2008_002677 +2008_002678 +2008_002680 +2008_002681 +2008_002682 +2008_002684 +2008_002687 +2008_002696 +2008_002700 +2008_002701 +2008_002705 +2008_002709 +2008_002714 +2008_002715 +2008_002716 +2008_002720 +2008_002725 +2008_002732 +2008_002735 +2008_002738 +2008_002746 +2008_002751 +2008_002752 +2008_002753 +2008_002756 +2008_002766 +2008_002768 +2008_002773 +2008_002775 +2008_002778 +2008_002783 +2008_002789 +2008_002792 +2008_002795 +2008_002801 +2008_002806 +2008_002809 +2008_002811 +2008_002814 +2008_002817 +2008_002820 +2008_002826 +2008_002829 +2008_002830 +2008_002831 +2008_002835 +2008_002838 +2008_002843 +2008_002845 +2008_002847 +2008_002852 +2008_002859 +2008_002860 +2008_002864 +2008_002869 +2008_002870 +2008_002875 +2008_002876 +2008_002879 +2008_002882 +2008_002883 +2008_002897 +2008_002899 +2008_002900 +2008_002904 +2008_002906 +2008_002908 +2008_002909 +2008_002910 +2008_002920 +2008_002926 +2008_002929 +2008_002932 +2008_002936 +2008_002942 +2008_002946 +2008_002947 +2008_002956 +2008_002958 +2008_002965 +2008_002968 +2008_002971 +2008_002973 +2008_002992 +2008_002999 +2008_003001 +2008_003003 +2008_003005 +2008_003008 +2008_003020 +2008_003022 +2008_003026 +2008_003030 +2008_003034 +2008_003045 +2008_003051 +2008_003052 +2008_003053 +2008_003055 +2008_003056 +2008_003062 +2008_003067 +2008_003072 +2008_003073 +2008_003076 +2008_003082 +2008_003088 +2008_003089 +2008_003090 +2008_003095 +2008_003100 +2008_003104 +2008_003105 +2008_003106 +2008_003107 +2008_003108 +2008_003110 +2008_003132 +2008_003133 +2008_003135 +2008_003136 +2008_003141 +2008_003144 +2008_003152 +2008_003155 +2008_003161 +2008_003167 +2008_003170 +2008_003178 +2008_003181 +2008_003186 +2008_003187 +2008_003189 +2008_003193 +2008_003202 +2008_003205 +2008_003210 +2008_003211 +2008_003220 +2008_003222 +2008_003225 +2008_003228 +2008_003232 +2008_003238 +2008_003239 +2008_003245 +2008_003249 +2008_003255 +2008_003256 +2008_003263 +2008_003270 +2008_003271 +2008_003278 +2008_003280 +2008_003286 +2008_003289 +2008_003291 +2008_003295 +2008_003300 +2008_003305 +2008_003316 +2008_003320 +2008_003326 +2008_003330 +2008_003331 +2008_003333 +2008_003334 +2008_003336 +2008_003343 +2008_003344 +2008_003348 +2008_003350 +2008_003351 +2008_003359 +2008_003361 +2008_003369 +2008_003374 +2008_003379 +2008_003382 +2008_003384 +2008_003395 +2008_003402 +2008_003405 +2008_003407 +2008_003420 +2008_003423 +2008_003424 +2008_003432 +2008_003433 +2008_003439 +2008_003442 +2008_003443 +2008_003449 +2008_003451 +2008_003453 +2008_003461 +2008_003464 +2008_003466 +2008_003467 +2008_003472 +2008_003475 +2008_003476 +2008_003477 +2008_003479 +2008_003482 +2008_003483 +2008_003484 +2008_003492 +2008_003493 +2008_003499 +2008_003511 +2008_003514 +2008_003519 +2008_003524 +2008_003526 +2008_003531 +2008_003542 +2008_003545 +2008_003546 +2008_003547 +2008_003552 +2008_003557 +2008_003565 +2008_003572 +2008_003576 +2008_003577 +2008_003580 +2008_003591 +2008_003592 +2008_003593 +2008_003598 +2008_003604 +2008_003607 +2008_003609 +2008_003610 +2008_003613 +2008_003618 +2008_003619 +2008_003621 +2008_003624 +2008_003636 +2008_003638 +2008_003647 +2008_003650 +2008_003658 
+2008_003662 +2008_003671 +2008_003672 +2008_003673 +2008_003676 +2008_003680 +2008_003681 +2008_003683 +2008_003684 +2008_003694 +2008_003704 +2008_003709 +2008_003713 +2008_003718 +2008_003720 +2008_003721 +2008_003722 +2008_003733 +2008_003737 +2008_003743 +2008_003744 +2008_003745 +2008_003749 +2008_003753 +2008_003754 +2008_003755 +2008_003756 +2008_003763 +2008_003766 +2008_003767 +2008_003768 +2008_003772 +2008_003775 +2008_003777 +2008_003780 +2008_003782 +2008_003789 +2008_003793 +2008_003794 +2008_003799 +2008_003800 +2008_003801 +2008_003805 +2008_003812 +2008_003813 +2008_003820 +2008_003821 +2008_003825 +2008_003826 +2008_003827 +2008_003829 +2008_003830 +2008_003835 +2008_003838 +2008_003840 +2008_003843 +2008_003844 +2008_003846 +2008_003856 +2008_003858 +2008_003860 +2008_003868 +2008_003873 +2008_003874 +2008_003876 +2008_003881 +2008_003884 +2008_003885 +2008_003886 +2008_003894 +2008_003904 +2008_003905 +2008_003915 +2008_003921 +2008_003924 +2008_003926 +2008_003929 +2008_003932 +2008_003933 +2008_003940 +2008_003941 +2008_003943 +2008_003944 +2008_003945 +2008_003948 +2008_003951 +2008_003958 +2008_003962 +2008_003965 +2008_003969 +2008_003971 +2008_003976 +2008_003983 +2008_003988 +2008_003989 +2008_003996 +2008_003997 +2008_004002 +2008_004003 +2008_004006 +2008_004007 +2008_004015 +2008_004016 +2008_004018 +2008_004020 +2008_004027 +2008_004030 +2008_004040 +2008_004045 +2008_004046 +2008_004048 +2008_004054 +2008_004056 +2008_004058 +2008_004064 +2008_004069 +2008_004071 +2008_004075 +2008_004076 +2008_004081 +2008_004088 +2008_004090 +2008_004093 +2008_004101 +2008_004103 +2008_004105 +2008_004119 +2008_004121 +2008_004123 +2008_004124 +2008_004125 +2008_004126 +2008_004127 +2008_004135 +2008_004137 +2008_004140 +2008_004142 +2008_004155 +2008_004166 +2008_004174 +2008_004175 +2008_004178 +2008_004182 +2008_004188 +2008_004189 +2008_004190 +2008_004198 +2008_004203 +2008_004205 +2008_004212 +2008_004213 +2008_004214 +2008_004216 +2008_004221 +2008_004230 +2008_004234 +2008_004242 +2008_004243 +2008_004245 +2008_004251 +2008_004257 +2008_004258 +2008_004263 +2008_004270 +2008_004271 +2008_004273 +2008_004278 +2008_004279 +2008_004289 +2008_004290 +2008_004292 +2008_004297 +2008_004306 +2008_004308 +2008_004312 +2008_004317 +2008_004318 +2008_004324 +2008_004326 +2008_004327 +2008_004330 +2008_004333 +2008_004339 +2008_004344 +2008_004345 +2008_004347 +2008_004348 +2008_004354 +2008_004357 +2008_004361 +2008_004363 +2008_004367 +2008_004371 +2008_004374 +2008_004384 +2008_004389 +2008_004391 +2008_004394 +2008_004396 +2008_004399 +2008_004402 +2008_004406 +2008_004408 +2008_004414 +2008_004417 +2008_004419 +2008_004422 +2008_004425 +2008_004426 +2008_004427 +2008_004431 +2008_004433 +2008_004438 +2008_004445 +2008_004453 +2008_004455 +2008_004459 +2008_004460 +2008_004464 +2008_004469 +2008_004470 +2008_004471 +2008_004476 +2008_004477 +2008_004478 +2008_004479 +2008_004482 +2008_004487 +2008_004497 +2008_004498 +2008_004501 +2008_004502 +2008_004504 +2008_004510 +2008_004520 +2008_004522 +2008_004525 +2008_004526 +2008_004528 +2008_004533 +2008_004534 +2008_004538 +2008_004540 +2008_004541 +2008_004546 +2008_004549 +2008_004550 +2008_004552 +2008_004553 +2008_004554 +2008_004564 +2008_004575 +2008_004579 +2008_004589 +2008_004592 +2008_004599 +2008_004605 +2008_004606 +2008_004610 +2008_004612 +2008_004613 +2008_004614 +2008_004615 +2008_004619 +2008_004621 +2008_004624 +2008_004630 +2008_004632 +2008_004633 +2008_004640 +2008_004646 +2008_004647 +2008_004653 
+2008_004654 +2008_004656 +2008_004659 +2008_004662 +2008_004665 +2008_004670 +2008_004684 +2008_004687 +2008_004688 +2008_004689 +2008_004695 +2008_004696 +2008_004701 +2008_004702 +2008_004704 +2008_004705 +2008_004706 +2008_004711 +2008_004716 +2008_004718 +2008_004720 +2008_004722 +2008_004726 +2008_004729 +2008_004730 +2008_004736 +2008_004740 +2008_004742 +2008_004745 +2008_004754 +2008_004756 +2008_004758 +2008_004760 +2008_004766 +2008_004768 +2008_004771 +2008_004774 +2008_004778 +2008_004784 +2008_004794 +2008_004795 +2008_004797 +2008_004805 +2008_004812 +2008_004814 +2008_004819 +2008_004825 +2008_004832 +2008_004833 +2008_004837 +2008_004851 +2008_004852 +2008_004854 +2008_004862 +2008_004866 +2008_004873 +2008_004875 +2008_004881 +2008_004885 +2008_004887 +2008_004894 +2008_004896 +2008_004898 +2008_004900 +2008_004904 +2008_004907 +2008_004910 +2008_004921 +2008_004923 +2008_004926 +2008_004930 +2008_004933 +2008_004935 +2008_004940 +2008_004942 +2008_004948 +2008_004955 +2008_004967 +2008_004968 +2008_004974 +2008_004975 +2008_004979 +2008_004982 +2008_004984 +2008_004986 +2008_004995 +2008_005001 +2008_005003 +2008_005008 +2008_005010 +2008_005015 +2008_005023 +2008_005032 +2008_005035 +2008_005037 +2008_005043 +2008_005046 +2008_005049 +2008_005054 +2008_005057 +2008_005061 +2008_005063 +2008_005065 +2008_005068 +2008_005070 +2008_005072 +2008_005085 +2008_005089 +2008_005092 +2008_005096 +2008_005097 +2008_005098 +2008_005105 +2008_005107 +2008_005109 +2008_005110 +2008_005111 +2008_005114 +2008_005117 +2008_005123 +2008_005139 +2008_005140 +2008_005147 +2008_005151 +2008_005156 +2008_005160 +2008_005166 +2008_005167 +2008_005175 +2008_005181 +2008_005182 +2008_005183 +2008_005185 +2008_005190 +2008_005191 +2008_005194 +2008_005197 +2008_005204 +2008_005205 +2008_005208 +2008_005215 +2008_005217 +2008_005233 +2008_005235 +2008_005242 +2008_005243 +2008_005244 +2008_005245 +2008_005251 +2008_005252 +2008_005253 +2008_005254 +2008_005255 +2008_005257 +2008_005260 +2008_005261 +2008_005270 +2008_005272 +2008_005276 +2008_005277 +2008_005282 +2008_005288 +2008_005304 +2008_005309 +2008_005313 +2008_005316 +2008_005319 +2008_005323 +2008_005327 +2008_005335 +2008_005337 +2008_005338 +2008_005346 +2008_005347 +2008_005348 +2008_005356 +2008_005359 +2008_005360 +2008_005361 +2008_005369 +2008_005373 +2008_005374 +2008_005378 +2008_005379 +2008_005393 +2008_005398 +2008_005399 +2008_005404 +2008_005406 +2008_005417 +2008_005421 +2008_005422 +2008_005423 +2008_005427 +2008_005431 +2008_005436 +2008_005439 +2008_005444 +2008_005445 +2008_005446 +2008_005447 +2008_005455 +2008_005460 +2008_005467 +2008_005469 +2008_005472 +2008_005480 +2008_005485 +2008_005490 +2008_005498 +2008_005501 +2008_005504 +2008_005510 +2008_005511 +2008_005522 +2008_005525 +2008_005530 +2008_005534 +2008_005538 +2008_005544 +2008_005548 +2008_005550 +2008_005552 +2008_005553 +2008_005563 +2008_005564 +2008_005566 +2008_005573 +2008_005574 +2008_005582 +2008_005588 +2008_005599 +2008_005601 +2008_005608 +2008_005611 +2008_005612 +2008_005614 +2008_005627 +2008_005628 +2008_005631 +2008_005633 +2008_005635 +2008_005637 +2008_005638 +2008_005642 +2008_005643 +2008_005646 +2008_005649 +2008_005652 +2008_005657 +2008_005660 +2008_005663 +2008_005664 +2008_005676 +2008_005677 +2008_005680 +2008_005681 +2008_005685 +2008_005686 +2008_005687 +2008_005691 +2008_005695 +2008_005699 +2008_005701 +2008_005702 +2008_005703 +2008_005720 +2008_005721 +2008_005726 +2008_005727 +2008_005732 +2008_005734 +2008_005735 
+2008_005738 +2008_005748 +2008_005750 +2008_005763 +2008_005764 +2008_005768 +2008_005774 +2008_005777 +2008_005779 +2008_005788 +2008_005790 +2008_005792 +2008_005796 +2008_005798 +2008_005801 +2008_005805 +2008_005808 +2008_005812 +2008_005816 +2008_005821 +2008_005825 +2008_005831 +2008_005838 +2008_005846 +2008_005847 +2008_005848 +2008_005857 +2008_005860 +2008_005863 +2008_005865 +2008_005869 +2008_005875 +2008_005877 +2008_005881 +2008_005883 +2008_005884 +2008_005889 +2008_005895 +2008_005898 +2008_005904 +2008_005907 +2008_005914 +2008_005915 +2008_005918 +2008_005924 +2008_005928 +2008_005933 +2008_005934 +2008_005936 +2008_005939 +2008_005943 +2008_005957 +2008_005962 +2008_005964 +2008_005970 +2008_005975 +2008_005977 +2008_005978 +2008_005980 +2008_005984 +2008_005987 +2008_005989 +2008_006002 +2008_006007 +2008_006008 +2008_006010 +2008_006017 +2008_006021 +2008_006024 +2008_006027 +2008_006028 +2008_006031 +2008_006034 +2008_006036 +2008_006037 +2008_006038 +2008_006041 +2008_006042 +2008_006045 +2008_006047 +2008_006050 +2008_006052 +2008_006055 +2008_006058 +2008_006059 +2008_006063 +2008_006068 +2008_006071 +2008_006072 +2008_006082 +2008_006087 +2008_006088 +2008_006094 +2008_006100 +2008_006104 +2008_006108 +2008_006109 +2008_006112 +2008_006113 +2008_006117 +2008_006120 +2008_006130 +2008_006143 +2008_006144 +2008_006147 +2008_006148 +2008_006151 +2008_006154 +2008_006159 +2008_006163 +2008_006166 +2008_006169 +2008_006175 +2008_006178 +2008_006179 +2008_006185 +2008_006190 +2008_006200 +2008_006203 +2008_006205 +2008_006207 +2008_006216 +2008_006218 +2008_006219 +2008_006222 +2008_006227 +2008_006229 +2008_006233 +2008_006239 +2008_006240 +2008_006254 +2008_006262 +2008_006267 +2008_006269 +2008_006275 +2008_006282 +2008_006285 +2008_006288 +2008_006290 +2008_006298 +2008_006303 +2008_006307 +2008_006310 +2008_006311 +2008_006316 +2008_006320 +2008_006325 +2008_006327 +2008_006330 +2008_006337 +2008_006341 +2008_006347 +2008_006356 +2008_006359 +2008_006362 +2008_006366 +2008_006368 +2008_006377 +2008_006382 +2008_006392 +2008_006394 +2008_006397 +2008_006403 +2008_006407 +2008_006408 +2008_006409 +2008_006416 +2008_006424 +2008_006425 +2008_006429 +2008_006438 +2008_006441 +2008_006449 +2008_006452 +2008_006458 +2008_006463 +2008_006467 +2008_006477 +2008_006480 +2008_006487 +2008_006489 +2008_006502 +2008_006503 +2008_006517 +2008_006519 +2008_006520 +2008_006523 +2008_006524 +2008_006526 +2008_006528 +2008_006530 +2008_006534 +2008_006547 +2008_006548 +2008_006553 +2008_006554 +2008_006568 +2008_006576 +2008_006587 +2008_006588 +2008_006591 +2008_006600 +2008_006604 +2008_006605 +2008_006609 +2008_006611 +2008_006614 +2008_006616 +2008_006617 +2008_006621 +2008_006631 +2008_006635 +2008_006642 +2008_006646 +2008_006656 +2008_006660 +2008_006662 +2008_006665 +2008_006671 +2008_006684 +2008_006686 +2008_006690 +2008_006694 +2008_006696 +2008_006701 +2008_006703 +2008_006708 +2008_006710 +2008_006714 +2008_006716 +2008_006722 +2008_006731 +2008_006732 +2008_006743 +2008_006746 +2008_006747 +2008_006752 +2008_006758 +2008_006765 +2008_006773 +2008_006774 +2008_006776 +2008_006777 +2008_006779 +2008_006781 +2008_006784 +2008_006792 +2008_006793 +2008_006796 +2008_006797 +2008_006798 +2008_006800 +2008_006811 +2008_006813 +2008_006815 +2008_006816 +2008_006817 +2008_006824 +2008_006825 +2008_006828 +2008_006831 +2008_006833 +2008_006835 +2008_006837 +2008_006839 +2008_006841 +2008_006844 +2008_006849 +2008_006855 +2008_006863 +2008_006870 +2008_006874 +2008_006880 
+2008_006885 +2008_006887 +2008_006890 +2008_006892 +2008_006896 +2008_006900 +2008_006904 +2008_006907 +2008_006912 +2008_006924 +2008_006925 +2008_006939 +2008_006941 +2008_006944 +2008_006948 +2008_006949 +2008_006951 +2008_006952 +2008_006956 +2008_006959 +2008_006967 +2008_006968 +2008_006979 +2008_006980 +2008_006981 +2008_006986 +2008_006987 +2008_006989 +2008_006991 +2008_006997 +2008_006998 +2008_006999 +2008_007006 +2008_007010 +2008_007019 +2008_007021 +2008_007025 +2008_007031 +2008_007032 +2008_007034 +2008_007042 +2008_007048 +2008_007050 +2008_007056 +2008_007057 +2008_007059 +2008_007064 +2008_007067 +2008_007070 +2008_007084 +2008_007086 +2008_007091 +2008_007096 +2008_007103 +2008_007105 +2008_007108 +2008_007112 +2008_007114 +2008_007119 +2008_007120 +2008_007123 +2008_007130 +2008_007133 +2008_007134 +2008_007143 +2008_007163 +2008_007164 +2008_007166 +2008_007167 +2008_007171 +2008_007176 +2008_007181 +2008_007182 +2008_007184 +2008_007187 +2008_007188 +2008_007189 +2008_007190 +2008_007194 +2008_007195 +2008_007196 +2008_007207 +2008_007214 +2008_007216 +2008_007219 +2008_007222 +2008_007225 +2008_007227 +2008_007229 +2008_007231 +2008_007241 +2008_007247 +2008_007250 +2008_007256 +2008_007264 +2008_007266 +2008_007269 +2008_007273 +2008_007277 +2008_007279 +2008_007282 +2008_007285 +2008_007287 +2008_007293 +2008_007295 +2008_007305 +2008_007311 +2008_007314 +2008_007317 +2008_007319 +2008_007323 +2008_007324 +2008_007327 +2008_007332 +2008_007334 +2008_007336 +2008_007339 +2008_007344 +2008_007348 +2008_007350 +2008_007352 +2008_007358 +2008_007374 +2008_007378 +2008_007384 +2008_007389 +2008_007390 +2008_007392 +2008_007393 +2008_007398 +2008_007402 +2008_007403 +2008_007404 +2008_007409 +2008_007415 +2008_007417 +2008_007430 +2008_007431 +2008_007434 +2008_007435 +2008_007441 +2008_007446 +2008_007452 +2008_007455 +2008_007458 +2008_007459 +2008_007461 +2008_007466 +2008_007476 +2008_007478 +2008_007480 +2008_007488 +2008_007494 +2008_007497 +2008_007498 +2008_007501 +2008_007507 +2008_007513 +2008_007514 +2008_007521 +2008_007525 +2008_007527 +2008_007529 +2008_007531 +2008_007534 +2008_007536 +2008_007538 +2008_007548 +2008_007558 +2008_007561 +2008_007567 +2008_007574 +2008_007579 +2008_007583 +2008_007585 +2008_007586 +2008_007587 +2008_007591 +2008_007594 +2008_007595 +2008_007596 +2008_007599 +2008_007610 +2008_007612 +2008_007617 +2008_007618 +2008_007623 +2008_007632 +2008_007635 +2008_007643 +2008_007649 +2008_007656 +2008_007661 +2008_007662 +2008_007668 +2008_007669 +2008_007673 +2008_007676 +2008_007677 +2008_007685 +2008_007688 +2008_007690 +2008_007693 +2008_007694 +2008_007702 +2008_007704 +2008_007706 +2008_007714 +2008_007716 +2008_007719 +2008_007729 +2008_007733 +2008_007735 +2008_007736 +2008_007737 +2008_007738 +2008_007739 +2008_007741 +2008_007745 +2008_007749 +2008_007757 +2008_007760 +2008_007764 +2008_007766 +2008_007768 +2008_007787 +2008_007791 +2008_007793 +2008_007794 +2008_007797 +2008_007798 +2008_007804 +2008_007806 +2008_007811 +2008_007814 +2008_007816 +2008_007819 +2008_007823 +2008_007827 +2008_007828 +2008_007831 +2008_007836 +2008_007839 +2008_007841 +2008_007850 +2008_007853 +2008_007854 +2008_007855 +2008_007871 +2008_007872 +2008_007875 +2008_007884 +2008_007887 +2008_007888 +2008_007890 +2008_007893 +2008_007902 +2008_007914 +2008_007915 +2008_007917 +2008_007922 +2008_007923 +2008_007931 +2008_007932 +2008_007933 +2008_007935 +2008_007936 +2008_007942 +2008_007945 +2008_007948 +2008_007949 +2008_007954 +2008_007955 
+2008_007964 +2008_007966 +2008_007970 +2008_007981 +2008_007986 +2008_007989 +2008_007993 +2008_007994 +2008_008001 +2008_008011 +2008_008022 +2008_008024 +2008_008025 +2008_008029 +2008_008040 +2008_008044 +2008_008050 +2008_008052 +2008_008053 +2008_008055 +2008_008057 +2008_008066 +2008_008069 +2008_008075 +2008_008084 +2008_008086 +2008_008091 +2008_008093 +2008_008096 +2008_008103 +2008_008105 +2008_008109 +2008_008113 +2008_008115 +2008_008120 +2008_008123 +2008_008127 +2008_008130 +2008_008131 +2008_008134 +2008_008141 +2008_008145 +2008_008146 +2008_008155 +2008_008175 +2008_008177 +2008_008179 +2008_008185 +2008_008190 +2008_008191 +2008_008192 +2008_008203 +2008_008208 +2008_008210 +2008_008217 +2008_008221 +2008_008224 +2008_008231 +2008_008232 +2008_008233 +2008_008234 +2008_008235 +2008_008241 +2008_008246 +2008_008252 +2008_008254 +2008_008257 +2008_008268 +2008_008269 +2008_008271 +2008_008272 +2008_008278 +2008_008279 +2008_008284 +2008_008292 +2008_008296 +2008_008297 +2008_008301 +2008_008302 +2008_008307 +2008_008310 +2008_008313 +2008_008314 +2008_008318 +2008_008320 +2008_008322 +2008_008331 +2008_008335 +2008_008336 +2008_008337 +2008_008341 +2008_008346 +2008_008354 +2008_008357 +2008_008359 +2008_008362 +2008_008365 +2008_008373 +2008_008376 +2008_008377 +2008_008379 +2008_008380 +2008_008387 +2008_008388 +2008_008392 +2008_008393 +2008_008395 +2008_008406 +2008_008421 +2008_008424 +2008_008429 +2008_008433 +2008_008434 +2008_008435 +2008_008437 +2008_008439 +2008_008443 +2008_008444 +2008_008446 +2008_008450 +2008_008453 +2008_008461 +2008_008466 +2008_008467 +2008_008469 +2008_008470 +2008_008474 +2008_008488 +2008_008500 +2008_008501 +2008_008506 +2008_008512 +2008_008519 +2008_008524 +2008_008527 +2008_008531 +2008_008536 +2008_008537 +2008_008538 +2008_008552 +2008_008554 +2008_008564 +2008_008570 +2008_008574 +2008_008585 +2008_008588 +2008_008595 +2008_008598 +2008_008606 +2008_008611 +2008_008613 +2008_008615 +2008_008617 +2008_008619 +2008_008621 +2008_008622 +2008_008627 +2008_008628 +2008_008629 +2008_008632 +2008_008636 +2008_008652 +2008_008658 +2008_008659 +2008_008662 +2008_008675 +2008_008676 +2008_008679 +2008_008682 +2008_008683 +2008_008684 +2008_008690 +2008_008695 +2008_008700 +2008_008705 +2008_008708 +2008_008711 +2008_008713 +2008_008718 +2008_008724 +2008_008726 +2008_008732 +2008_008739 +2008_008746 +2008_008751 +2008_008753 +2008_008755 +2008_008758 +2008_008765 +2008_008767 +2008_008772 +2009_000001 +2009_000002 +2009_000009 +2009_000011 +2009_000012 +2009_000013 +2009_000017 +2009_000022 +2009_000026 +2009_000032 +2009_000035 +2009_000037 +2009_000039 +2009_000041 +2009_000045 +2009_000051 +2009_000055 +2009_000060 +2009_000063 +2009_000066 +2009_000067 +2009_000068 +2009_000072 +2009_000074 +2009_000078 +2009_000080 +2009_000084 +2009_000087 +2009_000089 +2009_000090 +2009_000093 +2009_000096 +2009_000097 +2009_000102 +2009_000121 +2009_000124 +2009_000136 +2009_000142 +2009_000146 +2009_000149 +2009_000156 +2009_000157 +2009_000158 +2009_000165 +2009_000169 +2009_000171 +2009_000181 +2009_000182 +2009_000183 +2009_000184 +2009_000189 +2009_000192 +2009_000198 +2009_000199 +2009_000201 +2009_000205 +2009_000206 +2009_000212 +2009_000214 +2009_000216 +2009_000219 +2009_000225 +2009_000242 +2009_000244 +2009_000247 +2009_000249 +2009_000254 +2009_000257 +2009_000260 +2009_000276 +2009_000282 +2009_000283 +2009_000284 +2009_000286 +2009_000288 +2009_000291 +2009_000293 +2009_000297 +2009_000298 +2009_000300 +2009_000304 +2009_000305 
+2009_000308 +2009_000309 +2009_000312 +2009_000316 +2009_000318 +2009_000321 +2009_000328 +2009_000330 +2009_000335 +2009_000337 +2009_000342 +2009_000351 +2009_000354 +2009_000356 +2009_000366 +2009_000370 +2009_000378 +2009_000387 +2009_000389 +2009_000391 +2009_000397 +2009_000398 +2009_000399 +2009_000402 +2009_000410 +2009_000411 +2009_000412 +2009_000414 +2009_000417 +2009_000418 +2009_000421 +2009_000422 +2009_000426 +2009_000430 +2009_000435 +2009_000440 +2009_000446 +2009_000453 +2009_000455 +2009_000456 +2009_000457 +2009_000461 +2009_000466 +2009_000469 +2009_000472 +2009_000483 +2009_000487 +2009_000488 +2009_000496 +2009_000499 +2009_000501 +2009_000511 +2009_000512 +2009_000513 +2009_000516 +2009_000519 +2009_000523 +2009_000526 +2009_000536 +2009_000542 +2009_000545 +2009_000549 +2009_000550 +2009_000552 +2009_000558 +2009_000559 +2009_000563 +2009_000566 +2009_000568 +2009_000573 +2009_000574 +2009_000590 +2009_000593 +2009_000597 +2009_000606 +2009_000608 +2009_000611 +2009_000614 +2009_000615 +2009_000619 +2009_000624 +2009_000625 +2009_000628 +2009_000631 +2009_000634 +2009_000637 +2009_000641 +2009_000647 +2009_000648 +2009_000658 +2009_000661 +2009_000664 +2009_000670 +2009_000674 +2009_000675 +2009_000676 +2009_000677 +2009_000681 +2009_000683 +2009_000689 +2009_000691 +2009_000702 +2009_000704 +2009_000705 +2009_000712 +2009_000716 +2009_000719 +2009_000723 +2009_000724 +2009_000725 +2009_000726 +2009_000727 +2009_000730 +2009_000731 +2009_000732 +2009_000734 +2009_000741 +2009_000742 +2009_000756 +2009_000758 +2009_000760 +2009_000762 +2009_000763 +2009_000771 +2009_000778 +2009_000779 +2009_000782 +2009_000783 +2009_000791 +2009_000811 +2009_000812 +2009_000817 +2009_000820 +2009_000821 +2009_000823 +2009_000824 +2009_000825 +2009_000828 +2009_000829 +2009_000830 +2009_000837 +2009_000839 +2009_000840 +2009_000843 +2009_000845 +2009_000846 +2009_000851 +2009_000852 +2009_000856 +2009_000858 +2009_000862 +2009_000865 +2009_000871 +2009_000879 +2009_000886 +2009_000890 +2009_000892 +2009_000896 +2009_000897 +2009_000898 +2009_000901 +2009_000904 +2009_000909 +2009_000919 +2009_000923 +2009_000924 +2009_000925 +2009_000928 +2009_000931 +2009_000934 +2009_000935 +2009_000939 +2009_000948 +2009_000954 +2009_000955 +2009_000958 +2009_000960 +2009_000964 +2009_000966 +2009_000971 +2009_000985 +2009_000989 +2009_000991 +2009_000992 +2009_000995 +2009_000998 +2009_001000 +2009_001006 +2009_001007 +2009_001008 +2009_001011 +2009_001016 +2009_001021 +2009_001024 +2009_001026 +2009_001028 +2009_001030 +2009_001038 +2009_001044 +2009_001054 +2009_001055 +2009_001057 +2009_001061 +2009_001066 +2009_001069 +2009_001075 +2009_001082 +2009_001083 +2009_001084 +2009_001090 +2009_001094 +2009_001097 +2009_001106 +2009_001108 +2009_001113 +2009_001118 +2009_001120 +2009_001121 +2009_001126 +2009_001128 +2009_001134 +2009_001139 +2009_001148 +2009_001155 +2009_001160 +2009_001164 +2009_001166 +2009_001181 +2009_001184 +2009_001194 +2009_001195 +2009_001196 +2009_001198 +2009_001207 +2009_001212 +2009_001215 +2009_001225 +2009_001227 +2009_001240 +2009_001242 +2009_001243 +2009_001245 +2009_001249 +2009_001252 +2009_001255 +2009_001257 +2009_001259 +2009_001266 +2009_001278 +2009_001279 +2009_001286 +2009_001288 +2009_001289 +2009_001299 +2009_001300 +2009_001309 +2009_001313 +2009_001314 +2009_001316 +2009_001320 +2009_001321 +2009_001322 +2009_001326 +2009_001332 +2009_001333 +2009_001343 +2009_001345 +2009_001348 +2009_001349 +2009_001350 +2009_001355 +2009_001361 +2009_001363 
+2009_001366 +2009_001367 +2009_001370 +2009_001371 +2009_001384 +2009_001387 +2009_001391 +2009_001393 +2009_001397 +2009_001398 +2009_001406 +2009_001407 +2009_001409 +2009_001411 +2009_001413 +2009_001414 +2009_001417 +2009_001419 +2009_001426 +2009_001427 +2009_001431 +2009_001433 +2009_001437 +2009_001440 +2009_001447 +2009_001449 +2009_001456 +2009_001468 +2009_001470 +2009_001479 +2009_001484 +2009_001490 +2009_001498 +2009_001501 +2009_001505 +2009_001509 +2009_001518 +2009_001519 +2009_001521 +2009_001522 +2009_001526 +2009_001534 +2009_001535 +2009_001536 +2009_001539 +2009_001549 +2009_001554 +2009_001562 +2009_001565 +2009_001568 +2009_001575 +2009_001577 +2009_001581 +2009_001587 +2009_001591 +2009_001593 +2009_001594 +2009_001606 +2009_001607 +2009_001617 +2009_001618 +2009_001621 +2009_001623 +2009_001627 +2009_001631 +2009_001633 +2009_001635 +2009_001643 +2009_001644 +2009_001645 +2009_001646 +2009_001648 +2009_001653 +2009_001663 +2009_001667 +2009_001673 +2009_001675 +2009_001682 +2009_001683 +2009_001684 +2009_001687 +2009_001696 +2009_001699 +2009_001707 +2009_001709 +2009_001713 +2009_001718 +2009_001723 +2009_001731 +2009_001733 +2009_001738 +2009_001741 +2009_001743 +2009_001752 +2009_001754 +2009_001758 +2009_001759 +2009_001764 +2009_001765 +2009_001767 +2009_001768 +2009_001774 +2009_001775 +2009_001778 +2009_001780 +2009_001784 +2009_001794 +2009_001799 +2009_001804 +2009_001805 +2009_001810 +2009_001811 +2009_001816 +2009_001818 +2009_001820 +2009_001822 +2009_001823 +2009_001830 +2009_001833 +2009_001835 +2009_001839 +2009_001848 +2009_001850 +2009_001851 +2009_001852 +2009_001853 +2009_001854 +2009_001858 +2009_001864 +2009_001881 +2009_001890 +2009_001905 +2009_001906 +2009_001907 +2009_001909 +2009_001911 +2009_001915 +2009_001916 +2009_001929 +2009_001931 +2009_001940 +2009_001941 +2009_001945 +2009_001949 +2009_001965 +2009_001967 +2009_001971 +2009_001973 +2009_001976 +2009_001977 +2009_001979 +2009_001980 +2009_001984 +2009_001988 +2009_001991 +2009_002001 +2009_002002 +2009_002003 +2009_002008 +2009_002009 +2009_002011 +2009_002012 +2009_002024 +2009_002031 +2009_002035 +2009_002039 +2009_002042 +2009_002046 +2009_002047 +2009_002053 +2009_002055 +2009_002056 +2009_002058 +2009_002061 +2009_002073 +2009_002078 +2009_002082 +2009_002087 +2009_002094 +2009_002097 +2009_002110 +2009_002111 +2009_002122 +2009_002127 +2009_002128 +2009_002131 +2009_002136 +2009_002137 +2009_002139 +2009_002141 +2009_002144 +2009_002150 +2009_002155 +2009_002164 +2009_002165 +2009_002169 +2009_002171 +2009_002175 +2009_002177 +2009_002185 +2009_002191 +2009_002194 +2009_002199 +2009_002202 +2009_002205 +2009_002208 +2009_002211 +2009_002212 +2009_002215 +2009_002219 +2009_002221 +2009_002222 +2009_002226 +2009_002228 +2009_002230 +2009_002231 +2009_002232 +2009_002238 +2009_002239 +2009_002242 +2009_002252 +2009_002257 +2009_002265 +2009_002267 +2009_002268 +2009_002272 +2009_002282 +2009_002286 +2009_002291 +2009_002295 +2009_002302 +2009_002305 +2009_002306 +2009_002308 +2009_002317 +2009_002319 +2009_002320 +2009_002325 +2009_002328 +2009_002333 +2009_002335 +2009_002346 +2009_002349 +2009_002350 +2009_002360 +2009_002363 +2009_002366 +2009_002370 +2009_002372 +2009_002373 +2009_002374 +2009_002380 +2009_002382 +2009_002390 +2009_002393 +2009_002398 +2009_002399 +2009_002400 +2009_002401 +2009_002407 +2009_002414 +2009_002415 +2009_002420 +2009_002432 +2009_002433 +2009_002436 +2009_002441 +2009_002444 +2009_002445 +2009_002449 +2009_002453 +2009_002457 +2009_002464 
+2009_002465 +2009_002470 +2009_002471 +2009_002474 +2009_002475 +2009_002476 +2009_002477 +2009_002487 +2009_002488 +2009_002499 +2009_002500 +2009_002510 +2009_002512 +2009_002515 +2009_002517 +2009_002518 +2009_002521 +2009_002524 +2009_002525 +2009_002527 +2009_002531 +2009_002532 +2009_002535 +2009_002537 +2009_002539 +2009_002546 +2009_002549 +2009_002552 +2009_002562 +2009_002563 +2009_002566 +2009_002568 +2009_002569 +2009_002570 +2009_002571 +2009_002573 +2009_002580 +2009_002584 +2009_002591 +2009_002592 +2009_002594 +2009_002604 +2009_002607 +2009_002608 +2009_002609 +2009_002614 +2009_002618 +2009_002632 +2009_002634 +2009_002635 +2009_002638 +2009_002645 +2009_002649 +2009_002651 +2009_002663 +2009_002665 +2009_002667 +2009_002668 +2009_002669 +2009_002670 +2009_002673 +2009_002675 +2009_002680 +2009_002681 +2009_002683 +2009_002684 +2009_002687 +2009_002698 +2009_002708 +2009_002711 +2009_002712 +2009_002717 +2009_002727 +2009_002732 +2009_002733 +2009_002739 +2009_002741 +2009_002743 +2009_002744 +2009_002749 +2009_002752 +2009_002753 +2009_002754 +2009_002755 +2009_002762 +2009_002765 +2009_002771 +2009_002772 +2009_002774 +2009_002777 +2009_002778 +2009_002779 +2009_002785 +2009_002790 +2009_002800 +2009_002803 +2009_002806 +2009_002807 +2009_002808 +2009_002809 +2009_002816 +2009_002830 +2009_002833 +2009_002836 +2009_002837 +2009_002838 +2009_002841 +2009_002856 +2009_002865 +2009_002876 +2009_002877 +2009_002882 +2009_002883 +2009_002887 +2009_002888 +2009_002894 +2009_002898 +2009_002902 +2009_002910 +2009_002918 +2009_002920 +2009_002925 +2009_002928 +2009_002936 +2009_002938 +2009_002940 +2009_002941 +2009_002960 +2009_002962 +2009_002967 +2009_002975 +2009_002977 +2009_002978 +2009_002982 +2009_002985 +2009_002986 +2009_002990 +2009_002995 +2009_002998 +2009_003003 +2009_003005 +2009_003010 +2009_003013 +2009_003018 +2009_003020 +2009_003022 +2009_003023 +2009_003031 +2009_003033 +2009_003043 +2009_003044 +2009_003052 +2009_003058 +2009_003059 +2009_003063 +2009_003065 +2009_003070 +2009_003071 +2009_003074 +2009_003076 +2009_003080 +2009_003083 +2009_003084 +2009_003089 +2009_003097 +2009_003098 +2009_003105 +2009_003110 +2009_003114 +2009_003122 +2009_003123 +2009_003125 +2009_003126 +2009_003128 +2009_003129 +2009_003130 +2009_003132 +2009_003136 +2009_003140 +2009_003143 +2009_003144 +2009_003150 +2009_003151 +2009_003153 +2009_003154 +2009_003181 +2009_003183 +2009_003185 +2009_003189 +2009_003191 +2009_003193 +2009_003194 +2009_003196 +2009_003198 +2009_003199 +2009_003201 +2009_003204 +2009_003212 +2009_003214 +2009_003217 +2009_003224 +2009_003230 +2009_003238 +2009_003241 +2009_003247 +2009_003251 +2009_003254 +2009_003255 +2009_003259 +2009_003262 +2009_003266 +2009_003269 +2009_003271 +2009_003273 +2009_003276 +2009_003278 +2009_003282 +2009_003284 +2009_003288 +2009_003294 +2009_003297 +2009_003299 +2009_003300 +2009_003301 +2009_003304 +2009_003305 +2009_003311 +2009_003312 +2009_003320 +2009_003323 +2009_003343 +2009_003346 +2009_003347 +2009_003348 +2009_003351 +2009_003372 +2009_003373 +2009_003375 +2009_003376 +2009_003378 +2009_003379 +2009_003380 +2009_003387 +2009_003394 +2009_003399 +2009_003400 +2009_003406 +2009_003409 +2009_003411 +2009_003415 +2009_003417 +2009_003422 +2009_003431 +2009_003433 +2009_003440 +2009_003441 +2009_003445 +2009_003450 +2009_003453 +2009_003456 +2009_003457 +2009_003460 +2009_003462 +2009_003466 +2009_003467 +2009_003469 +2009_003476 +2009_003481 +2009_003487 +2009_003491 +2009_003494 +2009_003498 +2009_003499 
+2009_003500 +2009_003504 +2009_003507 +2009_003508 +2009_003509 +2009_003517 +2009_003521 +2009_003523 +2009_003528 +2009_003530 +2009_003537 +2009_003538 +2009_003542 +2009_003543 +2009_003544 +2009_003549 +2009_003551 +2009_003554 +2009_003560 +2009_003564 +2009_003565 +2009_003566 +2009_003569 +2009_003571 +2009_003576 +2009_003581 +2009_003588 +2009_003589 +2009_003592 +2009_003598 +2009_003606 +2009_003607 +2009_003612 +2009_003618 +2009_003626 +2009_003627 +2009_003633 +2009_003635 +2009_003637 +2009_003638 +2009_003640 +2009_003642 +2009_003650 +2009_003655 +2009_003656 +2009_003664 +2009_003666 +2009_003669 +2009_003671 +2009_003679 +2009_003686 +2009_003689 +2009_003696 +2009_003698 +2009_003703 +2009_003704 +2009_003707 +2009_003710 +2009_003713 +2009_003714 +2009_003718 +2009_003725 +2009_003726 +2009_003738 +2009_003747 +2009_003751 +2009_003756 +2009_003758 +2009_003759 +2009_003771 +2009_003773 +2009_003776 +2009_003781 +2009_003785 +2009_003795 +2009_003800 +2009_003802 +2009_003804 +2009_003805 +2009_003806 +2009_003810 +2009_003813 +2009_003814 +2009_003821 +2009_003822 +2009_003832 +2009_003835 +2009_003836 +2009_003840 +2009_003847 +2009_003849 +2009_003855 +2009_003857 +2009_003858 +2009_003863 +2009_003870 +2009_003874 +2009_003879 +2009_003884 +2009_003892 +2009_003895 +2009_003899 +2009_003901 +2009_003902 +2009_003903 +2009_003904 +2009_003905 +2009_003908 +2009_003911 +2009_003914 +2009_003916 +2009_003928 +2009_003929 +2009_003936 +2009_003938 +2009_003944 +2009_003947 +2009_003950 +2009_003951 +2009_003955 +2009_003962 +2009_003965 +2009_003969 +2009_003971 +2009_003973 +2009_003977 +2009_003982 +2009_003986 +2009_003991 +2009_003992 +2009_003995 +2009_004001 +2009_004004 +2009_004016 +2009_004019 +2009_004021 +2009_004022 +2009_004031 +2009_004032 +2009_004033 +2009_004034 +2009_004038 +2009_004040 +2009_004043 +2009_004044 +2009_004050 +2009_004051 +2009_004052 +2009_004062 +2009_004070 +2009_004072 +2009_004075 +2009_004076 +2009_004084 +2009_004085 +2009_004092 +2009_004093 +2009_004099 +2009_004102 +2009_004108 +2009_004111 +2009_004113 +2009_004124 +2009_004125 +2009_004126 +2009_004128 +2009_004129 +2009_004131 +2009_004138 +2009_004140 +2009_004141 +2009_004142 +2009_004148 +2009_004150 +2009_004152 +2009_004157 +2009_004161 +2009_004163 +2009_004164 +2009_004166 +2009_004170 +2009_004175 +2009_004188 +2009_004193 +2009_004197 +2009_004203 +2009_004205 +2009_004207 +2009_004210 +2009_004217 +2009_004221 +2009_004224 +2009_004232 +2009_004233 +2009_004241 +2009_004242 +2009_004243 +2009_004247 +2009_004248 +2009_004255 +2009_004258 +2009_004262 +2009_004263 +2009_004272 +2009_004273 +2009_004274 +2009_004277 +2009_004284 +2009_004291 +2009_004298 +2009_004300 +2009_004303 +2009_004307 +2009_004309 +2009_004324 +2009_004329 +2009_004332 +2009_004346 +2009_004350 +2009_004359 +2009_004361 +2009_004364 +2009_004366 +2009_004377 +2009_004382 +2009_004390 +2009_004402 +2009_004403 +2009_004404 +2009_004410 +2009_004411 +2009_004414 +2009_004419 +2009_004435 +2009_004436 +2009_004440 +2009_004448 +2009_004453 +2009_004455 +2009_004456 +2009_004457 +2009_004468 +2009_004471 +2009_004478 +2009_004483 +2009_004494 +2009_004496 +2009_004497 +2009_004499 +2009_004502 +2009_004504 +2009_004507 +2009_004509 +2009_004518 +2009_004524 +2009_004525 +2009_004529 +2009_004530 +2009_004532 +2009_004535 +2009_004536 +2009_004537 +2009_004540 +2009_004542 +2009_004543 +2009_004548 +2009_004551 +2009_004552 +2009_004556 +2009_004559 +2009_004567 +2009_004568 +2009_004579 
+2009_004580 +2009_004581 +2009_004587 +2009_004588 +2009_004590 +2009_004592 +2009_004594 +2009_004601 +2009_004607 +2009_004614 +2009_004623 +2009_004624 +2009_004625 +2009_004629 +2009_004630 +2009_004634 +2009_004635 +2009_004645 +2009_004648 +2009_004653 +2009_004655 +2009_004664 +2009_004669 +2009_004670 +2009_004677 +2009_004679 +2009_004686 +2009_004687 +2009_004697 +2009_004706 +2009_004713 +2009_004716 +2009_004718 +2009_004720 +2009_004721 +2009_004730 +2009_004732 +2009_004738 +2009_004744 +2009_004746 +2009_004748 +2009_004749 +2009_004754 +2009_004758 +2009_004763 +2009_004765 +2009_004768 +2009_004769 +2009_004780 +2009_004781 +2009_004782 +2009_004784 +2009_004789 +2009_004796 +2009_004799 +2009_004801 +2009_004812 +2009_004820 +2009_004822 +2009_004823 +2009_004828 +2009_004834 +2009_004841 +2009_004845 +2009_004848 +2009_004849 +2009_004856 +2009_004857 +2009_004858 +2009_004859 +2009_004865 +2009_004867 +2009_004868 +2009_004869 +2009_004872 +2009_004876 +2009_004882 +2009_004885 +2009_004886 +2009_004889 +2009_004895 +2009_004897 +2009_004899 +2009_004902 +2009_004913 +2009_004917 +2009_004922 +2009_004929 +2009_004930 +2009_004933 +2009_004934 +2009_004940 +2009_004942 +2009_004946 +2009_004947 +2009_004956 +2009_004961 +2009_004969 +2009_004971 +2009_004974 +2009_004982 +2009_004987 +2009_004988 +2009_004993 +2009_004994 +2009_004996 +2009_005001 +2009_005005 +2009_005008 +2009_005019 +2009_005025 +2009_005033 +2009_005036 +2009_005038 +2009_005060 +2009_005061 +2009_005062 +2009_005064 +2009_005068 +2009_005073 +2009_005078 +2009_005079 +2009_005080 +2009_005082 +2009_005083 +2009_005086 +2009_005087 +2009_005089 +2009_005098 +2009_005103 +2009_005104 +2009_005111 +2009_005114 +2009_005119 +2009_005137 +2009_005140 +2009_005148 +2009_005149 +2009_005150 +2009_005152 +2009_005153 +2009_005156 +2009_005158 +2009_005161 +2009_005165 +2009_005171 +2009_005172 +2009_005178 +2009_005185 +2009_005189 +2009_005190 +2009_005193 +2009_005202 +2009_005203 +2009_005204 +2009_005205 +2009_005210 +2009_005211 +2009_005215 +2009_005216 +2009_005217 +2009_005219 +2009_005220 +2009_005221 +2009_005222 +2009_005225 +2009_005229 +2009_005231 +2009_005232 +2009_005239 +2009_005242 +2009_005257 +2009_005260 +2009_005262 +2009_005267 +2009_005268 +2009_005279 +2009_005286 +2009_005288 +2009_005292 +2009_005294 +2009_005299 +2009_005300 +2009_005302 +2009_005309 +2009_005310 +2010_000001 +2010_000003 +2010_000015 +2010_000020 +2010_000024 +2010_000027 +2010_000033 +2010_000035 +2010_000036 +2010_000038 +2010_000050 +2010_000053 +2010_000054 +2010_000065 +2010_000069 +2010_000072 +2010_000074 +2010_000083 +2010_000084 +2010_000085 +2010_000087 +2010_000088 +2010_000090 +2010_000095 +2010_000097 +2010_000098 +2010_000099 +2010_000110 +2010_000113 +2010_000118 +2010_000127 +2010_000139 +2010_000140 +2010_000145 +2010_000151 +2010_000159 +2010_000160 +2010_000162 +2010_000163 +2010_000170 +2010_000172 +2010_000174 +2010_000175 +2010_000178 +2010_000184 +2010_000193 +2010_000194 +2010_000196 +2010_000197 +2010_000199 +2010_000202 +2010_000211 +2010_000213 +2010_000216 +2010_000218 +2010_000238 +2010_000241 +2010_000246 +2010_000247 +2010_000254 +2010_000256 +2010_000260 +2010_000261 +2010_000262 +2010_000266 +2010_000272 +2010_000273 +2010_000279 +2010_000283 +2010_000284 +2010_000286 +2010_000291 +2010_000295 +2010_000308 +2010_000309 +2010_000312 +2010_000313 +2010_000317 +2010_000318 +2010_000321 +2010_000324 +2010_000325 +2010_000327 +2010_000330 +2010_000335 +2010_000336 +2010_000342 
+2010_000344 +2010_000349 +2010_000351 +2010_000352 +2010_000358 +2010_000370 +2010_000372 +2010_000374 +2010_000375 +2010_000376 +2010_000379 +2010_000381 +2010_000382 +2010_000384 +2010_000390 +2010_000399 +2010_000401 +2010_000406 +2010_000409 +2010_000418 +2010_000422 +2010_000426 +2010_000427 +2010_000431 +2010_000433 +2010_000435 +2010_000442 +2010_000444 +2010_000446 +2010_000449 +2010_000456 +2010_000461 +2010_000462 +2010_000468 +2010_000470 +2010_000474 +2010_000475 +2010_000483 +2010_000485 +2010_000493 +2010_000497 +2010_000502 +2010_000506 +2010_000510 +2010_000515 +2010_000524 +2010_000526 +2010_000530 +2010_000536 +2010_000537 +2010_000541 +2010_000547 +2010_000548 +2010_000552 +2010_000553 +2010_000559 +2010_000562 +2010_000572 +2010_000573 +2010_000574 +2010_000582 +2010_000583 +2010_000586 +2010_000590 +2010_000602 +2010_000603 +2010_000604 +2010_000608 +2010_000617 +2010_000621 +2010_000622 +2010_000624 +2010_000628 +2010_000633 +2010_000635 +2010_000639 +2010_000646 +2010_000647 +2010_000655 +2010_000665 +2010_000666 +2010_000669 +2010_000679 +2010_000682 +2010_000683 +2010_000689 +2010_000690 +2010_000692 +2010_000695 +2010_000697 +2010_000705 +2010_000711 +2010_000712 +2010_000722 +2010_000724 +2010_000726 +2010_000727 +2010_000729 +2010_000731 +2010_000735 +2010_000737 +2010_000738 +2010_000743 +2010_000744 +2010_000749 +2010_000754 +2010_000759 +2010_000761 +2010_000764 +2010_000771 +2010_000778 +2010_000786 +2010_000788 +2010_000791 +2010_000792 +2010_000797 +2010_000802 +2010_000805 +2010_000811 +2010_000814 +2010_000821 +2010_000822 +2010_000828 +2010_000829 +2010_000830 +2010_000831 +2010_000836 +2010_000846 +2010_000865 +2010_000866 +2010_000870 +2010_000874 +2010_000875 +2010_000876 +2010_000883 +2010_000889 +2010_000893 +2010_000897 +2010_000898 +2010_000904 +2010_000906 +2010_000907 +2010_000915 +2010_000918 +2010_000923 +2010_000927 +2010_000928 +2010_000929 +2010_000931 +2010_000941 +2010_000944 +2010_000945 +2010_000947 +2010_000948 +2010_000952 +2010_000955 +2010_000956 +2010_000959 +2010_000961 +2010_000968 +2010_000973 +2010_000975 +2010_000981 +2010_000989 +2010_000993 +2010_000996 +2010_001000 +2010_001006 +2010_001008 +2010_001009 +2010_001010 +2010_001011 +2010_001016 +2010_001017 +2010_001021 +2010_001024 +2010_001030 +2010_001032 +2010_001036 +2010_001042 +2010_001049 +2010_001051 +2010_001052 +2010_001057 +2010_001061 +2010_001069 +2010_001070 +2010_001077 +2010_001079 +2010_001080 +2010_001082 +2010_001085 +2010_001089 +2010_001099 +2010_001104 +2010_001107 +2010_001109 +2010_001117 +2010_001119 +2010_001124 +2010_001125 +2010_001127 +2010_001130 +2010_001147 +2010_001149 +2010_001151 +2010_001158 +2010_001163 +2010_001164 +2010_001172 +2010_001174 +2010_001181 +2010_001188 +2010_001189 +2010_001192 +2010_001201 +2010_001204 +2010_001206 +2010_001214 +2010_001215 +2010_001216 +2010_001218 +2010_001219 +2010_001220 +2010_001229 +2010_001234 +2010_001241 +2010_001242 +2010_001246 +2010_001251 +2010_001256 +2010_001257 +2010_001263 +2010_001264 +2010_001270 +2010_001272 +2010_001286 +2010_001287 +2010_001291 +2010_001292 +2010_001293 +2010_001294 +2010_001301 +2010_001305 +2010_001313 +2010_001315 +2010_001321 +2010_001325 +2010_001326 +2010_001327 +2010_001331 +2010_001333 +2010_001343 +2010_001351 +2010_001355 +2010_001357 +2010_001364 +2010_001367 +2010_001376 +2010_001382 +2010_001394 +2010_001403 +2010_001405 +2010_001407 +2010_001411 +2010_001412 +2010_001417 +2010_001421 +2010_001426 +2010_001432 +2010_001439 +2010_001441 +2010_001448 
+2010_001449 +2010_001451 +2010_001452 +2010_001453 +2010_001455 +2010_001461 +2010_001463 +2010_001468 +2010_001473 +2010_001479 +2010_001486 +2010_001497 +2010_001501 +2010_001502 +2010_001505 +2010_001516 +2010_001518 +2010_001520 +2010_001522 +2010_001525 +2010_001528 +2010_001534 +2010_001535 +2010_001536 +2010_001539 +2010_001540 +2010_001543 +2010_001544 +2010_001548 +2010_001553 +2010_001557 +2010_001563 +2010_001571 +2010_001574 +2010_001577 +2010_001579 +2010_001584 +2010_001586 +2010_001587 +2010_001601 +2010_001606 +2010_001614 +2010_001625 +2010_001633 +2010_001635 +2010_001636 +2010_001637 +2010_001640 +2010_001645 +2010_001646 +2010_001652 +2010_001656 +2010_001659 +2010_001668 +2010_001669 +2010_001671 +2010_001675 +2010_001679 +2010_001680 +2010_001682 +2010_001685 +2010_001690 +2010_001691 +2010_001692 +2010_001697 +2010_001699 +2010_001705 +2010_001712 +2010_001717 +2010_001720 +2010_001731 +2010_001734 +2010_001737 +2010_001739 +2010_001749 +2010_001752 +2010_001754 +2010_001757 +2010_001760 +2010_001763 +2010_001767 +2010_001768 +2010_001771 +2010_001773 +2010_001777 +2010_001783 +2010_001787 +2010_001788 +2010_001796 +2010_001803 +2010_001814 +2010_001819 +2010_001820 +2010_001821 +2010_001823 +2010_001827 +2010_001828 +2010_001829 +2010_001830 +2010_001836 +2010_001837 +2010_001838 +2010_001843 +2010_001845 +2010_001851 +2010_001857 +2010_001862 +2010_001863 +2010_001868 +2010_001869 +2010_001877 +2010_001891 +2010_001892 +2010_001893 +2010_001904 +2010_001907 +2010_001908 +2010_001913 +2010_001916 +2010_001918 +2010_001921 +2010_001927 +2010_001929 +2010_001937 +2010_001938 +2010_001950 +2010_001951 +2010_001954 +2010_001956 +2010_001962 +2010_001966 +2010_001967 +2010_001968 +2010_001986 +2010_001987 +2010_001988 +2010_001992 +2010_001995 +2010_001998 +2010_002000 +2010_002002 +2010_002005 +2010_002006 +2010_002017 +2010_002019 +2010_002022 +2010_002025 +2010_002029 +2010_002030 +2010_002040 +2010_002041 +2010_002046 +2010_002048 +2010_002050 +2010_002058 +2010_002060 +2010_002067 +2010_002073 +2010_002085 +2010_002086 +2010_002089 +2010_002094 +2010_002096 +2010_002098 +2010_002100 +2010_002102 +2010_002105 +2010_002106 +2010_002113 +2010_002117 +2010_002124 +2010_002128 +2010_002133 +2010_002137 +2010_002138 +2010_002142 +2010_002146 +2010_002147 +2010_002150 +2010_002161 +2010_002167 +2010_002172 +2010_002175 +2010_002181 +2010_002182 +2010_002183 +2010_002187 +2010_002192 +2010_002194 +2010_002195 +2010_002199 +2010_002200 +2010_002211 +2010_002213 +2010_002219 +2010_002223 +2010_002224 +2010_002228 +2010_002229 +2010_002232 +2010_002244 +2010_002245 +2010_002247 +2010_002251 +2010_002255 +2010_002261 +2010_002269 +2010_002271 +2010_002276 +2010_002279 +2010_002283 +2010_002287 +2010_002289 +2010_002294 +2010_002303 +2010_002305 +2010_002307 +2010_002310 +2010_002313 +2010_002315 +2010_002316 +2010_002319 +2010_002321 +2010_002326 +2010_002332 +2010_002336 +2010_002337 +2010_002340 +2010_002348 +2010_002354 +2010_002357 +2010_002361 +2010_002365 +2010_002366 +2010_002370 +2010_002372 +2010_002373 +2010_002383 +2010_002388 +2010_002390 +2010_002396 +2010_002398 +2010_002402 +2010_002405 +2010_002406 +2010_002408 +2010_002409 +2010_002420 +2010_002422 +2010_002427 +2010_002436 +2010_002446 +2010_002448 +2010_002449 +2010_002450 +2010_002458 +2010_002460 +2010_002461 +2010_002468 +2010_002479 +2010_002480 +2010_002482 +2010_002484 +2010_002504 +2010_002510 +2010_002512 +2010_002516 +2010_002518 +2010_002526 +2010_002531 +2010_002533 +2010_002534 +2010_002536 
+2010_002538 +2010_002539 +2010_002542 +2010_002543 +2010_002546 +2010_002547 +2010_002561 +2010_002565 +2010_002569 +2010_002578 +2010_002579 +2010_002580 +2010_002586 +2010_002587 +2010_002597 +2010_002598 +2010_002601 +2010_002602 +2010_002603 +2010_002605 +2010_002621 +2010_002623 +2010_002629 +2010_002631 +2010_002632 +2010_002638 +2010_002639 +2010_002645 +2010_002652 +2010_002654 +2010_002660 +2010_002661 +2010_002666 +2010_002667 +2010_002668 +2010_002676 +2010_002678 +2010_002679 +2010_002682 +2010_002691 +2010_002693 +2010_002695 +2010_002701 +2010_002704 +2010_002705 +2010_002710 +2010_002713 +2010_002714 +2010_002716 +2010_002721 +2010_002723 +2010_002725 +2010_002728 +2010_002736 +2010_002737 +2010_002740 +2010_002741 +2010_002754 +2010_002758 +2010_002763 +2010_002767 +2010_002770 +2010_002771 +2010_002774 +2010_002775 +2010_002780 +2010_002783 +2010_002789 +2010_002790 +2010_002791 +2010_002792 +2010_002793 +2010_002803 +2010_002807 +2010_002808 +2010_002814 +2010_002817 +2010_002822 +2010_002824 +2010_002827 +2010_002840 +2010_002845 +2010_002853 +2010_002854 +2010_002858 +2010_002860 +2010_002864 +2010_002868 +2010_002871 +2010_002873 +2010_002876 +2010_002877 +2010_002879 +2010_002881 +2010_002887 +2010_002900 +2010_002902 +2010_002905 +2010_002914 +2010_002921 +2010_002924 +2010_002927 +2010_002929 +2010_002930 +2010_002939 +2010_002940 +2010_002954 +2010_002956 +2010_002958 +2010_002960 +2010_002963 +2010_002965 +2010_002972 +2010_002980 +2010_002985 +2010_002988 +2010_002991 +2010_002993 +2010_002995 +2010_003014 +2010_003015 +2010_003016 +2010_003019 +2010_003024 +2010_003040 +2010_003043 +2010_003051 +2010_003054 +2010_003060 +2010_003067 +2010_003071 +2010_003072 +2010_003074 +2010_003081 +2010_003082 +2010_003091 +2010_003092 +2010_003098 +2010_003102 +2010_003103 +2010_003107 +2010_003112 +2010_003120 +2010_003122 +2010_003123 +2010_003127 +2010_003129 +2010_003132 +2010_003133 +2010_003135 +2010_003139 +2010_003146 +2010_003147 +2010_003154 +2010_003156 +2010_003160 +2010_003168 +2010_003176 +2010_003183 +2010_003187 +2010_003190 +2010_003199 +2010_003200 +2010_003201 +2010_003207 +2010_003212 +2010_003214 +2010_003219 +2010_003220 +2010_003223 +2010_003231 +2010_003232 +2010_003233 +2010_003236 +2010_003239 +2010_003240 +2010_003244 +2010_003248 +2010_003249 +2010_003251 +2010_003253 +2010_003257 +2010_003260 +2010_003270 +2010_003275 +2010_003276 +2010_003278 +2010_003279 +2010_003285 +2010_003287 +2010_003293 +2010_003299 +2010_003302 +2010_003303 +2010_003314 +2010_003316 +2010_003321 +2010_003325 +2010_003326 +2010_003331 +2010_003335 +2010_003341 +2010_003358 +2010_003361 +2010_003362 +2010_003365 +2010_003366 +2010_003368 +2010_003375 +2010_003376 +2010_003379 +2010_003381 +2010_003385 +2010_003390 +2010_003397 +2010_003398 +2010_003401 +2010_003402 +2010_003409 +2010_003411 +2010_003418 +2010_003419 +2010_003427 +2010_003429 +2010_003446 +2010_003450 +2010_003451 +2010_003453 +2010_003458 +2010_003461 +2010_003465 +2010_003467 +2010_003468 +2010_003470 +2010_003473 +2010_003479 +2010_003482 +2010_003488 +2010_003490 +2010_003493 +2010_003495 +2010_003496 +2010_003497 +2010_003503 +2010_003506 +2010_003508 +2010_003514 +2010_003520 +2010_003522 +2010_003527 +2010_003531 +2010_003532 +2010_003537 +2010_003540 +2010_003541 +2010_003547 +2010_003559 +2010_003561 +2010_003562 +2010_003563 +2010_003568 +2010_003569 +2010_003573 +2010_003579 +2010_003585 +2010_003588 +2010_003594 +2010_003597 +2010_003603 +2010_003605 +2010_003609 +2010_003610 +2010_003613 
+2010_003628 +2010_003630 +2010_003632 +2010_003640 +2010_003641 +2010_003645 +2010_003653 +2010_003655 +2010_003659 +2010_003664 +2010_003667 +2010_003673 +2010_003675 +2010_003679 +2010_003687 +2010_003688 +2010_003695 +2010_003701 +2010_003708 +2010_003709 +2010_003716 +2010_003723 +2010_003724 +2010_003728 +2010_003729 +2010_003730 +2010_003731 +2010_003735 +2010_003742 +2010_003744 +2010_003745 +2010_003746 +2010_003755 +2010_003757 +2010_003758 +2010_003761 +2010_003762 +2010_003764 +2010_003768 +2010_003771 +2010_003772 +2010_003774 +2010_003779 +2010_003781 +2010_003792 +2010_003800 +2010_003801 +2010_003805 +2010_003806 +2010_003807 +2010_003811 +2010_003813 +2010_003820 +2010_003823 +2010_003826 +2010_003828 +2010_003847 +2010_003848 +2010_003852 +2010_003854 +2010_003855 +2010_003857 +2010_003859 +2010_003861 +2010_003863 +2010_003878 +2010_003879 +2010_003890 +2010_003898 +2010_003912 +2010_003915 +2010_003919 +2010_003920 +2010_003928 +2010_003933 +2010_003936 +2010_003939 +2010_003942 +2010_003943 +2010_003944 +2010_003947 +2010_003955 +2010_003956 +2010_003961 +2010_003966 +2010_003970 +2010_003971 +2010_003976 +2010_003980 +2010_003981 +2010_003983 +2010_003988 +2010_003999 +2010_004006 +2010_004010 +2010_004021 +2010_004023 +2010_004026 +2010_004027 +2010_004031 +2010_004036 +2010_004037 +2010_004041 +2010_004042 +2010_004050 +2010_004054 +2010_004056 +2010_004063 +2010_004064 +2010_004067 +2010_004073 +2010_004088 +2010_004094 +2010_004095 +2010_004096 +2010_004102 +2010_004104 +2010_004105 +2010_004107 +2010_004120 +2010_004124 +2010_004125 +2010_004129 +2010_004137 +2010_004139 +2010_004140 +2010_004141 +2010_004143 +2010_004145 +2010_004149 +2010_004157 +2010_004161 +2010_004165 +2010_004173 +2010_004178 +2010_004179 +2010_004182 +2010_004184 +2010_004187 +2010_004188 +2010_004193 +2010_004201 +2010_004207 +2010_004208 +2010_004209 +2010_004211 +2010_004219 +2010_004224 +2010_004225 +2010_004226 +2010_004227 +2010_004228 +2010_004229 +2010_004230 +2010_004238 +2010_004253 +2010_004254 +2010_004257 +2010_004263 +2010_004278 +2010_004279 +2010_004280 +2010_004286 +2010_004290 +2010_004291 +2010_004297 +2010_004304 +2010_004312 +2010_004313 +2010_004314 +2010_004318 +2010_004320 +2010_004322 +2010_004335 +2010_004337 +2010_004339 +2010_004341 +2010_004345 +2010_004348 +2010_004350 +2010_004351 +2010_004352 +2010_004355 +2010_004362 +2010_004369 +2010_004374 +2010_004380 +2010_004382 +2010_004387 +2010_004390 +2010_004391 +2010_004400 +2010_004404 +2010_004409 +2010_004415 +2010_004417 +2010_004419 +2010_004420 +2010_004422 +2010_004425 +2010_004428 +2010_004431 +2010_004432 +2010_004439 +2010_004447 +2010_004455 +2010_004456 +2010_004457 +2010_004460 +2010_004461 +2010_004469 +2010_004472 +2010_004475 +2010_004479 +2010_004483 +2010_004484 +2010_004486 +2010_004488 +2010_004503 +2010_004505 +2010_004506 +2010_004509 +2010_004515 +2010_004519 +2010_004520 +2010_004529 +2010_004533 +2010_004536 +2010_004537 +2010_004542 +2010_004543 +2010_004545 +2010_004550 +2010_004551 +2010_004553 +2010_004554 +2010_004556 +2010_004557 +2010_004559 +2010_004567 +2010_004570 +2010_004584 +2010_004585 +2010_004586 +2010_004588 +2010_004596 +2010_004597 +2010_004608 +2010_004618 +2010_004624 +2010_004627 +2010_004628 +2010_004629 +2010_004634 +2010_004635 +2010_004637 +2010_004642 +2010_004654 +2010_004659 +2010_004661 +2010_004662 +2010_004667 +2010_004670 +2010_004672 +2010_004677 +2010_004679 +2010_004681 +2010_004686 +2010_004691 +2010_004692 +2010_004697 +2010_004714 +2010_004722 
+2010_004733 +2010_004735 +2010_004743 +2010_004747 +2010_004748 +2010_004750 +2010_004753 +2010_004756 +2010_004757 +2010_004763 +2010_004768 +2010_004772 +2010_004775 +2010_004778 +2010_004779 +2010_004783 +2010_004785 +2010_004786 +2010_004789 +2010_004792 +2010_004795 +2010_004804 +2010_004809 +2010_004813 +2010_004815 +2010_004817 +2010_004821 +2010_004825 +2010_004828 +2010_004829 +2010_004830 +2010_004836 +2010_004849 +2010_004854 +2010_004856 +2010_004857 +2010_004861 +2010_004865 +2010_004866 +2010_004868 +2010_004877 +2010_004889 +2010_004891 +2010_004894 +2010_004901 +2010_004903 +2010_004906 +2010_004908 +2010_004909 +2010_004917 +2010_004919 +2010_004921 +2010_004930 +2010_004931 +2010_004941 +2010_004946 +2010_004951 +2010_004952 +2010_004954 +2010_004957 +2010_004967 +2010_004980 +2010_004982 +2010_004989 +2010_004992 +2010_004994 +2010_004998 +2010_005000 +2010_005005 +2010_005006 +2010_005008 +2010_005013 +2010_005021 +2010_005023 +2010_005026 +2010_005031 +2010_005035 +2010_005042 +2010_005044 +2010_005046 +2010_005048 +2010_005049 +2010_005052 +2010_005053 +2010_005059 +2010_005061 +2010_005063 +2010_005066 +2010_005075 +2010_005079 +2010_005082 +2010_005083 +2010_005087 +2010_005096 +2010_005107 +2010_005108 +2010_005109 +2010_005115 +2010_005116 +2010_005118 +2010_005120 +2010_005123 +2010_005130 +2010_005136 +2010_005138 +2010_005141 +2010_005143 +2010_005148 +2010_005152 +2010_005158 +2010_005159 +2010_005160 +2010_005164 +2010_005166 +2010_005167 +2010_005169 +2010_005174 +2010_005180 +2010_005184 +2010_005185 +2010_005187 +2010_005188 +2010_005192 +2010_005206 +2010_005208 +2010_005215 +2010_005222 +2010_005224 +2010_005226 +2010_005230 +2010_005239 +2010_005242 +2010_005243 +2010_005245 +2010_005246 +2010_005250 +2010_005252 +2010_005264 +2010_005268 +2010_005272 +2010_005284 +2010_005285 +2010_005287 +2010_005292 +2010_005293 +2010_005305 +2010_005314 +2010_005323 +2010_005327 +2010_005330 +2010_005331 +2010_005332 +2010_005338 +2010_005340 +2010_005344 +2010_005345 +2010_005346 +2010_005353 +2010_005366 +2010_005369 +2010_005372 +2010_005374 +2010_005375 +2010_005379 +2010_005382 +2010_005394 +2010_005398 +2010_005401 +2010_005405 +2010_005406 +2010_005410 +2010_005414 +2010_005416 +2010_005421 +2010_005424 +2010_005425 +2010_005428 +2010_005432 +2010_005433 +2010_005441 +2010_005448 +2010_005452 +2010_005455 +2010_005456 +2010_005463 +2010_005467 +2010_005472 +2010_005474 +2010_005480 +2010_005482 +2010_005483 +2010_005484 +2010_005491 +2010_005493 +2010_005496 +2010_005501 +2010_005502 +2010_005508 +2010_005514 +2010_005515 +2010_005516 +2010_005527 +2010_005531 +2010_005532 +2010_005534 +2010_005538 +2010_005542 +2010_005543 +2010_005548 +2010_005551 +2010_005556 +2010_005562 +2010_005566 +2010_005567 +2010_005572 +2010_005575 +2010_005576 +2010_005582 +2010_005586 +2010_005587 +2010_005592 +2010_005594 +2010_005606 +2010_005610 +2010_005612 +2010_005620 +2010_005625 +2010_005626 +2010_005632 +2010_005635 +2010_005636 +2010_005637 +2010_005644 +2010_005647 +2010_005651 +2010_005654 +2010_005657 +2010_005658 +2010_005664 +2010_005666 +2010_005671 +2010_005676 +2010_005681 +2010_005688 +2010_005692 +2010_005697 +2010_005705 +2010_005706 +2010_005709 +2010_005712 +2010_005718 +2010_005719 +2010_005727 +2010_005731 +2010_005733 +2010_005738 +2010_005747 +2010_005752 +2010_005754 +2010_005756 +2010_005761 +2010_005762 +2010_005763 +2010_005764 +2010_005767 +2010_005768 +2010_005777 +2010_005780 +2010_005784 +2010_005788 +2010_005804 +2010_005806 +2010_005815 
+2010_005817 +2010_005824 +2010_005827 +2010_005833 +2010_005837 +2010_005838 +2010_005843 +2010_005848 +2010_005849 +2010_005853 +2010_005860 +2010_005868 +2010_005870 +2010_005871 +2010_005877 +2010_005882 +2010_005883 +2010_005884 +2010_005885 +2010_005886 +2010_005888 +2010_005894 +2010_005896 +2010_005897 +2010_005899 +2010_005901 +2010_005903 +2010_005907 +2010_005914 +2010_005922 +2010_005934 +2010_005936 +2010_005937 +2010_005938 +2010_005943 +2010_005953 +2010_005973 +2010_005976 +2010_005980 +2010_005981 +2010_005991 +2010_005992 +2010_005993 +2010_005997 +2010_005998 +2010_006000 +2010_006003 +2010_006004 +2010_006010 +2010_006011 +2010_006021 +2010_006025 +2010_006026 +2010_006031 +2010_006032 +2010_006033 +2010_006034 +2010_006035 +2010_006037 +2010_006041 +2010_006051 +2010_006054 +2010_006056 +2010_006057 +2010_006058 +2010_006061 +2010_006062 +2010_006070 +2010_006076 +2010_006082 +2010_006084 +2010_006086 +2011_000002 +2011_000007 +2011_000009 +2011_000010 +2011_000016 +2011_000034 +2011_000036 +2011_000037 +2011_000038 +2011_000043 +2011_000045 +2011_000051 +2011_000054 +2011_000057 +2011_000060 +2011_000061 +2011_000065 +2011_000066 +2011_000070 +2011_000071 +2011_000076 +2011_000077 +2011_000082 +2011_000083 +2011_000084 +2011_000086 +2011_000087 +2011_000090 +2011_000094 +2011_000096 +2011_000098 +2011_000102 +2011_000103 +2011_000109 +2011_000112 +2011_000114 +2011_000124 +2011_000128 +2011_000129 +2011_000130 +2011_000142 +2011_000146 +2011_000147 +2011_000161 +2011_000162 +2011_000163 +2011_000165 +2011_000166 +2011_000173 +2011_000178 +2011_000180 +2011_000185 +2011_000194 +2011_000195 +2011_000202 +2011_000206 +2011_000210 +2011_000213 +2011_000214 +2011_000226 +2011_000229 +2011_000232 +2011_000234 +2011_000238 +2011_000239 +2011_000246 +2011_000248 +2011_000253 +2011_000257 +2011_000273 +2011_000276 +2011_000283 +2011_000288 +2011_000291 +2011_000299 +2011_000304 +2011_000307 +2011_000309 +2011_000310 +2011_000312 +2011_000314 +2011_000315 +2011_000319 +2011_000320 +2011_000321 +2011_000322 +2011_000332 +2011_000338 +2011_000344 +2011_000346 +2011_000364 +2011_000369 +2011_000374 +2011_000386 +2011_000391 +2011_000396 +2011_000404 +2011_000408 +2011_000412 +2011_000418 +2011_000419 +2011_000426 +2011_000427 +2011_000432 +2011_000435 +2011_000436 +2011_000438 +2011_000445 +2011_000455 +2011_000456 +2011_000471 +2011_000474 +2011_000477 +2011_000479 +2011_000481 +2011_000482 +2011_000487 +2011_000498 +2011_000503 +2011_000511 +2011_000512 +2011_000514 +2011_000518 +2011_000519 +2011_000521 +2011_000526 +2011_000530 +2011_000532 +2011_000536 +2011_000541 +2011_000548 +2011_000554 +2011_000557 +2011_000559 +2011_000566 +2011_000569 +2011_000575 +2011_000585 +2011_000592 +2011_000598 +2011_000600 +2011_000607 +2011_000608 +2011_000609 +2011_000612 +2011_000618 +2011_000622 +2011_000627 +2011_000630 +2011_000634 +2011_000638 +2011_000656 +2011_000658 +2011_000661 +2011_000666 +2011_000669 +2011_000679 +2011_000683 +2011_000685 +2011_000688 +2011_000690 +2011_000709 +2011_000718 +2011_000724 +2011_000734 +2011_000743 +2011_000744 +2011_000745 +2011_000747 +2011_000749 +2011_000753 +2011_000765 +2011_000767 +2011_000770 +2011_000772 +2011_000774 +2011_000778 +2011_000780 +2011_000784 +2011_000785 +2011_000789 +2011_000807 +2011_000809 +2011_000813 +2011_000824 +2011_000830 +2011_000843 +2011_000850 +2011_000851 +2011_000853 +2011_000872 +2011_000874 +2011_000887 +2011_000888 +2011_000897 +2011_000900 +2011_000901 +2011_000908 +2011_000909 +2011_000912 +2011_000917 
+2011_000919 +2011_000927 +2011_000930 +2011_000932 +2011_000933 +2011_000950 +2011_000951 +2011_000953 +2011_000957 +2011_000961 +2011_000965 +2011_000969 +2011_000977 +2011_000986 +2011_000990 +2011_001005 +2011_001008 +2011_001014 +2011_001019 +2011_001020 +2011_001025 +2011_001029 +2011_001032 +2011_001036 +2011_001040 +2011_001044 +2011_001047 +2011_001054 +2011_001056 +2011_001058 +2011_001060 +2011_001064 +2011_001069 +2011_001071 +2011_001081 +2011_001082 +2011_001084 +2011_001086 +2011_001100 +2011_001105 +2011_001106 +2011_001110 +2011_001111 +2011_001114 +2011_001116 +2011_001124 +2011_001126 +2011_001128 +2011_001137 +2011_001138 +2011_001146 +2011_001149 +2011_001152 +2011_001158 +2011_001159 +2011_001160 +2011_001161 +2011_001167 +2011_001190 +2011_001201 +2011_001203 +2011_001213 +2011_001217 +2011_001221 +2011_001223 +2011_001226 +2011_001229 +2011_001232 +2011_001245 +2011_001251 +2011_001252 +2011_001260 +2011_001261 +2011_001263 +2011_001264 +2011_001266 +2011_001271 +2011_001276 +2011_001281 +2011_001282 +2011_001283 +2011_001284 +2011_001287 +2011_001288 +2011_001290 +2011_001292 +2011_001295 +2011_001304 +2011_001305 +2011_001311 +2011_001313 +2011_001315 +2011_001319 +2011_001326 +2011_001327 +2011_001329 +2011_001330 +2011_001335 +2011_001337 +2011_001341 +2011_001346 +2011_001350 +2011_001355 +2011_001360 +2011_001366 +2011_001370 +2011_001375 +2011_001387 +2011_001388 +2011_001389 +2011_001390 +2011_001399 +2011_001404 +2011_001406 +2011_001407 +2011_001416 +2011_001421 +2011_001434 +2011_001440 +2011_001441 +2011_001447 +2011_001467 +2011_001471 +2011_001489 +2011_001501 +2011_001507 +2011_001508 +2011_001518 +2011_001521 +2011_001524 +2011_001525 +2011_001529 +2011_001530 +2011_001531 +2011_001534 +2011_001535 +2011_001541 +2011_001543 +2011_001544 +2011_001546 +2011_001558 +2011_001567 +2011_001568 +2011_001573 +2011_001589 +2011_001591 +2011_001592 +2011_001596 +2011_001597 +2011_001601 +2011_001607 +2011_001608 +2011_001612 +2011_001613 +2011_001614 +2011_001618 +2011_001619 +2011_001620 +2011_001624 +2011_001628 +2011_001641 +2011_001642 +2011_001655 +2011_001665 +2011_001669 +2011_001674 +2011_001678 +2011_001691 +2011_001693 +2011_001699 +2011_001705 +2011_001707 +2011_001708 +2011_001712 +2011_001713 +2011_001714 +2011_001719 +2011_001720 +2011_001722 +2011_001726 +2011_001741 +2011_001745 +2011_001747 +2011_001748 +2011_001751 +2011_001757 +2011_001770 +2011_001771 +2011_001775 +2011_001782 +2011_001785 +2011_001793 +2011_001794 +2011_001800 +2011_001801 +2011_001806 +2011_001812 +2011_001815 +2011_001819 +2011_001820 +2011_001822 +2011_001824 +2011_001825 +2011_001827 +2011_001834 +2011_001837 +2011_001841 +2011_001842 +2011_001845 +2011_001847 +2011_001854 +2011_001856 +2011_001858 +2011_001862 +2011_001863 +2011_001868 +2011_001870 +2011_001873 +2011_001876 +2011_001877 +2011_001880 +2011_001900 +2011_001910 +2011_001911 +2011_001914 +2011_001919 +2011_001927 +2011_001932 +2011_001941 +2011_001942 +2011_001945 +2011_001946 +2011_001951 +2011_001962 +2011_001966 +2011_001975 +2011_001980 +2011_001982 +2011_001984 +2011_001986 +2011_001988 +2011_001989 +2011_002002 +2011_002003 +2011_002004 +2011_002016 +2011_002018 +2011_002019 +2011_002021 +2011_002033 +2011_002036 +2011_002038 +2011_002040 +2011_002041 +2011_002042 +2011_002044 +2011_002045 +2011_002047 +2011_002064 +2011_002074 +2011_002075 +2011_002079 +2011_002088 +2011_002091 +2011_002093 +2011_002098 +2011_002100 +2011_002102 +2011_002105 +2011_002108 +2011_002109 +2011_002110 +2011_002116 
+2011_002121 +2011_002124 +2011_002128 +2011_002132 +2011_002137 +2011_002150 +2011_002154 +2011_002156 +2011_002158 +2011_002159 +2011_002160 +2011_002163 +2011_002169 +2011_002173 +2011_002174 +2011_002178 +2011_002184 +2011_002185 +2011_002192 +2011_002193 +2011_002200 +2011_002215 +2011_002221 +2011_002223 +2011_002230 +2011_002234 +2011_002241 +2011_002244 +2011_002247 +2011_002248 +2011_002260 +2011_002269 +2011_002270 +2011_002272 +2011_002276 +2011_002279 +2011_002280 +2011_002292 +2011_002294 +2011_002295 +2011_002298 +2011_002301 +2011_002308 +2011_002312 +2011_002317 +2011_002322 +2011_002324 +2011_002325 +2011_002327 +2011_002330 +2011_002343 +2011_002357 +2011_002358 +2011_002362 +2011_002365 +2011_002366 +2011_002371 +2011_002379 +2011_002380 +2011_002384 +2011_002386 +2011_002391 +2011_002393 +2011_002395 +2011_002396 +2011_002406 +2011_002407 +2011_002409 +2011_002414 +2011_002429 +2011_002453 +2011_002459 +2011_002463 +2011_002479 +2011_002482 +2011_002490 +2011_002491 +2011_002494 +2011_002495 +2011_002498 +2011_002505 +2011_002507 +2011_002509 +2011_002515 +2011_002516 +2011_002519 +2011_002520 +2011_002531 +2011_002532 +2011_002535 +2011_002536 +2011_002542 +2011_002548 +2011_002556 +2011_002558 +2011_002566 +2011_002575 +2011_002578 +2011_002579 +2011_002582 +2011_002583 +2011_002588 +2011_002589 +2011_002592 +2011_002605 +2011_002610 +2011_002612 +2011_002617 +2011_002623 +2011_002624 +2011_002629 +2011_002631 +2011_002639 +2011_002640 +2011_002641 +2011_002644 +2011_002662 +2011_002674 +2011_002675 +2011_002678 +2011_002685 +2011_002687 +2011_002694 +2011_002699 +2011_002713 +2011_002714 +2011_002725 +2011_002730 +2011_002738 +2011_002740 +2011_002742 +2011_002750 +2011_002751 +2011_002754 +2011_002760 +2011_002765 +2011_002766 +2011_002772 +2011_002784 +2011_002786 +2011_002796 +2011_002802 +2011_002805 +2011_002810 +2011_002812 +2011_002817 +2011_002830 +2011_002831 +2011_002833 +2011_002838 +2011_002841 +2011_002854 +2011_002863 +2011_002864 +2011_002868 +2011_002870 +2011_002871 +2011_002879 +2011_002880 +2011_002883 +2011_002885 +2011_002887 +2011_002890 +2011_002897 +2011_002900 +2011_002916 +2011_002925 +2011_002929 +2011_002933 +2011_002943 +2011_002944 +2011_002951 +2011_002962 +2011_002967 +2011_002970 +2011_002971 +2011_002975 +2011_002978 +2011_002983 +2011_002985 +2011_002992 +2011_002993 +2011_002994 +2011_002997 +2011_002999 +2011_003003 +2011_003011 +2011_003012 +2011_003013 +2011_003019 +2011_003023 +2011_003027 +2011_003028 +2011_003029 +2011_003030 +2011_003039 +2011_003043 +2011_003050 +2011_003055 +2011_003059 +2011_003076 +2011_003079 +2011_003085 +2011_003086 +2011_003089 +2011_003097 +2011_003098 +2011_003103 +2011_003111 +2011_003114 +2011_003115 +2011_003145 +2011_003146 +2011_003149 +2011_003152 +2011_003163 +2011_003166 +2011_003167 +2011_003168 +2011_003169 +2011_003176 +2011_003182 +2011_003185 +2011_003197 +2011_003201 +2011_003205 +2011_003207 +2011_003211 +2011_003212 +2011_003213 +2011_003220 +2011_003228 +2011_003232 +2011_003240 +2011_003242 +2011_003244 +2011_003254 +2011_003256 +2011_003260 +2011_003261 +2011_003262 +2011_003269 +2011_003271 +2011_003275 diff --git a/ImageSets/README.md b/ImageSets/README.md new file mode 100644 index 0000000..44b3a3a --- /dev/null +++ b/ImageSets/README.md @@ -0,0 +1 @@ +Please copy this folder to `$RFCN_ROOT/data/VOCdevkit0712/VOC0712` diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..317ecb6 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Yuwen 
Xiong
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..f3601c0
--- /dev/null
+++ b/README.md
@@ -0,0 +1,150 @@
+# py-R-FCN
+R-FCN: Object Detection via Region-based Fully Convolutional Networks
+
+py-R-FCN now supports end-to-end training.
+
+### Disclaimer
+
+The official R-FCN code (written in MATLAB) is available [here](https://github.com/daijifeng001/R-FCN).
+If your goal is to reproduce the results in the [NIPS 2016 paper](https://arxiv.org/abs/1605.06409), please use the [official code](https://github.com/daijifeng001/R-FCN).
+
+py-R-FCN is based on the [py-faster-rcnn code](https://github.com/rbgirshick/py-faster-rcnn) and [the official R-FCN implementation](https://github.com/daijifeng001/R-FCN), and its usage is quite similar to [py-faster-rcnn](https://github.com/rbgirshick/py-faster-rcnn); thanks to them.
+
+There are slight differences between the two implementations.
+In particular, this Python port
+ - is ~10% slower at test-time, because some operations execute on the CPU in Python layers (e.g., 99ms / image for this port vs. 90ms / image for the MATLAB version with ResNet-50)
+ - gives similar, but not exactly the same, mAP as the MATLAB version
+
+The original py-faster-rcnn uses class-aware bounding box regression, whereas R-FCN uses class-agnostic bounding box regression to reduce model complexity. I have therefore added a configuration option AGONISTIC to fast_rcnn/config.py, with a default value of False. You should set it to True for both the training and testing phases if you want class-agnostic training and testing.
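+A minimal sketch of how this option might be enabled, assuming the `AGONISTIC` spelling used by this patch and the `--set` override mechanism described in the Usage section below (whether the bundled `experiments/cfgs/rfcn_end2end*.yml` files already set it for you is worth checking first):
+
+```Shell
+# Illustrative only: override the config key when launching the end-to-end script,
+# so that class-agnostic bounding box regression is used for training and testing.
+./experiments/scripts/rfcn_end2end.sh 0 ResNet-50 --set AGONISTIC True
+```
+
+Alternatively, the same key can be added to the experiment YAML under `experiments/cfgs/` instead of being passed on the command line.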
+### License
+
+R-FCN is released under the MIT License (refer to the LICENSE file for details).
+
+### Citing R-FCN
+
+If you find R-FCN useful in your research, please consider citing:
+
+    @article{dai16rfcn,
+        Author = {Jifeng Dai and Yi Li and Kaiming He and Jian Sun},
+        Title = {{R-FCN}: Object Detection via Region-based Fully Convolutional Networks},
+        Journal = {arXiv preprint arXiv:1605.06409},
+        Year = {2016}
+    }
+
+### Main Results
+                   | training data       | test data             | mAP   | time/img (K40) | time/img (Titan X)
+-------------------|:-------------------:|:---------------------:|:-----:|:--------------:|:------------------:|
+R-FCN, ResNet-50   | VOC 07+12 trainval  | VOC 07 test           | 76.8% | N/A            | 0.099sec            |
+R-FCN, ResNet-101  | VOC 07+12 trainval  | VOC 07 test           | N/A   | N/A            | N/A                 |
+
+
+### Requirements: software
+
+0. **`Important`** Please use the [Microsoft-version Caffe (@commit 1a2be8e)](https://github.com/Microsoft/caffe/tree/1a2be8ecf9ba318d516d79187845e90ac6e73197): this Caffe supports the R-FCN layers, and the prototxt files in this repository follow the Microsoft-version Caffe's layer names. You need to put the Caffe root folder under the py-R-FCN folder, just like py-faster-rcnn does.
+
+1. Requirements for `Caffe` and `pycaffe` (see: [Caffe installation instructions](http://caffe.berkeleyvision.org/installation.html))
+
+  **Note:** Caffe *must* be built with support for Python layers!
+
+  ```make
+  # In your Makefile.config, make sure to have this line uncommented
+  WITH_PYTHON_LAYER := 1
+  # Unrelatedly, it's also recommended that you use CUDNN
+  USE_CUDNN := 1
+  ```
+2. Python packages you might not have: `cython`, `python-opencv`, `easydict`
+3. [Optional] MATLAB is required for **official** PASCAL VOC evaluation only. The code now includes unofficial Python evaluation code.
+
+### Requirements: hardware
+
+Any NVIDIA GPU with 6GB or larger memory is OK (4GB is enough for ResNet-50).
+
+### Demo
+1. I do not provide a demo currently; I'll add one soon.
+
+### Preparation for Training & Testing
+1. Download the training, validation, and test data and the VOCdevkit
+
+  ```Shell
+  wget http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtrainval_06-Nov-2007.tar
+  wget http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtest_06-Nov-2007.tar
+  wget http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCdevkit_08-Jun-2007.tar
+  wget http://host.robots.ox.ac.uk/pascal/VOC/voc2012/VOCtrainval_11-May-2012.tar
+  ```
+
+2. Extract all of these tars into one directory named `VOCdevkit`
+
+  ```Shell
+  tar xvf VOCtrainval_06-Nov-2007.tar
+  tar xvf VOCtest_06-Nov-2007.tar
+  tar xvf VOCdevkit_08-Jun-2007.tar
+  tar xvf VOCtrainval_11-May-2012.tar
+  ```
+
+3. It should have this basic structure
+
+  ```Shell
+  $VOCdevkit/                           # development kit
+  $VOCdevkit/VOCcode/                   # VOC utility code
+  $VOCdevkit/VOC2007                    # image sets, annotations, etc.
+  $VOCdevkit/VOC2012                    # image sets, annotations, etc.
+  # ... and several other directories ...
+  ```
+
+4. Since py-faster-rcnn does not support multiple training datasets, we need to merge the VOC 2007 and VOC 2012 data manually. Just make a new directory named `VOC0712` and put all subfolders except `ImageSets` from `VOC2007` and `VOC2012` into `VOC0712` (you'll merge some folders). I have provided merged-version ImageSet text files for you; please put them into `VOCdevkit/VOC0712/ImageSets` (a shell sketch of this step is given right after this list).
+
+5. Then the folder structure should look like this
+
+  ```Shell
+  $VOCdevkit/                           # development kit
+  $VOCdevkit/VOCcode/                   # VOC utility code
+  $VOCdevkit/VOC2007                    # image sets, annotations, etc.
+  $VOCdevkit/VOC2012                    # image sets, annotations, etc.
+  $VOCdevkit/VOC0712                    # you just created this folder
+  # ... and several other directories ...
+  ```
+
+6. Create symlinks for the PASCAL VOC dataset
+
+  ```Shell
+  cd $RFCN_ROOT/data
+  ln -s $VOCdevkit VOCdevkit0712
+  ```
+
+7. Please download the ImageNet-pre-trained ResNet-50 and ResNet-101 models manually, and put them into `$RFCN_ROOT/data/imagenet_models`
+8. Then everything is done, and you can train your own model.
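+As referenced in step 4, here is a minimal sketch of the merge. It assumes the `$VOCdevkit` layout from step 3 and that `$RFCN_ROOT` points at the root of this repository; `rsync` or symlinks would work just as well, and same-named files generally do not collide because VOC2007 and VOC2012 use different file-name patterns:
+
+```Shell
+cd $VOCdevkit
+mkdir VOC0712
+# Copy every top-level folder except ImageSets from both years; folders with the
+# same name (Annotations, JPEGImages, ...) are merged by the second copy.
+for d in VOC2007/* VOC2012/*; do
+    [ "$(basename "$d")" = "ImageSets" ] && continue
+    cp -r "$d" VOC0712/
+done
+# Use the merged-version ImageSets shipped at the top of this repository
+cp -r $RFCN_ROOT/ImageSets VOC0712/
+```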
+### Usage
+
+To train and test an R-FCN detector using the **approximate joint training** method, use `experiments/scripts/rfcn_end2end.sh`.
+Output is written underneath `$RFCN_ROOT/output`.
+
+To train and test an R-FCN detector using the **approximate joint training** method **with OHEM**, use `experiments/scripts/rfcn_end2end_ohem.sh`.
+Output is written underneath `$RFCN_ROOT/output`.
+
+```Shell
+cd $RFCN_ROOT
+./experiments/scripts/rfcn_end2end[_ohem].sh [GPU_ID] [NET] [--set ...]
+# GPU_ID is the GPU you want to train on
+# NET in {ResNet-50, ResNet-101} is the network arch to use
+# --set ... allows you to specify fast_rcnn.config options, e.g.
+#   --set EXP_DIR seed_rng1701 RNG_SEED 1701
+```
+
+Trained R-FCN networks are saved under:
+
+```
+output/<experiment directory>/<dataset name>/
+```
+
+Test outputs are saved under:
+
+```
+output/<experiment directory>/<dataset name>/<network snapshot name>/
+```
+
+### Misc
+
+Tested on Ubuntu 14.04 with a Titan X GPU and an Intel Xeon CPU E5-2620 v2 @ 2.10GHz.
+
+The py-faster-rcnn code also works properly here, but I have not added any other features (such as ResNet and OHEM) to it.
diff --git a/data/.gitignore b/data/.gitignore
new file mode 100644
index 0000000..dd09a04
--- /dev/null
+++ b/data/.gitignore
@@ -0,0 +1,6 @@
+selective_search*
+imagenet_models*
+fast_rcnn_models*
+faster_rcnn_models*
+VOCdevkit*
+cache
diff --git a/data/README.md b/data/README.md
new file mode 100644
index 0000000..109c453
--- /dev/null
+++ b/data/README.md
@@ -0,0 +1,69 @@
+This directory holds (*after you download them*):
+- Caffe models pre-trained on ImageNet
+- Faster R-CNN models
+- Symlinks to datasets
+
+To download Caffe models (ZF, VGG16) pre-trained on ImageNet, run:
+
+```
+./data/scripts/fetch_imagenet_models.sh
+```
+
+This script will populate `data/imagenet_models`.
+
+To download Faster R-CNN models trained on VOC 2007, run:
+
+```
+./data/scripts/fetch_faster_rcnn_models.sh
+```
+
+This script will populate `data/faster_rcnn_models`.
+
+In order to train and test with PASCAL VOC, you will need to establish symlinks.
+From the `data` directory (`cd data`):
+
+```
+# For VOC 2007
+ln -s /your/path/to/VOC2007/VOCdevkit VOCdevkit2007
+
+# For VOC 2012
+ln -s /your/path/to/VOC2012/VOCdevkit VOCdevkit2012
+```
+
+Install the MS COCO dataset at /path/to/coco
+
+```
+ln -s /path/to/coco coco
+```
+
+For COCO with Fast R-CNN, place object proposals under `coco_proposals` (inside
+the `data` directory). You can obtain proposals on COCO from Jan Hosang at
+https://www.mpi-inf.mpg.de/departments/computer-vision-and-multimodal-computing/research/object-recognition-and-scene-understanding/how-good-are-detection-proposals-really/.
+For COCO, using MCG is recommended over selective search. MCG boxes can be downloaded
+from http://www.eecs.berkeley.edu/Research/Projects/CS/vision/grouping/mcg/.
+Use the tool `lib/datasets/tools/mcg_munge.py` to convert the downloaded MCG data
+into the same file layout as those from Jan Hosang.
+
+Since you'll likely be experimenting with multiple installs of Fast/er R-CNN in
+parallel, you'll probably want to keep all of this data in a shared place and
+use symlinks. On my system I create the following symlinks inside `data`:
+
+Annotations for the 5k image 'minival' subset of COCO val2014 that I like to use
+can be found at http://www.cs.berkeley.edu/~rbg/faster-rcnn-data/instances_minival2014.json.zip.
+Annotations for COCO val2014 (set) minus minival (~35k images) can be found at
+http://www.cs.berkeley.edu/~rbg/faster-rcnn-data/instances_valminusminival2014.json.zip.
+ +``` +# data/cache holds various outputs created by the datasets package +ln -s /data/fast_rcnn_shared/cache + +# move the imagenet_models to shared location and symlink to them +ln -s /data/fast_rcnn_shared/imagenet_models + +# move the selective search data to a shared location and symlink to them +# (only applicable to Fast R-CNN training) +ln -s /data/fast_rcnn_shared/selective_search_data + +ln -s /data/VOC2007/VOCdevkit VOCdevkit2007 +ln -s /data/VOC2012/VOCdevkit VOCdevkit2012 +``` diff --git a/data/demo/000456.jpg b/data/demo/000456.jpg new file mode 100644 index 0000000000000000000000000000000000000000..480afa5d5f83ffd40bbf007c3629b1345d7aa4f9 GIT binary patch literal 105302 zcmbTdcQjmI*graY@4ZDQdh|{ZJ!+m z`@U=4`_KL3-n-8_d!4h+K4(3jz0cnJ`8?(E=i@ejR7*`$4SO@Nuwl{-??Rr|R(=fD9Kc2<i#}7JOUjZ8=sh8 zSX^3O`MC<)+1=YeI6V4wd~$Jl_4oSb7V+=yKU`=4jQ@r8RR2HXB74Gx{@)nj{D%t- zJ?QDgAj8CB6~-o4GQhF-p0|(ATF`&rPtqsz8Khmyli4dsj;cS*(!Ysn#N z!AHRGopVuHxGQ!|HBdql^21USP0_no*kWC+KK}judTDy!tbBIXg{}}mg0Jo!A4i$>OWhdfW2{S;D+LZghw#RYFLBvkq?>t6Vrsv*&@vfMjBz|4kBxF zMxz9X6CIle&>a`YriOiOZ84``_Wo1a^Tj@;Ix5GkB{M7l zskFyT82oN)SO&aTc??dhF&{0LX@$*-WytsMzrvpyV{!Oh=H-%_ea_W+xLzx*27{cg z%D0~k?2OQ<{H$E-Zq+UeS_;$TXe^@%B@UznbK*h!UC@96&e1GN#*q(r*lDOe_J zc<#^j-!Brpc2m3Zmm~%yHYdoxv%ryTzS6F*ICs9h z;o$V?AdqwB#+giTiJvhAbhO2<=stk65vFt@QJ_%0kFa%u#c(e2K`$h(Q+fmsOfjiB zV(lSvQMRl|{@#@^mSPH&@Tp`CXqU*;G+7t!RprCO0*meJ%$t);7bwG{46&Oq*KiJC z&@Fx+AkS_KclqEG3aW75WXsp6$o%&uY3HM*yQW+#oq3X$Q|3`*LFYMW8Vvj9YS>0; z^zPS2du!zWz#AE3S6u1MGh~}Z$oU`Fe>S5V&)5;OJIN$5GU*9oTND-o{Z(aA6UXPou6`=?W1pD^Qhgt+Q+|lR<(%f63ohL(edU~BS zi~S{Ox^O88Z)r518lffl&6Qqkm=jDdtrw#qcvrhnZB_#$?={c{a=HBN#VTSI6HD>d z6p$ErS>Lhql2f1P5s)%K*3XUfn-`2=JAOy76|AS}<4PX#E0=8gZ3)9PJAP2hum-+` zc)_7F#|wzyJ0b1J%q8zF!|-1B!M3xi-P?r;SlDc@0&SQX{7*8Hr#85B)G*pZs%gKi z<6(^t&jE`|nO1PVi3@54pbg(8@{J^tGpIQ&V!B8leh_`tLVupMd!{w35qEa0iU~IC z=I3Ec{m$jn{W8MPH%LO%RupOSP}-gzZ-ZI9a~soGo_4o%uoSSNO8`z(t?1j8)xobj z&z{g?Jm-BuS|^vp*NW)7*JO@mZoN*AiJ(PPt{4rp{9wy9>;{X2jm%34Tbd2^t+oC5 zHtaJXn8m?XHKKZCcoj{rARCj&_IqSn@`PXOb!64?1L&8<${9mdFvfRqH&@Tz^Y&B~ zorSZ5Wv?%zIYHH%(n*`^f@e^m^`>eHGnms9^d;rC$!}YpV({5>M0)vjRdS)NiLJuj zRQ0{*MK`QAp0jNCa zs$zA2xT53O5TArd8Q_c7_MlmGxBcPq6Ocsp2rUWV$NX#XSPOjwSpWHV-mXwHaDn+F z7(8p!B5HMzpE+>-|WVZ=%tQ_2Sio` ze;6r-r~izPfeI1gx4fRys1GWgI55uM`P4i~C4Mx}n)CEO^)fs(GzMRphBPX$z|03j z%QnRHh@N!Sy$MLN5H>j&Ccw06cF84Df*?Y4uqaWYo6geVGx!_4fEElHTXz)Xef z&YGH=Sj&Rt7mYDH!k^upL28_z#>`q7Kh)M?>E+*7co!Bv0ytL=Jfv=GH3RibZ_E21 zyl=hBi(6^lw@owL99`H#n<#YZPU++0!k9~rpHU1fiPNTNa7h|kYh=8t{gTJ#_JK6Q ztgvr2d6^tmD_O$tWs$R3VwR{#Q%j{D!Ki2yb_$IrCdU>eSDF zx3SFh!Rj};ODOTvpl9M2DfwHmp7wK3J6}a{?75BUqSuyL=OVI~BPodm!o?Lt#MX8~ zX6a)x_0Saw6iSV3Z9sblo)9LfW%fDty-nV>(k7FlPA3b`u6t9AVGVPd``IP7fMS8bmE;{A=M% z{Y++=sL9~OgBELa^}eKeX7AtRJr1P%_GXtT)Z!v2syDHh!YM#D8sr-1 zp?#3V$3=1 zhX#nlJmv%b5L=m-50LhXET{bX)qG-VQva)#NyC^wQ%-9_bB)w0y4G~D_swp&8Uc?J zeEsHDrm+~rvJe>@Yxi-uLS$eV?WLJ(&KS;NQ(~CvcJjc_nfvd;7Pj08f0=RIx^GHc z0~)O%*mKGn9?Ft<$6lY>Z9 zPK5-b{O#?-)A#E@W)tWCXoX$I<-7*?%PX&~n`--VH~rNnW_apu|MYL|Z&s~8BYLcB0Wf2XZ z{H<+tq+Ntb9T(X*SMXJ;oz0@p#yF_grJw7B0e72VE5pyoP;<-P&o*zNqTN4_KblVr zxXxjo?2tB|K}6bz4QuRFc-PLPjM26G=@5MtVMOy`K9w8pGIj3VO$p*sNX0Me?(?$( zFg6vAZ(}KF83~0cc~(Actwb{cNAyWTs*M}>@7C^qcxUk4B6F`FtnRj~=OaiE{S$h) z2*_YJVa6!G@|JdIbLt1#uYG_t<~|#*UAx*0iV1I% zUFJEK4t|uFAtxQF8b0%h$clOk`A$E>g>igF7riYgMZWCr7Irs*ITf#dEui%Wft*Yr z{at((`=sv+9l5R2yuF2HE5lb$32*D_GRigo67WY7taMoLv77uB|aA}UHP%2Ft(Knfrl z_8tLAFbmtVCwWx3#Ch|$u z`7c@e{59{mol}LlvCPZdQlKy2dpX=&cbeE1+9RSaf_7M3+T1UR^o%&{x2qJaI z8(Xsxu>v)%5NnXtT@dsi<3Duc=qbA1Pm@(*W#-SECXDCCa#Wevr=LRrFLH+zgyHQS 
zWo75Rt&845c^s@}GKu9eAK&7?XXBS7+{vwGS0sv9mk~xD|?&D z8fs?9T{I_>M>dN2%#x1zkcb{xPnV=)t0#GDaT}?qmuI!L(v-U&?z}62G15IA~d;S@64+VaeQg zW$Hd2KH>6HQCfiML(Ib0Q}vXXFMCW={AH?!(*a@p{cwe*?JbJq2<{8{#;j1eyXnK7 zs7Rt-><~IjPhTBY$(Jpe_WZrXT{aQ0X-nGy-J-rt@VRZm4yjaW5mmSON&q!o z*-N2$lQ^h9Dx3`T?I|_sbf(oWzh-Vz9|(C)*JIKc|!|@-K<}p}cAf+@raLR)*6ppf!>ZeBd!H=3`YA7(nLvIEa1Bkds6v zvn{!8A@g8_;00HWV8Kov(F`~YZwr%A0fdnnIli%V2>J8GEu$5<+-zify662tB&Lv1 zJ~OV2b`YDTp0E>8A4Dw8G8}|9M#2!jqUN2bxZPv}Kt$bhqo#_P`lV5`o7747gNjs0 z&QSY{k+F)>PJ>8cZ{RnVKwxnw_bNZl-(5bX+FI>}V zmZ{7Ek*UALronG+kPlG-8fU-7NY+AkJku$KRR|qNC4@C&RFz^wj6Qs0 zPVe(Sy2*Yh=zU$AFLcHZzcLfyBBw=yDs_j6zlB#MGGPn9v80Hulf8E;u6+cEW>Mt2 ziq_E0Wq$#mf^WC#ESTvKWzajl;WOqTi*oqZugk$N8@A?F8^LipFJT)hb!9FdVEkzY zb*%5>2VPxcb&H_bX!kQ<>ebNq=@XwMVB2g^QAqUi-fgw zubPLbhx#vtxQ9zvO54L<7#H|i4#@UJL<*!=MSA_GGUFj};61zS>b1vPYp}`;pkojv z`S6NcoW}Vp!&hcw=p2`wh8QHmxYBFsR`7?J!riT?XXF>fu>t~~d(WPw0Ks#b2mIha z_~=pAXogcsx;|G#Q+RYuQ;z`HicMD}d2rQe)}TOdH!8Mr(~7fI;}R-#&G{BBOf(y^ zK$CK2EH#dK^ecTe=;l45ZkNb%D6+M0G`b{uWe8*stg%*MXXi*7tEc-3Vr3(y<_7~+ zuwOJ5YX#zA^)tb1o>fdr6_;)vgOrzm{9JpqiJN5C18PrZ9?(A;Qwu%f!iD9E3h z&M}E${6?$sqJaDC&|gUgc-ub38y@LU*?lmpD0^ zLyQe3X^f$}xGWeZk=vzL(|gQoxlkMYP!?EVam6p4aQ{rIU_%C}sxcG7`U>tmtypeZ zhD@cFeo&gfn%-t4r8fO#=y)|DbT?}X-m~;hOX2^=E>WQqC@i(i@5Xe9;)3fV@ z&GP*A+|Au9{`JvpJeZfhMNSf7h0{MMHVyo_Hy!drFzlGb1D&^fXd681%R190RGnOM zij-|kea89M)($zCPSD{*c5achkg(tt1dEV|)++R9kly;9tAXk1QUJ@FA4hO1vggItvI*u70-3P#_!3O%h7`lF)4*I+p@4 zW)#V+=gp0iYn{ne{~<${8LxC;YUQZZYGmzNF12v2ZkwmkA|RAGMjEm570 zV8h;Z%SV8K7-|5+Op8oAtLnfhoM~6K7!xt8b77hgphsbId}sXEkQ13Goj_X&1`IQ1 zkb+W3tS8@2g%iR?p|Wnh*5zU1i+`5!s#Ba~*+hE452o%4Q4p-kN?3ft^|Y~59?YCIgXI%7rdO&{4mIgSNdoAncx~(+_2w<4 zwBr#Vuh_s2dp@zUa`y;OJQ-bmGD-w<31}O^Z`!M#f3Se$1ejZ>ebhy>tJj?L9!|oZ z$KLHt<11e2wf}-37@g(ZEDpualrz8@Bce43GkxH+Yv{)o`GvOJcY^6vPpR9m0iO&MKQTfX z8_GL*X_{Et=hpUz%<6<%R3|uY-)v3yyP1l#^*lTfgEdS|gT+@lZXHRJj1okA7%`R1 zuY^R(uiq*-!X7&Q9R!!S%ChKB1(v%33_kN7VQ^%{awiJ;BjkPs(0sPmQ0C4wN4N8G zn1%p^U#^Z%3JNcG+sA{>9zr5*_d9u4j=)vkpbF8{lm-ui zh~GP1rDdPD-(=)!FV>f$Yy043{M^@=8g9mr0u49rEna;lTw(Rt6;Or|yovO|m~aZQ zf1a|hmZz)9%rhg2HR+T>=o@$@r;c3{ba)a7co7JAb%IX?o2q-SVJG?_&G(yYgSVIZ zzEf@Idb>}cUEY^DLO#!UrwDhj15H#`%02*xqe$ikOC{t$Y}0N)!}yz1iY_~v9y@;R z%e%nis1ySW-Wq}An?D2Fp>^&SzLqC9-6??=-jtE#4xtJB~MODW?SrCh@?>12#)Z$WV) zhdbDiN{f}dA>j3d@|xx?Ed|)l6u1sxFI@Y|TRdDCs?obrF`%P%YtWX4MibxAxTi@s zw>l(NEP?uXknSgZ7wx| zK2I_ApM;EyIscHb*rALwoV04paV&M$I^I1qvBu-~tne^5r#H?wTa+WEpfkj*3cd>b zNGyr2M+{Qp4v%=aw~Wm7&S;W5d1H`%{Rkjx`bSTlSh*H#ayTLcN-Z0~6ph613i(K7 zMT+K}^iqXXqLnPk(}Fl-&tuIjK-!@- z0f(=Lkn}x<>mjqS9L2D%NM++d6FYr_Dn}b?VC;G^X-)qU#P1J|J|t0C{-Mp24?G*@ z@k&z3h_2Ok_2wZRa=0`ZU=+(pn05V#%@$@Bbc-L}T4{f-f6pQ9Y`Szv+f$28SY0s5+A?m@UKQpGN))N*J zq8#25NC2eAZHYbl>Dw2Y{3i+SlWW*1^f1Y-5-y}v*SLf`UZRsn{K1iXt6a!cv#O{t zkiM_JOzD0lesF7f1M-3yg}59s!AA=H1FlFRXeUrdvlkes1M`6P03XI&c}0k(zE3nHb-I;0G6 zZ5BCe_fXC6UrdSUk0i1X?|pj$d*aM;J~jC4;Crq0GA8tf^3;^r8O>j+zNRR#W?Lp0 zqD&`KinXtM@fuH{WPt>HP~W)yzUMv08TjxOH~-5H0_ePl)^~;`(i0ks>xVXp`Qv`d z!ZQIO<>uGWA4)v@70#bD`E}m=B}BnO8rernmzjK!ij_m+^C{|H#6y1kZe{+BrUdjv z^J7L{fq~-((aT0(Pji%Y&u1^5TyRhH0w~jH7+^U6WDa@+#Cx2pN5W5*HN%KXk#Y&6 zOervRuow5nO-&67`oIj*XWOIMaRiolbnGt*8x9ibUVd!ipOTiH1(4$XHWkag z@_q=!Y(*Piieira4UKdm;su5i?zCE_|F17WGNNZeEh$3KITAp{pvkXf+g=uk^Es^xx_HzIfgdy`V;B3} zczFCge8r(R!G*2yO=|OO%_nf;+>49s0GxIv?tOYxRr*_;n;xhztCxG)bnVy*Z%-dm z*{y5f8|s#*B1$|hf?J1TnkQUHAsoWGy~A9APn zRdEgoG#OdcmR+}*WDO#IF_^%rKJkU|Tp$Iaw&;5AHjqXl{skskZFckA|6yX=83&9b zHSw{Yq%!`Dta2;3cvt=a_0hS9$B#p#jbnmk+(d)23HxO^wkKK}`3udB;uhc9@{Nt6 zCvD1hIbNpzpVN)|r1tg7li?uREn+;9`VWeFH0T1ud^sS>n`ytrV{}s4Ar7|8kf^~J 
z$0}guATAJNU6Dk18%Zh0SAqEOy-4I@<$co*btdXZuAx%c0+nT(WqDtFPof#0oNvj$ zGQLUUYxgCK)>aoUyaUKts*Xv_S^B|F~JRg*FU*p~C5| z@MXva7l_Rk7(|#(PqSV9j3hoejNV*z|WchfQahgLMNn zGg7%vFSowOn6+3ESV^STofcE|v%0@Km>woOfgQ#1Z-+G5B{_r7Pxds^{DVbXG_0#~ z{awAz>Lm%c|2`8zL0dc9d*|EMOz+>=vUGM^wN(;m%W~P_O@??-o-_ub6&Lu{P#wLa z&C*@Awd(2zZz^kB;w7r-V`$L%d;r6uG*~+gB^E2Vg@8n|Awg>k-Yaj+9blO;sK~&l zmGAc^)rC+D!>;a!Jv~a;kfR+KmY&qOeCKAuIndV?1d;umlEH4R$rvfhEW$D|qITYW zZwyZ>xgejlK_oY7)@pTwn|BGUUDXoLR$iMs^->QO>MnCJc&HkHHxB7&!`8!2FDPDR z?Qwvg;VksJFVC6%{C={WOIW=!OV8P>s?En+8}VBr!UQY(V)je$vK~%D#Wf__D@Mk^ zLq~zfDRnBD(xlb^O;LvE=7%U2^k8i5>V>dP`H8vaM#tLx{dM>)l8Bm(F%#t0fC#)F zpitjAJIn>agYF4u7A#Io@|~9l?C1dUkSy(t3Wkx zgM^^@*Pl%zI2Xn9#OaD>Tukn*8<6B9andYQ!V){*KTgoNfBhYBv;Yc|xzq2;vxnTj z<|B;kmc$oJmY4N=ucj3E`T00QUfxSw9lKzm*FV^nJaPL!zP<(L1|IH;(jQ9yaSi^8 z_I#jM-;`ODYj3$wS_-N+&@?-VqJ_4bb54Mt7)abhB>LP%ZYt99Ugjj%MtF%k^K{UL zr)4hex!9>>R7`%rrhN zSKMz}-5bPAX)H-)u)`Pru?SEiO1{NI{|Gvue?x|pf5GYn^nP^4HWNRfBXi$GOvw{b=Nu-g*F zd@jQ-sbKPV4aNND5g_4)5V0i>zS>!d-a|y~CdVy_H_=xw{c*|)Q(M+tJEMc4IW!Nm zSlOXb&-LIH;fzEOT#%$h$a_55)*}-d-{`hq1Z?)^tNw=YK0Da%(B!tq4X{V=8zzmL z_~MIRC-%C%)v+!(i9n;iSHQ!=8~cOZ3iYidnI_x!2C`=wF0cl6@`*tE4n{w0T&wb(;+4u)#qAqu#m%$wb z*&f)rktxP^O^P;HAV3R!Vl-_Zob2}!l)R(ufz_jhNY%E!MI@O$2o8h&6KB#2XM=t} zZsk>FTJW$hA^vIN$n_;fg;9boapw1W$T?fWv4zfhzPqLP`U){x2&c%G)E|_C9Pf9c z3(KFWQD(K4ZcWVtbGsGet8)CSJ?8WU+F;Oqyh2zdhQLayTz}}5;`~13CnO29{0i=UGB;J3Xl17wq z?;us>?H)rUIg)*+roE1t6cO}#(XMz%Ms>ETM120Z7hak)%EFtet@aZ=l< zhLfH(^p5+3CpWyZ`f}`v;lRS(&-XmARnr_57=8BZCS|IP9i}b>Me5a^7&!_0&Yb^U zpA8fh5}q301kh^_{1^1M9o9Qr+%FSc?{-%lBgb8?5`;F_mat_Aq{|JzUU;Z<(bwc_ zr^k(wtA79dpRvbz6M`G^Jv{~{`OO;}=><=ZY1p|?Yu*7iTKkepcD9}h z)mHxU+#pt8#CI5(xHlUI9-s}lPp7f{ejHTe4q5Tz$7feHcLb{oU;neuMyEtyvyS@J zd5?p_+wu0c0|{wWE<{J2SK#a75oFDHY?hcPn%?-I zO@rQ`dRlWPKGz)2H6*`@7-DW&f>s4J0S5@V+C#f6)OCc+d*Y-vk~0mcCcaEEKBv_O z=mV&_;NCY52v#`zE~sh5mISG2Z4oN)Tg;nk3YJ#D<7UC%*At6bEt?(3U~b$sD)kH( zK>;7wRp{EY+@iI#vN>7q7avgcYF+um#zu#)iRsCjV{C2^Z6}0=ttq$i8R?4<-nES% zAVo^LZWDP!n}5NMLtWYzdW{2~6iv?*u|f8$if>!^vUcQ_E;z5Boru>#=Tp0SHfNW_ znyS=q886G!-uRzrnRvKlS$Zzyv|pM0n(bh@NdIPSu`65Xdc%kilKa}-CaU_>^ZP1M z(dmpk*>GHz6cE!3zAVIG6>8c>1m25cY<`GGQChaOHcYR;&aN4%y#o0#P4wEJV?ktB zbxv5&g-nlW6a_Fs8sV|cu<=Iu!ZtzZny$gTXkp|A++FmKm86+kwv%p~oD>7oZi>;@ zTx;m;s$)*zjqsEv#Oi83KS(dY}dSP@)6^8ZaS=qL2 zpF4c|7VF!5t*3aM5kj@0T0W_bS-0#owII+2_e#kX_t`4ka%U?B#Tx9rGcaU}>88NE zpjMPx1B{?=@3!=)DOvLNWGN&ER`Sv_*h#cD)481U&3JBKU_4pF$ijHOSgTTJ?Skkp zL)Vh)a_H8=;&6wWSKF-@4h*Tx)2GKq)$f%Mla?1Wqdcq^loB@{CAe*=!u*@6D&CYW zuUn{sT!Ao6>y2c9Im+1B7$d2DvGKfqbNwIazB1ikqYEs8(V?}UV}h2hief9Az7_`aa+V!c0x~#ssO@SS%P5N#U&<3tu?v-pue3pQ+gKCjM0? z*Fvo%=Qgj&MZ(VP?iL+4Gd(P9U~_O!sr57o-RU?{b7_YQBerDyD`?THUw)1s&46y6 zuyCyD8QXQ~GKgQr07f8aEfIaq&y_ z)c$9`VBEhSwi~EzDa=)O89=tL7ps2kcD(VES-1X2s>Jv>Zwf9o?rS=y zt@8hFWEits@uq_W;hF(dbSPSil=H@!M6Iq4?-mashY_K`Ck@3iO}y-PrZu^(r0XL9 z@K=Z6rUugf6;2Il3U)L!|65Q@W}zi;_9|1Zdd_lejN<$o2l@L^!XPT%GS03^+thx3 zIL@%OwL>Zw$WOH=T*1vda1SVB{(S3- zs(6Ra3zG`2&qTxbP5j$wP&!|^ilXQ2VC9nX01X;m72(w`#KrC!0nnd>5XOH?6Tun! 
zpj!3m5iq78FK(l;qY|4u<&0&i$h-4Z`*{=h2WX|oWL$T~PG?G)hJCI8Z5&g9G8oOM z`$OzM2nz<_qun#k`2aqXF3DDg#ojntJHlFgInNd^FIq`(Gw(35p7bnjzsbhklkh9o zw^`TuxeLw*YbYE&1WxGA0L*n1MG7AsCSj2HluL2iF%P_O$FGa&%RQ^PWWQ8Kmr(P$ zgwiG8EB)&k8n0Y6R{{FeuEDI1Zvom&}PhtK4RHUx)`9aJk4TLWzG|(P}?t)0<#t5 z9vQ0|JCti4lB(gSq!Ib+2X&|AMjlliAwlD{rUPuRII&C1J_0Pl9~8GgPmok)%xR5>ItL!9CQJ&HB{<}e1y$j%}>GMDo+@rFE`A=Cl~3C0;^W`(z(u_uK+Nh6fH%-=Ux z1{clWTlR<3_{&5bNnEJ(cz^oc^L2(cwR6lN!g}EO_h^3p{Emz&&qYJ^oCD{gMUcyV z!9_NIBOb5OIS12V;*dSmm;MNhi3Q^U}r~>;_H{sBi^JWC2@<4+%7uuDOD=57?goM z?Qo(v`OR$8q@Ev(?7qE>@L448obfRMRl;3r>KgO>JSX=pL-ZNCIM#o1ULB^^4o6E$ zGG-0=21F`&9|2iKN%;-Ti|id!JOh%U`5o(`l`|`b{t#{2>%R zx`&0z2@v?(?LCzI#@pRV!IQtAeICuhTBf!KQ077Qk=ASQP)-puf2Q#!f<1M2o-N5B zga=!ltStT$lvqO`UFiLQ+=m4a5HNK{b9d~Z zvEf|n8o+VJG!9FPs`?8&b$ye3iP>~n$i!MV_$)2oypoMn?%M9pXJ`{SdQ(>-?C1Bx z&nIdkrjbjlZO$}$;ojcH&Jk4@yFF`qjEJ3@WsZ=bsf52TtJkPYeyEvh|#5MiP?eW?XG~;o4_P2wBH|7=?H#U zB>P4}hefM_yZ4!tl&t7ypp1Ho=jY?(%Kl#;8N|Ez--mUjcsYeJx9@uDvwN zPgs9=8yydG{|u&%;nSc;d=doi4p{kyou|<*w!=PN93vKee%bS~;8sNYJ|G`gj6A7dEy&!|$(i$mRPlI_m9eepl zz}nu5m(WX>#D=0x>P$JIDdOGw*j(-$t9XvA&7*j*H+`^uxv2Rphvo2c@_qF;{CeMl zY^iVjbG*?#gP8m0-+C6ehH}rGCF58jv88x~CN#QeSk0NzriR+_IK~$-hw01bECMU*9y4X5GcZhl!gdOIe^M03FwThnd$yCFy*2;&f$7{7G zt9SHuw6|MAmALbt;!3PhF^&C0Ncov9fU4hdO-(XjEYTA+$7Kwb~y%5+5GB ziZzSQto=D8J|vt$peD&DLCe%aXnnz*7;t)~u-bSX{tEWb6)%!sv3DoJY@1wI{~L{A z$=fRKOb-tCAh?ZY2h{Eo@csbRqI&#QM!J^_^*M zrA~!FMeoO$uWRftI*I73_j2@K0%#Z#1xBT+)=uAHH`nXYP^KyI zde%5^7dMW;4-=|bXcoriiO(;YoU==1wz%C_mg%iu?XOa6zY*o3b+pF4iSF_|3@!p0 zGO@N3In|!feN<)8s=?6M=htWfysb-@#FwPgbEc~=)2ZPUI=%ZB@-qNfEfjx;>eF%K zXUVdh>vgR<-c&28ko#zT?BT}|gpDU+{$^UKch3XFjX%U1h23X4DAQ1t(5o;_lUSCt zJHwYM)zC|HL>d0AhrKp-Xk9DN$=*4~l!ec9x(HjQv|ue&#bkc36AZ^os|csv)JBuI zwZo6&Cyt2h2Y46YEQJw7Dr7WGicilu`nVkK8g=?Hjp=c+Pc{>3s9|Z!Cb8g1yRT8` ztmX8XUjL`1ewu$%VW8UhUv?D1Q$T`i)3|-jCzkomX5TJ#0(r2uPt zK{kpp@ES5*${B3=wzm0uZKK9 zHB2e%HEy*Z{Vt;oP{Z?GA@b-Uu&aC^_&nDG*>JUntOr?IddM&y?W)mZzR@v=k5w9+^kEd2@%#=i;+QZE`3>)%bdR*}~_0 zU+CAXns}30)xlIe{qIA(G)xxD%(>B&(}mZ+cygJ7`6o((Kn^eEYgA&N=XvKf$GOaG({RMezVjbp#_?FWODm(Ct@+f*z95K}^FF2OeDy8b zI7_soFu^2o@JL800y;EIgZ{_#Ya&;)Im_Fyu>?OKWD$OGMvOH`kdilNpR!`&`VmFjs5I$ z=^O9bTY?ub+N=+)4NT0t1_N=HH}7?-Vob9h_$J;{SyACnLq3O%7`-3zo)E#LFCOL* zVoMs*4JIid%Np+^5@nIb%wpXb-1CKC#o^Lf@F)WOGdNFc zKbPMbFDlwt(vGNozni+JVWTVVd{^x%tjx=vf4f=A;EEK$E36+vWFl|VFAa$`Sa?%TEQ&;atQUzmOWZ$MlTOJ&{({@ zuSmRkWOKo25&jpq`Q`4*Omw^iN&a7>2l#JQ7^5|ye7`U+m{L2kRfaPLqvBX2L)u6> zY4-_z)ui!pg(5V-SeEtlmh}uz;v6J~^v2=7k(!QU!M4y(E zlu~Z&$d&k2Qg*1OhDD01V7%Tn1f$lw7n-K(*~i0XwAa)rR;+O@O~`x50*W zQX2XJ1|K7OdDP@{`6tQ(*NH?t@5tkT%-)j*uZuq=aEgtwZk8P_dwtWCU<#W*_1-lgXQu&WiObJ%dkwMUzzR_}sG!eA9aAzOV%x4!r zbUzs%W8MFD8Re)(bT=U)7q`WA{B*&i_d>=lT`q+5VjTb|fFmk1WL#Q$>{p2R2-0=_ zTKks`fh_Lrq)Q1)Cnle0r}aFk@<%+sOig^NXc475?rq5L0(iV8!lx0b{7}&l7Eoq2 zy2~+#CO22G`bfzJhG~I)`_%T% zw&2>{X8YhxZ7; zT)S|dv^p)4M?92j1D#4-@)tDo#r1o@%Bh{r8RHJ-E#?qW`zXqOoXA1G0f$Gx>gWiH z^+Gas0Mx|nZjyiaN$mJ<-6J5TnQz?53v?b$W7&qTMtwP&Sz9g&qQeMUc=O%)@7IRf z#6=Hlj()8R2$Eq-g&qi%IGFoWA2fe2`CxX4A^=_?^ma#+!?>&0VmcSw1PCm9*Y|U~ z!F1c*jv0d@!=ir~Gm;8!J_i3r%J0llBz!{+UEHMlnv?KRX0?}H{RDN%{cWm23fz;1 z2_u&7Uz`0Oj?Ox)$@gvJLt-?7bR(s7NXJB_r8`IW=W$c0=-LZ8W#6R=_wG~?{rz4 ze7GWtLP@h#k@m;GTJyC&=Qw6^-98sQTIpK&aZR+w+_NwDjlfatI!5Gk>x%aLQ435r zEy&T@hkI$YaakvB{Iyp!N7q~Cz{n=oV2uHxV{r<^E47CexXV5Rg?w$`izV5Div5^- zDIO#Brl>?avpcl6gADJaN%I=SsHm91<{FBM@>k6WXl{x#rcD2rduJxc2Q`QdXu`KQj=B0dri1b8HMDY0_j{y?Wj{Tc%pI z-L7kYDEOEPX+k#)8lN;JR4c|!f81kXFXX4`mKPQvP1$9i!at;89x4-w*Qc zg^s#*;!r&wJ>oMFAK|kF%Ac%_X~9S1WMsX@L10F$M1 z%JU>X{G-mY=d-f>@{#yxwZF=*w=eK)6Y9_a8Th+D@TV1|{3v%1eY%uRLFcHTY(?B7 
zk3+&sy#AGe15y~MP5wYq_?oxL$bR_;hWD9BlhFvn^d)s9mCfcz^7yn#sB^%WzMVzW zPs9)BK{?)fwX3Yp+L;Pep}4;;8Oudd>pr?`|LLO%VpE}}aUtr_`aCGxmUoK)<{DlyhPcaMcgQO{av$%9hB3U^OZ2rNsuc6p*qV6UE>eNrR0E}AU^-TB zP>-4$3cSbKKKq-c9C&n1jsr?cN@G=j@9+-he6+hRL3Ivs0G;IC|2(G0f zq?-)KdXK59{pxRS@oBu9Z-UkK)@Nq&5)bwvFz`>l;D;}cQ6Z${WSlu6_{^Gi8N zCO7=H{ylK+x&Svf)_S*7=ee6OTRFv|$*6RX6yAtZ0aE4BsW#u9J+?0T>C+ss;Ivf> zzaP+@^B5RB5FM-FytQ_MT1b0l>#6?(UWt zLb#}@IqI2FxRTwZskDsjJ1B?yz;23Klk^ww`JGbbG`O87MmL3@_d3~Kb#+auhg=B> zOp=BzhV(P;op*C|weEzMSVA`_F(){*3H=Y)*Q;j^j38PcQR1O{*1$%ZZ8@bDY@8v2 zk~$ei%VZ*_u<@PEZAUKVBop$shw&d6KY&^8JxrtfikoUu-@+DC4AvGi>gFDi(9#$U z_s$K&K!r%&!z8Wtcr^@nEH77do@rsX^OVHS*Ck5`7q_Z4BiStTdphJ`CXIxT}1_SnQU4s;~FGPsPRD)X+ir|~RN=u#z zwyGJ9g#z|h-xe zkIX8PNIb^LYtYmW+wjJEUt7Ki@e(}@-R7Qljd?l7Bjf)5pCWVmz}#O~k5I>O7-(b; z$2V__wLY@+rO{wzfv3Yv;|A+Q;2}Rca}j#uaqGcN@YhN^3gI0hI>~c69A2t#%naO% z0N2&0;;1XqM_tVdO^#V^&>gZj#Du3UWaa)}Mk^iOK$mj9#IU&Wz*V;4vF~CSmM=jv z4l{R4qTEjZa$%G-C7f_s)QgUDEb6kPDVDu8133Jh3{AmRhP#9Fg|9JP2gZruK31*E z-tQW2=EB*GJu05kSgESlpE-rqXt7vA0tz@K25%i3>~~-(rW2MfuT7NF(o=@~d?gGN zKyRI}fIGz)OLxj^4l~*;r{&o@roSKGQH%dxzS%GeO3*3mu%}8_%OcdyOZDzXbTTLU zl1axrfe-@lTt`UtDTuuGU0gOjxXoaCx-(K$r0e1hi;XTvby(!43=8$$6gawFu|8KW z#df~nNMl(+QP;ib1%MWu^6*(8nR|4g5QZ3ZlEe^`sKLo5`u!cv4?DbK=aAA zvSe+eoaB#yLja>|Tl0R<;54CPElt5CZ%%OUe}MYz;I)z7z^;(Ye+Fj(J71s{DtM!Y z?^8D(!r^{&Juu~m=!(VFP}0uZX>O`9bnm6{@u1TDVsC`V6S2KC-ArH1lL{Q~V(N8Y zz6*Pt<(8Gn*DT`oRcEzi9vZHxWHo;n=*te}yE#CYiEnyMt8W3ovL(u^gSa{LVwhfz zmP8GJWyas~>T;V5-V2Kue)Q^{dk7>kQKRn3aIJCDJ6;#v#sML_&Wh#Nr_v`uaC|7p z{zv_Cn(omvNbOEAQ1J=iaA(+84IE+kO5q53i}#>LzQ%)NZSg|z4UG|3nRdF^!0wOI z{{W2?BMdJ~4S_K|D!ZLqJd&$I+@LXWub!WgxK;h!120rbi8C}cj4)7u*HXmZ%y3x+ zOfVuDBM$i4U5tl6vDYqqp`Vzb5pMcBBK+0GKy2RKuUjIg725}Eqgvg03TD^{s*DB6cAX--xk(OV{E(dW5$c zoIP`xlhFdzz)K=B>M7wX?PFa~%McFrR?1}6Q3@wKw6+!vtQYzXHiP>yzDH8@(BN=< z|CuVd0#BlAywqGi=;0!{Q8xB#|$^Y!l`v^wFjBSSu zuu6X9zF1lJ8DsczJQ*Z=ZPC7XuV_miyOC{)c^Nw?(#3OZGVDXO?pX`kO(_uX{pBnv zRvWwk#b-`^6jNDkd{S;*$r@ikXOhMHtL4aX2rt7x_+VG_pP_TF;VRpwE53e(_pf}snH$o^Kn;QWYWK*U74p>{I! 
zMm3*r;S%AuYhs99W7+1I%lj%pU=OHkP{BYB9!a6`pBO1B_R%fg;SoP z2JIoQxwp-5Ck$U#EMh$ymYBP`zmZ6` z@*73j9f(etb(oYer*hsufY1VY8-8)2mIXi1CV)mth)$>G{+HS}Y)BZ%8)u?rNK*wvJoh)pL~vJ0%Q{#qYjd?+foHHwgbCX1tv5 z*ZR*M>3w0&CF{_9eAqsvHit+$X_kj;{IGj*H8zq1eE60D*Q!t~`^>{#Oc2jyS=@Wl>}AdE)U0yVmOP^qAjD_rWhw8pdx&`1a}0F1$W6xAnjdJu;WaDiDth1lnyJBllS}zRXx$~V6`8*gYJdQ-Ake# z-ALM&S8-}YJL?&D?zfv?+rRO=0Lr}7_7+p$4E5+;3>=NziMTZH>FHRce!#vD5-n{Q zz~$a5(z8vJVS(H>HTMZ-tjgVU#y?TShFCJd-(<#mX)huyOMQKYZ(o!0nZlQM8?|Q9 zd`wEh`uh*cG~1sOPz=r%F_{+>&i)_ZVL?8XbzYivoCf1on=2t=V*CX9L#URkPKfNl zP-MZT@cfZb|Irn0S+P}tl6bK6!a0kBLEqX5zKLzLOaM92`mdtq}( zB0>d1S68-INFs|qddLars# z`Hg%t`~B_J6{r#p^}iobVbnzY7@|J+yk5V(lZ)au+}QUSl-Mv3>^f0z5Vm0qGM#-i ze&P4eQWB-ia8yR)lH^wT@lS*% zLwW+9Bv4w>ifAqfy$ajIin{U>XUunuF5>~ZGt*&O%0XqP=3DHSqO6(W)F%iswOyTD z7Su8h>|NSWXc2q~=3lDy88ylD$FbGze>3UxslvS}KHVv(7@nZR^G13^i;Y2bP%pX1|P*k}- zlPrJ68;R1g^2UVV{NYp~9~;ZbNU^Z9v2L+sPZ~lOK}yX5sNx zur~K2nY6>A+4A}u`I`rK=q6O_)@6hNrp>#Wdq8dYo1W)6Qqp!;jo8WcZ!pHq(#)a@ z|MsmCmyjLnd;RW4&FNqZAKFw`X~IBhc)*jA;|h5J+H>>kg;sEjU+V8ZAs7omB}-AW zvwI}fIG2L8=2UMJ1v|(e;eZMxY=ehEFRuV7KMo3A7%539fm_koe)oAV&t>ZFXH6YH z7SE(ZlOM0%$mv*6$;2bNA*dwz#$L~qiMlFy(Qu?~MU$t-GwU$dzbc6zU3A?bjO8SL z_swr9E}HroA6;(GE}wr-Kk9o=ZN2}PdP~=-iQZ{^+2d(zPMQ!?zBP@jOD{ycqIsx+ z-A_~Rga)%C_?N7{q(QuM2%f*hP8aL5H1JLuxKdW*hms!R>$cVpLgAcALkeV)LgYU@ zNO+jRz(9{BZtVV149(3-Iv`~|szz@E4j+23gA0`*n3ww`Q08lC2F5CHmRm1bEj!Yt z&9;prsUjjr$zo5O(N1&zY^ZoxaQu4-mhT|AnJf56MOR z+OJy8s@BR?K7_LL`4gDjq;6*5PFz?7Ns-ML`beyZScx9VIpgA`9S4%%P z>H~>H_^3@w9P^Rg;c9&gpN@^?AEME@_1uRFcuciHTS5gqME(hLxN54D#zwKoUb$v| z#1F#cm-6BhnUyxviG_@{&hK|mQm-!rKCrY9gf)rLa=NW@q9wrzg#Y$yG;jlwi=8TM z@d0DzE3o67fnWatGF8E1U@$Av=bp5&wW4$}_&fy6Y5Z?xpW>44$;c4=u^j$&Pd>ay zzNEG%McJbY{$klyk@PAdCUOrGn}8A(3@!@Pjap9HG0=I=MFfkdjcEn<(*u7=nz$y} zt(Pa+`;=yHeR1a6U%Z?@79&&I>tL+^2FIl=te%VT!dA!r9;<7;uB#CD(r;Ay+$}HO zb4I-fR-2d>!?bwisdxancz7)NK>Bz{W__miG10FooukokruBH|_a)rr2XQsl^<@1K z<<2eiz++b|(Blb!@V=4jtSem80tfj&X;VD5Y?Htvr8cf}!2C!2wOwCRVluhYW3s|J zUJOdd?f$(`d%_D|VtDD9gnDH3aUl$0rtLKfDB{nze!%~vN5gps=*vKw-Fz*LFQdulaYbAr4UtD5wom(126yjSR|9AAuEegS z(G1}7gZ}|&dUy|>LgGO$TZ3qZeB2!b?0p`4{1G^ zJk(7;K4P-A?vw@}*|i}dG@8{0b*8y4+!QmVy(G5B?3bOWAoecvK?hb0Z`|F9x%JId zT6Er9kjzb>x@2h)5N!E`L|S7Tau;3AqNG*I{MT^|irxy2-o|eK$=#yz8a7qI;^W=NI&Y|~&!%Wx?G(r| z;!dSDl`s@48z{3+L=q?hn;X6i7y?m**n~v(kv1&|5xw3%+v)ciwa{r~(d^C5_`twC z4S~L;93PO@<$RGZ|3+qGA|w~O3yI2OsTJegdHnqVMM&%%UD7-R^tQD)mISx8C6BQD zHMIJ1oRz)8O6fcm>HIOU=7aO>zKWYEU#1>RxEn&Xpgn+lX_Q?TW1EoGA;&drRm_M* zWo_Q!yafjPW{yCC7t2AQy^4!5SjC8c(2Y({*Dr)gt-bq4ipdV`JIc3c?s9vnGYgCa zQy}c%$)9$3j*h$6MC;BvGWNXDLe-Hi_-l+f?N~bI$;EBEFVQg21{*}vucfq}P-pFb zfLFgBvYd5DceefmpoQp3W8Wl21S;-xD%R1@-Umt+kCwPWaI|n!&uVWOZ9fQL5gu7j zaasZ=rD=V=7xpsa>kxufGk%aD8cw<9>NhF0h1u$Rxvm_YS{qWq8jm$d?B{Rg88{XJ z5@N5D`2*;JA4yagBpeQ!qYfyO3!=j`*!tAw#* zbM{qqtCMT1U_bWn2^=Y3idjo8cjl0^8lLu(un@LhqHrXWLymJgY)E|(U6IzYTxnX% z&fZEpL2R?9&As1R70qEU7;e_XVLZT`Qt4>+3L=~r zj9pS7lCo8Dz1a9#&a+?YCrQm4W3%>{sYxPwuY=;!l}Nmm)rR4frt*6maMXceIFN2* zFE&_-aV;Ttya_m{lM-=bUCM8s^KeZo>uLMd5(^tHeJR5lCMh~ZqGq8uz-W0<|1x+E z)FAcLx@b!JZH~~CMU5WJ$UV-P#O$hLN=l+|ka4JM+V~`m9P}^9c@!E8_i|zLiZ_n# zW8Gvjqtb5x+3e;40O)jgimgCN4U^bN(Q5GH zQ&c0r@*V;@{SR#1r^sybO0{*h-TlYrf;rAVta+JPN_<-Mc8p{}BR)9w;1#y#46lZQ z=B8w9#8poTJ~sQcyTTz|(c1wa*$~`u7z|5wY=%c#$3mzl6(LLtOJS+9P81loA4x6* z1mq<4yJ|rrx$OL0=Tg&GeV>fJWi7e&vm}PTN?8GTpPX_~8~Dd59t2!8s~|B`St~BP1`EE7RFzAU*JqU7%_Fu1GLZKX42q zFteUVIxLEM0kmd?zQmkONpFWnYd`a`1}iK_g0 zmKZOG5)pL~wm!vRRcEiPtM!2N*kw>NM`Qxl5+*4G$Vj#^aJ}9$rw0EP$e3x!x^$;R zB7^t2?n5Ju@dXeg6#JxzF)^!_21Is_|ocTU;jN zgO~qZ=x#?EYfWMi5V4jh>rz?J5GG!9? 
zY8^PaC2`WO&S^B1vo!6&dbgcIltE_9v%2L&sFbo4yK5Vd4^tDQRKgLyH+e!p_omC} zm6A|+Tc}RbkT?mcI54tw!SqTfpn9z32c&rT(j|hQY_x*qYQ!@Ab(jf{ zfxH+uyx!u%wa;!+vU0rWf=WD6tLo1E>a7QM9EaChnOGyY~@F2EM zYf|(|jjIR>;j;$P0eHQ~==>wV6uurvLO49)*eKuQfg85jP>J<>E%1H5LDz}RxiH@O z>_u?v4jlEKGkB1P2jE8jLcVUps1anBrgAgWIvg#f!O|KnBKkDl>z{L`AN>(&VssR; zl&g%#k<%3i10M!ym4^&aO9R)Z+52wv;+#Xx0!v&|>Htp_4&-A9JfT&12olHdN#Hl7 z72Mv@bMh(TEY-yL)xr_e(YibxDytX&w=l#2^D4X`s}}u=qWAjV+MPHnC-ACu0Uj+R z`a@oFE+X(y83%pH07szZiwTrb=E^&u&Boz*0WMTiY9d@rkA8QlCP=jUb_2QP_~%oI z?T$SdxeeX(SZ#r>LU+GL>aZ?IQ~NrjQ&|+qM{!d{`NwyWcw9M_{6ylxr6^JKk92zSZH&! zD@_Qo=k5>-P@F$~;XV-{c#$5k$h3HQkh6@wAE)5#jRKekIBOb3XYrk@&fIas`E{Pl zRF~aRG0<+>bIx^Xh~OtU)`DuBJ53G8t~}-d73FDs7~D;l36`Ed%#_2q#1b^&Y{ig8 zDC;fG*0`HQZV&_=$|=vSGRhdsH+M}dcUyR)Fat);wd~fg`i@aCl-bjM=HMLJxnkX8 zr{i)ZfgAX}%uV(vgu|1Ol8wcIoNpp!r#?@Mx945%)SLDYMzs2jPA-HtK>&w$DnW0t zH=FJX1c2u%+L6P*wI!FY-^bN%g^JRstF@#OnRn2h6mSX!(v&zDr65EIv4`WhTjUuu znjcqwZ2AV3GKI!W-r|krX=~cOnxMput$3PPxqbaZmB?KxKJ(+a7g2;Xb~^M>tySx% zPzOKXUrZy!lP+X#fQYb!3S!aQ_OJQ2X|l*?ZzXN6HsDhMeV69P!6x3Ws=^^M)$>o$ zpp1u9^l-PMSA|otUvUMNhdV?IUR5F}7LKNMAJbcgB+HI-P^M0GGN-5d3xOodmOXA1 zQCwvSVQbv!^HF16d}PG%2=^Vt|L2BpgyMor$|y_hF-wQBr428h^2|4@>7E9*-ra626y7C+R>m>>tEb1-#IH!AAG1z+#)RFeI^eI2U7>1M0|g< zl}OQ7j1{2NK;d45UCvA@bd8>?Q?sHTE$1`C=O3f~)X%Rv;68ijt68#mLy5y!=o^y6bFG$J zfh@uo3mb+&j`&OfxFX#D9+Sq*^}X3?V-g z#mF@Q56@Go2C1~TNdxP4JxnuUnGcGn0z9w1o;Y|YB&_-Ojd9mhfOPi0<9VU@CzJ0M zJS>dTKd~;H`vGZTNlnl_yylp;N?pRvr!bu;3F2pt{IM^86fNF9RJ)@wzBqFg{23%x z=l`=_R~W*Z3XZIWlYakxRsXm7q??G=7dL?DqHXCq-CER6C>Jxiy>YuFSo{zB@(%`F zI9Rt!aG=y^y2-jEKw`oi*uLqiC*AD|qJDUG+$#)s{xvgNV$pF}WNpzgi;b5m?)|4u zLJtmdbRlJzUl8g%0A#I?65J%YvmKsDnkA;6EDr8p)^qiN%b_XX^R&(q*;e|K*NnvG zx6kJ7X?Rj2*Qfm^{za)#U8sFtaLOIg&1A4zJZ#CTj}6~&_{10V~lpc!t=!XdP%y<}62=GYT*T;WlrrcBY zSSXrxta`_)wx1ddg<%<`6^0UGg+lzMCRyyR~=q;5&e>B7=xoA2%v~B-$E9frNc37h= zdzV{qpRngLX~*9Glc6!b_Tj$K1e=pB9VG1FJpb*=F_P8_)g`gC6UtLUvcs!jW8rj`UP)pIImhUmFW@GWZ^n$UisjfgH;ED3k)KOBlQJ)UQ9G9( zAd=-Iu}ANKA_;ZY|6pkKiFa`>{nX zyu8}kd9;TYwxS@TmWqqkU8%nX1aM1_0Ecm_#M=!u?{ER}sLbO8A9@+CB6pnq9?<>? 
zg4|~h7*>`ux&em!m2vmPlL>*T9kkh4X*e9Azg*8^9 zPTJ&1b?0vv?kuGGt+xLkWwqzloT`^=ym!wb%vT{4s9)fPWI4;p{kXeB=a93gN!ucg z3%7#oVpzF(RH@RnTfGP@4lTh;b(EKZCshak>heMGBvEXit`l}wZz?>Nr_}j|KKE_w zxNu{fx;PQz$wlwUDe7wa3H|%A??MNkCuSbv&JIgl-6k<6gA{p*P5>8@WWdECjrEa| z?7Psl!U5{PYfJ?PBc_yJhpA_};dT4720X{9c2gW20#$ylsPPb%xE>Q?#tm`y>>(l{ z=hxQS2}Nc9LUa+lL2>kKaK}A53mLyko;0Cp@Y&S`ud`)`R{W8*LVDQgQsWqN&PXRp zu~ItQ?vGX3mEIK!R#_=ee9`s!w!5VTz*B z_}-6COH_esX8I!x5oNaQ5^C`-#t}OSB`FsU-%2k%`~Cx*KX!&IvRh7-!yETzaAw+v z)grs1!sSR!Hyu1wET#kEq8FUKU14!mStwB>lf!CH75cVd6{( zC<0uX$K^ri#4!ka5!Ds{z(2^ohj+^l$UdjkAj^+H5?_PPH#s%{Nd3wljnj;}45F2T`6GTL05Pux$+)jmw2Z zDRhqG&B&A^^x-1=jw1^TgX^jZm8rl7>F%nthri~{KUoU7@_qT6(N}~jINPe#be?%R zS5+Q%PM(9KxcZo21EB7CW_-W#rE;fGIbVeTla6Dp;|Je6NSRy~3$5w+ZbV9+=y}?} z1B2n&y$SW;O}LTK>IQvBEa!y6W!f>bWxaE+Sk1pc4iuxwYi=<~II>`Cfp&oOJ)ycz zgqKZ~S1KtSjT|*6?ARth)qDkR$NuJ#B+>oM{mT@HSH2bY&+naX<}HOF^d5yHuN&(M z=Ws(K|2$Jq(#^4u$5?t=>0-}xW*sIgr^=}wq=f0v| zmOV)^mxoWrU5Z@vN)YbFVKE>(%}4rx-Tgk9pVbV0_va{}E@c@t3+rO!r%-OK*6@oV z?Qcc{$B@tolJ8esYH3O7uJ2v4V-BcxFs|KK<@*AQcJ zRoqIM#jaB}Tx_MhK2UZY|NBr}&uq8U-pKNnoeRgT=%oOb-|37BXePenG&9DaCwikOcv=B5`l2)>k{5jpNUy?mKTW;sL7~ zss69>bk5I{aJIS2`ZAdn=KHz#45)PFSlgX+7~`Gg)-OsUolYQ@fgqTmucchauLYHY zv*R;eMXvyRvI5a5a_oFnA`YStf?@FoWGWI9-9ioQlbxtT7rat$L5J7xvUmNmko>nt z3rq%wWL2D>wY^9T2<#ZZFiZ5m5xXJH8P@z*rDE&k%=U&c)1m=?T&G=3efULD$|~j@ zrgD!w3TJi+KklMnu>M3?{K=lll9HuTl*+t=3|Bz$P?(T_QqrKCf z%2@sZFi6DXk;2Y2^aA@|;xtSRK*-@7TU@ zYmx8Fr6gSqM${&wu0HVq3;$r0?w(Iml0|U(HauyJo}u}d%|gweWv-NejThh8SO9yk z(7BWvfo%eulYdBCZ%&fFD+keB2uQxnwzPIT9v^x(BZ_~EVM#@aj&a=ElSxFK=IqvG zLSL=H{dy~DVF%9AKD?#8w{s!$q#L|wDjfTZrw0U^kd!JjmUV91SCQAZ)sC;qbLN3H zGBX735<1@T3=QU7$MfEs$Pb;;R!fbgI4^vTre!29N4so6pF{CQe@uE+&qGUG=gN}B8o=nKc%kk4BNP3kjCs+#j z`1~KAR=U5oa((X2QDkZPo!$u*duJ{tYF@uI0qxxQ4*&uCCBd5a9%`Sf-7sP8SQ~Ds zFZY^;7(*4-{pkBzPYVg984Ml8zhsDXk@Xblk+_7CNF2OwLQ)$wF7n)SM#qlcHj`V1 z3k)AnNq_IYCh)EJ1naA)G%2;5?MT@AkcWx7-{$@aT`nco{oYNNTk}wd+rlO{G$IsC z2+^2_741wvP1#bSfvFC4>Pdp<`UqgUl{q2_TwW^D@G6A|kmGOzC(oh4SAxc9#tWA$ z^gFZ>8`MAoXocP52sF;it|Hc#USChM36FI<=az@LyGslFWwU@Z6C4zrT^sw40fKVrL5IYJtF(8x|-GG-2J~^ z^p?nzinK{HFCMgYCKODb7R4NV`!V%KLiq0hUUHceO zfnsBMl6oSA-C~%eTW3GUU1A6QQceYq;wb8BdcCjW^kXwrlu;KZp8&5=geSq2285m| zz_-7QiCTUOPT4a8i&@Fs7M`|(TO9|WF!k&8r+z+#0+QznY1UJY1IOC-L@!qS(&tUQ zJj*lAC0>i4^-KZWGBcyO0ceS`X4ux|lX-s*Zm@cYMs}J;4(TP@VV4PqKUa4o1dJ1?ZUe827{#qc`a(?Ib}H%|={?tieowTbzfb(4IE zZ&qohuv_3;FSH~nRr@=uXVH_U+U`i>b@x#T$A196p>l1Q4DF$=XVWyd_vSkj?lPof z2;8Gwe|_?(cQMcRHcD3eLXc@ zLwT|x3G1FSy_%_gQP7d=4Kk%y?w@D%jD*%F_IkxV3*b|CH>7@m4!AELgJcergE>dm zD6O!~igK$@Q_JzQB(T31Q2hyvAseFqdOWNSY2^p9G!9FJh%aC^;fGK}tv~8Yk zqxWzC>mZ1|#DW8%vtS^q4zY?_WNt*z>ygsnb$WtkA5ok-W3}?+Amke9d(Zp&lgaD7 zu`p*1w#>n8x&&h^H8z@ar($6BR~_Igz!lTPB^->?YHupx*Gglb!E|fjaLEW19c7V| z8q@e0w%oaBQ`jx?AHd~*-Thn|jx(d7D{sVA$mebR=PDH60E+RR@Ifuu!A^MVsx@Xw zroxR$!h8cdJH7F=zn+;j?C1%UBTxYcH3-$Ia+Ijf4Lp9F1e=px>Em&fCivym z*4ny2Zjuw<7CoPuOB$|TG<(cgD3e&;%7f%b{$c?3C?P3f&`X>Hf_L_MA0((eugM(9 zYbcx#eWyJFLnA`b81=MW;YdfBA9jU072C+m>AuEDhdUL|Yb5wE#DKRPZh8~@F^F7V z`&GnF2Pi?W8y;jSe9B#lJ54mx3>uF`+Dj9m7EuG0^HoZ}S|sOvf!quh?)dcixzKp= zvek2)4`;N5wgPns6!dbQg`gUt|34q3nf?V#9HDtb@HIS8rMMeA=K#eTwud^4G80so z%Cae3H`~n}8D}SeV$*_iklxJe)(hSz$}4JQsZGrM!j zim@QXZJR**r;k2Bn}}UM4n{#pzo4xrQIfV0>zk0>>!Ga>*+dxKCtp_w!=Rs`9!znw z1$zQTf`F+z$Qa-lsu<$Ju)ZMsoP!$OvZ1yvp?5QzPM+@-WVM`09iGKlAW<1tg5xdS zKhbim$bCXkjP<8=i(>RJEQ_Z*1_d6M(p3f@8?B@y%- zc9BEM5T9z_K5k*&cLZfUz2eK{l(T$n2U0=Q^`@mRs)ak>pO&L|zVZ&E{2rd?wEnga z?~o(Oec2TXMdw{=19)e zd?yvr({Tj^LmlT(W~0i1{x@ay54MR=#}L|`4Swb)UfJH%$wgjT>>|4o^i=-lhI51V zxU_D2KM-;WIzIo-{_$TmvJD*b^mLPk#4X+1fcMM4Lw_M7K+HF7ZI< 
zLIH@$gtXXyU-xYh6n}RM!fQ}Ir?isdfAQ#f%;pKMYFSDdzMm~Z!{z>9>#)66gUK^1_ zURoj%bFaU#Q_4JApX*w9`n^MnWOLTMe3ta+d}%#96S z8lCcb4|BeR`2hgcE%*TOm3a6Q5_jdv1U`ADSOoXl;})}un>0X}*O@v@N2~5)ywKRKQ}B)v&Lo>Pa4o! zZZ35$U{fV7-2s71P=O_!KTU`b;g#JWiGAJ;@I&1VS+~&{YPV2DF2l|7kE?;=^W1;* z*Af*qm)!c%>3d0-j#rkX^Iv@3ny;App<_s>6f*L=eBY)Li zze<{!zdX0K-=fG1K10}#BrS2@i?Y*t)wSt~xCDHg6Pkv~G*pnaSpTO}s!f z+XkoG^l)?ADgk4zDG~=C56SXZ-Lw31dd&I5B!EqH5(19`#v6)wp&*Sw&PpRA`?i-G zM%xbL;+++-?0k9u%n=z!6*jAW@`gTs)RDYAEJ+7Wp3xOgPuIHG`ji{c1lCwd z9(h&lef!}<(aR0e3>!v%Gk{5j1c#s0|GR+sZw znKs&rt4#G6fAg@-T~+PkKfsMh|LG5&eA_)bX4%v`?eI66Lvk^iYT4@5y+cY@K4N$) z5o0XW12fuXHaSl{eKtL5;hMjy?W>W{a?Y*5kBf)PJwdQ1aLxj1>Y%QpiiC=SRl%v= z6!=X*zt$W3Q+eO2lTf`Eljt8GE9Y#wxxDFvyP5BU$>0#C z*+SBGb=pzO?OA;+3{w0b@p0>r^&RrV=yjX-LL)Sp)}c6#8Bg{YGmz2Q&ymv)6#D zj*Ww7!FQ4QteH z&yXT#Xq=5g*;;tIHOmWBhof<~Y9DoQf~yKqmD8mAEs6&@_(#j)sS*Pt(2$9+{z2;7 zys^$JffCOMMl%48P5)7XeXQ|f$(t=H;il6&RDvt5Tw2lkc-9uIn3oZ8Lo?du-*UH# zob?|ZddH1RAbfQ;zefuDT#jc}hyA-(K;R>OWeyEA4znNt{Kb^1VX&Dg_2!7iEqzKV zwNgTjH5%1GMGrfD6{vY5=>w@$JVnI@TSNL*S zY6zcco_mp=f5rTK?$j7J-*)Nz1l6oBqqYwOhm9NWAm@?%U9l&oaMR=(>rYxA+K9)` zTdY!ljkQ8Oq0rzbO>esJFgGC|o3b4BD=X{KYIcu!Oy0xBYa;dsfX1=E} zW_Ei~C#yXO+`2i`OmqlW_4^F!yr{9?heke&$x9X>t2A{G*9 z&0wcDtTC4HCTb6|&&84kQaz6qBM_{G&$mmZzxEfbGqyEVFUI2|*zM6dTl;=vsw$rI z^tkO!P*T<0a~5WD*(_QmAMEhX1tBe!{uiRg1SSvndNFA z)lY{gUuK5R*z3O`Xr26-2s$YDrHALd;ruJ7pw0u5nRQ4J`jDb#0KMqcG_dfIfYwE2P=|%nrfMcD) zFg!3*UDkaJb>DR2)zI3agKk&<&Q;vAdZhY}4L7yzRjh|4Mxh}|H7?i@Q&*p!41)?$ zQkZf08wW+J)ET<#)w*Nz<2kws*=z;V%IYW%G)nfoSMiN?7ATD{$AZ_|m8Tk3%S}(Y zP|p6ZZ*51|(}aXeTN!`@D&w1+426?lX4Wg7Si^Wy&iKS990!bW)1g$C8WJU+%-Yl2=DWL&DM;Eary zV8C#3xM1`dtnCxTJ{{LDKH1>qmPuz&F(g7sAON0lzaR=Z+DhXc4QptcXN-J9s2hm& z>7$A=8fhICIMG#dNeoyImj@UJI2_g<^i`gg@BMxxN=ZkRt%vAdI-0`vcGY|}V+sd& z^w)tiGZ&1m(m^VUq=mwPfIoqacyr9=Vcc|(=66Lto^(#2- zp|)dhb|jQXBE-XSP*@dhq?5Owya)%aXt2?~_W2VUNz43>!M-xX;#K~~)CBWb z#PP{CtK^u}9m+&ef>=4^7AKNVrcGpci&na8JEpq0xv_#-a-K>MSUVSN9(NKm^8WyI zgUCES!^GCU7qPHrdvgzz^4hC~Xq+ikbF(FYXTV|e1Gs=rMPO?FFTC*`j9N{$i5J025f zg>Npkr_vuoSS}gTA|Xa*k&5q8y~YS-0OVt6Af9^dc#B3p9J|umSnblvZ9S6m(m0vF zd%dB9RVk{NyZKd<4kGZA(u^(=TEq` zhI@HhbcSUOEb)>F$UT7|s)7K=-pK-=d1qzfXwB{Hf!Y`ZP~2NM-4F~|%rk~ZAY^Br z0B>4tM*jBSiL#F>SKH=I;oE<=TFox6qolB-$Y7jERfqr*agGx#I32UbYX{)3!#@{% zY52Qu;T;a)qSZBf_$+)mey#{v{az$);$0=hj5#Ws4)quSZuC93U| zUD=Wgqm|mEo`f*T?ap{Ouc7|{;E$gQw7(O7!A*V#crQ?v4THt--;pCKmk<}Y3Nwt8 zGJ;og8u*$*ni;RABC{~#lHl9W*_(`4X25% zHCcWicn0G4Sh%uBSm(XFyF2Cq%PLs_=Dlkh4tH`Zv)vMBHvGtJ;bGB z05av-OOT`hLC=52zKQ<;gM$7t>L0YH{1wy3c77ay>aqB9P_*#YnPCL#8eg-jM(UaB zOrCi-&r{PC;@%dCxG;mteY#-VAYT54Zvd2~a2F!_IWWH>|x zcyEvfBxGfX2D!Uw?wL~MQJuGzl==cs2B_Fsd_mOwTc>MYC%mwoZ>(f?veM#czRwgl zA7>~5O{xJyv@YR*Yteiyqgv0dTxr*gs}zc>AIL^qsA5m@?fTcfhp8`Qy`#JD_uTU9 zMM2qgIEA#iwzyp`!q!)L=L;NBA!n46g;1lGB$0qJN$4_b+ne|vB1^8ULX-{Xxp#5$jfW7hmb zufwZ7{e)BLx}}`T)>rl%h?>)A3XrcVBT%L$1zke}ms`yg@vfhG>`&ZRlNmqku=U4& zhPV#|c!yZ{Yv4Jw&mTn5*lKp>_rqQuG9R}K=fE13usmAZumnnF+a16P5n0qY=zKBa zt6vz{NhB<*c@X{NOZ-^tfO`Episo@}moI1if59mwC4cMw7`Nh|3Eb$`GRP$*8g`G`5MI~@I_Vw>)D zVHCNvK3MP`i64xw?{zt+Nane`2_2+yu=7#bnnD-?5GwPQ_2VYJNv82^T4dTq*NUwo zv=OXp9IVsCS3Ha_V0gg*l20|~{{RQ=jgFaZdXhq}M6*UE8R~W}Kpg(Kt#288S<-$V zNeR{6<~I_VBDJuHDO>^>^1uKyfIm)#s$SkxP^b4DT$H6Iib693F#=SI&P5 zyno@J5BSGb@i&W>d7kh$k`fZbFDCXaj9~QVo_kkS;y>CJ-$>Tp-@_gg)8@7qA8MW7 z&5+}+Pu&AMc9GlOx-nQP_3Eglepsb{_s;*l4~g*Zg6s-`m9cnPQ)7lqh9+ncJS+?dR!QUlcwncu&Uq zeXY|%lV2`YE2t#IxCidGatOisayNJ5rFea_O7SJSpxn+-4*2bXT?{&U#SBqO=o&@= z_OK*(I0qRxC)&NZ;3+EJ*RS2s^QT$zaz4M;_5T14b-n}CH5+|NE^H?%{&pi9K~Q$} zB>cYJy7k3SxQ=-JK*hl-%;_^zK?W)?XD)2vOAIhF*qdpbQS#w{@eZ$xA=+w00cYm 
zSA_Lhlz(dcBk<{#N3&~}+?r*(sYnBV&`p0h-U+{w9~;GS5na*wth-N7 z+`k-uo(+FaKeSbW(SP8VpA7WrpgwJv!<{L-#6n6KG$nGt43Ggq&q3VR&gYVJs&g)i z{{Wfo;GdLPdp!k^j4_MrIrpe~{CH{jQa+e4j*hW-VaZD$O`WJs|jZVPe2VUR(< z6_NW(e#}1}KV!Q+Gfnu*;OL{D#kO}hcG_guv9rc3-Zf?^1hSu*r$x){CvF0carURe z@Axhs?2-FF{>gt1J`-tP81dJO_3sY&KT5XMlHW;+&fD!v3O&FR+&|0%k~awcZoLj` z=5P2ZAMKY9?eF1_0{mRkJ`-H%5!+~b||TbH(%{M$Y>)ckL%d9qj=NG+xtvIU`LQ@fzt%tvr| zsAbj-!bZHj^ZqaCjk>MaVtAQblq~a<-|zz5WNW2*;Yyviu5K%+?cF{gxS~ z^OEgcw1zeTc7eBWP~hhzsrh^L$*8X;c(l76NH3ZpotjW`GnT_C#xb6Ur?*PCb*vb! zKF?#Q!8M{fImNQYynNp>B!H8_VV20r9Y!jauFSd=X%c`-NWlYTv69&&6T2P#OQ2A25lnjDT)D9#s!46cA2HQPAU@ z@xe8#k48$CmqxFL?B$vZ?M^L2%wMufiyY){CmGy8C$Jp#=iFwMq#hvFZlt=?t{Q10 zR(Z%I6j6o9Kg2PP2OxA96yFX!jJlPM_5T2mC)nihIbL|rP(MLbA-h{C#^eGv!w`Cq zGCzp1aYF1?2{RIC2f+u*|`$_S~jP%bF>JZ0mrFeGK1ir#;cfAF_c)$frC>UZl z7RF8h9$)d#!E<;QP4U0PnYDJ8RGYN8X4(dN&)QaR4ax9_ zO8&#Nhse6|7l@rSb2D#_54S*(#K8ROF%Upo<~vVPE1prcM5WjI{*i|?qfy&UHU7Tt zr+fQV_y%trd{gm9ivIw%V)7vHr-PbEE!?n2YgM|8o10w);1q6Jg?k6PqZ~8+P1rxDNB|yeJ%ZdEc4$D{2K6`j;uAy=_S9q zg5GH*zLo&0rK4cmS36J=!~wVQHTSQAd@|Ae3;0tWmm@`O55oO9Ba8v^ZJzLcLx4Z6 zd@(eFIMVK0gB(b}%evu|1uUvvhI(XylhC#}&3=abHu!PluNQvKe+o6v2l#$TwB`6~ zV3ORK;ffzO&AKe1rF8^=LBP&8jP(_6bLzsP+t>7F8~AGKHD#66k4>#19SGyo+wrLX z0O1{2zjrLiz+-8^I2*H!5;}L!wO?&3TADzz$}+rxfIvJ0$n^F8btn8IH$q?EPW!nm z;|q_&InPd=aZ;OGk<`a;59x6)nV7}^0PW|VxW|9Wfv4%I#FLYb04hiX4ub=c^{tWN z_!RA)>@LyMcpxvS;~$UGl;~PANPcCN0~{4=KA3FFiB>w_OYRn#7AZ;^ls76ZOV zzvWsH{7H_=+(V<=Xf{E4-DBaK1!2cs{zEo->Kd0a^ZveI(`%vU=KD#N@n>jc$oWz> z3|JmV134p~&+@%x;U5-iAzw-Of2`aBzci3&vp6T}s7FETj%%m5_{HPW3`^tvSRe!{ z!G6#v=OB?0TpmYn!>x1LzmN56;2%)cZ)e?-e%YzXE<2HzcJwBfHM)L3@Bt@%{{XMN zdVDhYtjn0b8EU|KHKd4tBAFkLG*>_W00~Bz7?-xPIXM`FH|w^FT+;rSl$z>(ep&o5 z{h}_dWW2rjQ?IPCHru>eV3Z_&@d#2`z##7A;~hZFQ~v;kBk_LCT0J-6uZSkMjOE0d zWZ0Apu~Y?kKPkvuXMzF9&+Qer!IaaqJBVAsdS4ySIKE~6KPtW>;zF_jdNZ6e4n zG5}&(SA_sE13SVW1+*^~c&ZH_#s2^U^;>qtvi|^XT4}cWP=#J6WqD$YEpT^+4Dv`- zn8mayWC}P%@jo+{NpOTsv!;c%-%`)ho6ViGgBqhEvlSVOZWF@TbJp@qN?y1L3EN?=4|wON*^HK%P{PhR*I4i*kgC z6@fV%F5Fk`H;;TF@EhUu38DD?uOeM=ypcn7F4VkW%8{`|iJ4hLvn+9DU9C6Y`IKwg z>K_j@FA~}K9sD1vPjU7L=h9??>gwh#FrzvI*hlcFAP?#ESD1FU{%VDqk2@o?sjEGP&YD=YVGIjtMPl`@M|UbZQ=Psz_sRhGHc?)s~X<`Mk$iY}N zWeQ3N`HGL*2+f9SS8UyLpX>}w>494W5I%97lB|D3=Daac~ zh^Z*r&Hn&iir>}xm&)(V+Wx<;pCj>>FNR+orPl|D7vavYI>^}b*3wBGUukogRX6;x zCgRv3SCT@WJ#WJwus7{P@owbBXYlJ^({$*-GwK@LLdsDyHVx9pI;4_-av2n1RYM%- zHTrF!{6+CK)|X^HFWvY#bt^@5n&-p!PRk$+%&Y`~rb$lXtijoWouRgX1&cq7niiUp z!4HV_okC_{@cphy?vfQ%+Jy{^Fu?#;Z0>9+W+W2Aa8kOB->#pB`Ph$Ur9O8ne69Ka z0GH-`Yv8Z=A_c{)=HJBs0JLWBJ{e+}PlZjm3ZzE9OUFAL7A3M&5->o^bL~C={{VtL ze#T!6Y+p^gwebeL_bCjqrmw9yK*;J@gTjhPWKy^)GsgszUcW!a$5{;4z8=+XkxF?< zGb~C5aB@|$M&E3xCj@jDx8D~vYrCacbc+O55@qjilrn?Q8&z0vNbB!Tb*nqAHQz=3 ze_hJ{{XUA?DzXPf5Lt6ui>qhkK)a1#g-avoz11q!$TBo%_Yk! 
z%e1g$LNeJLr~opSZe#ukrSQ2w;pCnJ`03&ss7{6O$HN-$iS-!XJ~M5m&#}qq0-16f zu0{any+{5EBk;$@f3+XNTW<|`D*HmX)wHcjTl;$%E@Y4GZ1A!l-#*|<@5+WdbASrh zntsB55BS;pEq=rQ0JTTOKa9Q(y3nrdejV9(Kf?EyGRES0wR^!7!Y4*$V6etXAoOAQ zdl_vbw(B<9 zO}?e#JyPOX2xqyt^9{T!pS)UmM+c&-RV{MzJ70)*DW*pO8eWBLj>M3q+_SjI`=2g4 zdRL_W%oge3{{YzA#d_03MaPYNR|UMAUm3Nw%$rZBE)UDoj(b;#c)t2;{ddJ$sw9#D zaA1k$KQIXpTzx?&j@(yn7MqnRMk`Gh<^KT5HS##>&h`18`u_l5h0bT<9;2yRX?k3+ z>aXTo={A^ac^L{d#FJbyk_aUH<2d7y_c?T8dpA=!)Eyc8ofJ3RnYyuuO!)~d8_=BH(*QNLd#^+1$mYW&Ac)+qkRYy(3 zc0IWyXWqR^@wO5+c5UBJJ$ibWMru>le@jlMsp>j@f#SPJ9?se0n8k-yl?LJ0Ah2RN z?OtsM?Q`N!gqn7_@a29Urm>|x@Vl~uRhAb`KT(TvrR+-Z%AtrLHV4eA0|Uk`TN)euoR&CZar&P5{QYY@JZA3Z+e7-j zIWLFsDIeNoUTSmd{wL9I?yqlO&A+^o^h+hgz1*{cSoX<0E))O&70~FPw6BFAxQ|xT zZ8bYvh=>f!9BsP*V<0#~n?cJ{2_3_&!yoQjU+63XIZ zv~t@bW&@|F0=*ehqwJizFYEgK2=-BLZBMzrAO6!n3A|~dTxwQ&{I)uTF@L4mU0AxA z4hi4@*&K8^>t1#HLHu6SweJGSp+v}*mv=F?*z!vS`LGG?oN#OB{XfNaZyZxOQ?W)Q z*B~VE+Ir-Luvv@lEB9kg~}3$5Dw945Y3ZfFx(3$6;J}j2$>fn|{~+zd=#= zY9e2Xn`|f2H0?FzNbEGY%&ioxFwc|87{TE2TwUGXrF|mFs9jGx0fjdbGmtWHps^<% zO?p-T0K;D!{BH3NkGw(f!@`r@Sa>VLmm2=KA^C-#mv1akJj_WP2bDcfJvgr}luCrg z&GRc`3P{i4UX~syQ;O=pQaPJ%T?L9T#0D#zj&qJG%OH6bnnl4mJ8*D)D!ReA2G1js zGoF2M`BaH!3FIHB$pC&;(xTsxo7ejOzpIvGjxaW_UIuq}&w95MQ8Y@gl06g-r|DGE z76$-i5ynMZg50S&&T;%FjiVn^54t#4RK=G~ic%v2^< zb7SV?2j2&`Ls)i?Fmf5U4czCZM@sk4*|YX+(|#`LSN9jbJzYCi{>Zq#({zaJyvLs2 z827Z2J1`N4P(lH@n*^PzMrL!3DBe!X`uY9e@Yu=S=!(AzukJswZaga1%jI5N!*b=p z4zflVDIDVjImjUP!5QU$Y3*9(8%u2(b_83&<$hg97XwbBU zzF-eM{jt;1x)0l1!DMxr-gy1XfVd+V$Q|qYoBsfUhwGjj_;vpP1n1MQ=GNBR zN4EHNb7;DuaH$%;g(Pt|ILU4Tf!&83=D(Cbgtsl?Z;Vr4!0Y83CyFA6AO}LKG7nyP z$J4cb`G3LGKegw9eg%HdzY6~VWLrNK2GlisM!whlE2O%u(b8guIAC)lqj?I`qlG0- z{{XYd2r8^Av%_?PhZ%T3ih8)b7g zn!ZEaNdm0S0)VW{4^lWihB%}4=I{rDz72dl(7YYt^}2mR4L4A_OL20>J1D1E1EL@p z4Izp$Sp2|7xFWLu0A^o}-vz!K%c;(MU9IVHe`dAquASojBs{u=$uuaomh#v%o@~w~ zcFRK=D#FY`+yLRfZSRT~KN7rkt!r%|5lg4a@yGK=5AO!>oC)Fp>fo&=Qwtov0J3V=O{`@KNV^sV?Mvx+eep{Z&qB7i0P zEa=EaK2QrOBa965pK9cq+Go5}jerMh61d}lbAiup2&-~k>T%3TyP6;lNF4m$8)ye1i6HdG02JMIjG7uLlz<(8WdQEl z7id74GM$s&=KJGKxM5;0x`hOM>IimByEmKKO*Gk1ExRyRNQVf>~$U*f+v>t z<{OyI0@>O&aG}C00;Hx+0l>%vjP>oDY1}&~Raa?L5@W$TvxAY2**p$79dTCtGbD{^ zs}K(wM5L3*BYOT_N9k1(;zW$i2rTW(dvlqo*i5~}Uwe4@= zw6VZ-Jt1`K`O2v#-KUC1Xw|_C>m*TN^R$pi91-ZAG`WsjJuh7GG#6KQTAj_s&Z{oP zY3`-_S&`L=9ZvQej|=7!015?oPk{Ann+;E7PiTnfOor+J$Uq}-AdZA_)O~mr?|v`% zq2VtV>IpJ<3dX}q8n@eFS#?=fQ5TfhwReR=tZOC)An726$;LjDlS{|MMr(T=*s^9`72iL_eSrDJ|pWt6Lk%1;^&SmT;5(yYdy8P!L@AQ zH*R)B4;xhr$GDu3r*Yuc19-Q^S3VrD@g1j!BaYVQd#L>Dr)3HigRGmjg&>XO6*vWW zCbj%QvJv|b$=kNmoF1X86jRa!Dh)lzOCT@ z0FJtpwns?u&B94B14WoI&(5U7~%5iuYj5GVxknMxGzZEw-LTST~Kyi;FJ(dR*zJ@olv5 z8DN~mxf$D*2OW_rJ+d&}!6-l- zdmeG`$F)lhy|ab$QbI@=!5GHg!?!s4`V3YnN2u1j=sESxN6Q6nM4&MPF_p(*oD63t zlkHj4YgbW5a#3578aCy)#z?{S9^8&=L9I0O&g(oX=W%xDj-Uc_$F3=_9-jd7g{z_x zKp6}2{Rh|Zp)~pm#vI-t)nj1-+A23Ven>nX!y8W@Ky%uwNqwl_rmXlO~VBsW1$|H?M`nE#LlZ6rCC!Mc`{0m;mPNp=~w1RZI!L!SkVUu z4f}`*wfF>kyiE@2LEIFS`JXBn z;DW5D0%)_fHOobtzl*&bPItXfMt zwx6b820MbIvB3mmu*djxtt7hFWCQHFTQf9j=-xu8QhMidNz~)!;TuuYo5on zY2Tar`;S2X0EMmLt1TK`CQTmy09#2&`$Jts3(l&f6=L!nf(Yy}?nyOyzBlSu@G!9O zJn$@-EYfOID()Mw+X-L}81gViI3q2)%OlaeTRdh7b!j#;uwymR$i{QR@sFV3RqG!N zq;bgvTBVvP06L^bRv2JO48*eYgTWc#bgdN&=>GsQdv{~9)jlnF!b?FE@@cW6 zV87Z|67oF1pJ*99xWf*uo|qtGHDxS(J~=>M}Cqtw_1ONnr+GgnbKF-K{;%N z0f7W}Jg>Q}8=X@~w1Fg0up$*JED`NePdFGOo;n=xd8%^wy28)`(mo@ZgEJ7pi01=o z&mj77j(XOSqPu@zA{|!CvA2q@mDbxuj?kZ;CU}tist?LTIrRrP9=PU{%kd?|yQJ3a z<1e~06D5M=1CAGvNW%e>L7+TcIH+&U<(ezpge(tj(HtTRlMS%`|4}v}~OV%zSDdB)gG}geX;R=NyJ7XgxF3dQ_*w 
zo*#k{EVEp)gO+(NeMdPSod!O&%g^FnN!28ZW?0yQ3O)ho4=1MXNC&O-j6-a%pED5U+}m9nq^?c2Kp9OER8{PSLM{{RJB z@zs}tf8d*b82Ejqx|mczXVr`f5Bk&`86MiUj)7YLKvS9{5Z9|yo5G!cdA^y(}UYP8Ud5r>081L zN7beJ{-22)>z~m#`hLHG&8~(200>9GFNgjSyM3}fgWt~=Wm#lW2_B_MIrj7w^KO@P zHMq{?1nT}F+{B(pYmiSr?t}SP(BHHkzKL;n6bwu{E|M*v+BW%X4Y~gS*_?FiUJLO4 zIPCQOGE2y!0@Bu7)s=<{nBjIACmmRhKb?Enc=BP>eed_({#HDu|_)b@k@qp9CqgWQ752Oi*#YtW&b!(x`vwmi7iKo8g`QN}qO3Wr|!eWzY{3rN(p zE9SKR$kJk-LPze~dm&)N;~n{`dgilfEw~zW!!VEs^WqHHQgg>lf;;|Pb6%8aJWGu@ z^ndU_Ll;^x_oP?Wd`lLar(Z6=x)t8(3~>=N%mAdGdFPYvdiz&JrTF-1 zKMA!Tgu_L*yVLa@e%DjEhUWHvETjtgw`+LhRA}X9R(5PL@{l24n4Y@XRcKRQ!dmHc z)|Tt0`fqEin&C4XJSNq6-9Fp-F4lWC)3e`OP0xcHc;ZJq}20H%$cvq!d>9!L;*zPSY{ImyUxPU0h$I88MGH?fA zI^^b?AB2284}-i!(bx!{eIad(w)g==w^1%eN#xlKNJy zCZ7C+ljULtSRcAb{HwL_81gN(JGmr3xzWsM2+kSVMlsmqKb3OhQ&5aMv!axfyu6P_ z{hIzIc)#IS!Jm#=pN2I%i+Frp@bLIgQ~NxCEYM!u2_7dT9DxvvjvVp)#S zxt}>4^VIR)y*qZJqkhY>LlYv~_>1Boib3{f3{94p&M^D7mCfMF?I(5);4r6xOGRJ2 zLFYUS5HsJ~n(D*CF11No*J*vxU#n+3n^1~=f1STVidm93?9V&{Q!S(CE_(Z9=C64v zji(%rjA=!=cN>mJBi9GlKZSNE+xq^$uaTRVa=P1WQi!~Ik=CV(A=CnLInS+HiqxLI zgz|s-^!V+}YtDM-JPdK$_5T1Sp2&;b)9_x0rr&B(UF&f(>DDos?}D)f*Em@7{5#l% z&tO>biuxn=G4WT6u6$wR--tFY)7?ejEmhfCB`YM;LLh}OK2?oas{z0YfF1#_CHPzL zo5ueD5Ht-{G&?^o#?j)q)b3mnfECIh9nK3kJvjBneG~gScuvpZ7wnPZkBfdK(@2-a zDSf8td*vWVQWb_Y3OR4yTsxDUqsbj>$i`HSNx1LQ-oMxCTK8jfhy9#9A8q1&cEiKb zOse-&#EUHF7;l@JAFG&v57!eXM{b(j&Zr!od!9;2r=I=h^Uq>3)AubzTsi1b4E>c*ZTX+uZDga z(|jYV-FUIJFST4tb}nSntz%eu#tBp=T|fves)l62DTY}}Ap(c32^WaN5!9=)+%Rd~jwXD-dJ#l3&8 zL)WJ%D|y@UJZDMO^;^AG8)>x}ff9vd)1f^NW5N8Z+dpWJhyES#m+ZyxpW+>fy_Z+< zpNG6jHnnqieDbTCJ$bC)2&d(YEHHpV^2|#yB#d}ZhV5iJ{j8=?f6_r8K01wj+9eAyq4IfeQB>Fywqg%B1m$Hj{SOWlL zk>v`i2_Ouy10;dXB_3uMNm$9dNaK`wX9b2?Ph;tx{f$Qv5wIl#4D>m{{6%Vb%fg!X zf&5*mcrV4)4Qr`s8jSWjm9ceW6!6H=$eB_>A(e(fAmcg3Zg`44FU8&)@Zxw2;mzH& znl^y;-X+u_MwxXB(|L6_+D-uxfdsbkZR{W|-kOhaa#}Ji6T+S*(!3Qnk1uqWk4V*H zzSFEP*o~9i+lxoIjB}i#T|iDoKYFi+Dk(W9h*V4lcC^+0K72*j zJaKyv1X(n#Yhe~S+OerEw7xIJSC^yYu+G|7fWp`!q#$K+-_xM-1j2`BP8VmVEr>+Q1}b>F8!`PC`y`#is!TN zEu5os%c|YUf2lrLF>RZOMkE4A5uB(!g?-EL)8T)Ceivy?qx?PacA24CLjw~QmuRu? 
zVz|pN`JCjQSd&^X!3;2>ZpM8EKD|D*&neTr(!2B1^}piRk&CG|xBkB`!smf}DE|P0 zY<|yP5tViS03KOtJ~Y*ksry%nZi$2(9O48ZV<$LfYWHhP9TUJ_6}YwV-i>KvX=7_7 z(b#GBs}w*+g|>u1s($V|Wah0IV}>SXZZ`p(`t$?T^Tl$%9z05XE$~~!I@B$RpGlh5 z&@KsS=N>`;7>tl)-PCZT=BL_8!q0EJ@-cUl*Zj{i{hb@fr{YDN_ZJB~KiD=eVultF z+eL8-J>+arS7}#*H5>)r5CM#e<9;M+R@WXf)h-ShR_Tzoc^C>g!OlR(r{~3Y-?Fr_ z+W5O)hs(IPdwmAd(QKqv@?w%19Le^1Gqy<1==tE}bC6V@EAd5)()g25Xy;^+FQo!9 z3o0;et%1;tgZksGXJp=n@w$^MHll(sQ_KL64^BzojQ%}|@-NGSlDuaa0|Nkc@6cCC;Qs)DpAxF!LKZ81%sEr1PA6pHi3LG&&Yz@z=!v00`*J0S{$1v~@eM!?3s0(tV^3 z%*)@f<0r(rU&3u8N|Ago<1J@a)FWb%%_oMUv9cr{8x3;v9tT~ko;_-L;cfo_U-SG& zX?8sRSoIyGpotWMHUuProQ&rnkUI0<9qKuB?Lqe~#oKL2V$8>OD3 z{{RW)?9CW=n%dsg6OwQO#|*)E0N}oP&Hy5c`FrDM?F;d2=YlcYi;q#7Xaif!w+9SLN0$MM4mT1> z#s^S)9x!PVYoiuyopzQX#DuDzI^|amdkzLk^%cmWV}5N;qU*&ms)hSX#6D*Eq9L$w zHwN2+N$zp=&P`UA#m@GnVG(WHfJnnI$95U?2cOcggIrH;3S7vTnNUn(WZ^e#V$HCbdRw-Q98D8eH*IqQZP z^aS)BdsSeaC7VR?P@&_%r3H) z+UbqEcqcqD&*R(cRm_Bw*yb1QC>h|l3yA)0(LW>m#OEv9oO<)c5NP-Flw*3a%ai5A zi;zbhNhdtyaC?gBQ^VE(ndjWbGrma(Es__|V;@2K)bQGAkwNE5yHQl`83!DLw?B9j z)BrkX@JdKITRGS~J9{;%yjOC`9Id`jnNBzZ_&N;Z7#wmBtzEY8(6%g*>DkJL2MRgH z81hG;?bn*!Nvs?)CA*TUA9?^o0yrS>4tjPu1Ky&2LO4i!b!hh~c6lOX$nBHP4nILc zlWxd4Vz*>kAAkKE5d@(Ed_*ttBry~K#Bi;_tv zqUN=xMq_fcDw_6(v>B_3z-n$@JqE;E4;66lQh{x9({x3i9mv~*gEU7FE zZf)e2?4V-iFbQ3`8Os84PvKg6p1rSY{{RiGz831nIc&81wA6G9%V&FQjXO|9V6%r^ z+vCH&=)hgaYZeE_+sPX?c`8N3^B+q;bf31V6V!-NCbd+9ffoHZ-}7PZw8eFam9A(Lkp|8;~+4` z2J)&pj#!R3;=3;!_#4KaD%BfU6WE!a+8CxUb0IP^E*Z&XkH)EZlfp9i3s}@o zfpr_bTS9{BZ?e9nr;AI8U>O2`EIavb%kvobu>|9D5nW2As_j0ze_j1;v4XR9S4@`X z8%feOtFOVPc0UZiVO?+bhWJ16OZI@#d|hE>AHvTK#jENXE%l_{Zl`L@;o1q5 zlt~k4;njm~>Y!%4dqTE&G_6MnK$2995CQwS#(zBi75CTd9`~A8?4A2xwz1)>yQ`ZY zfyOwbismmW%_W*|ox?itN+}o&NQ{M2TL%2eSdu9e=mcj@mxj_uOcS^aj01!J z0PC-(d;|M4{5;iuFkW~c#kP8++IER!JUVQem9tyQ@-@pWtt@IoD=WrS#&)nPf&t>a zo5jDf7lHKO3&$VA{{RQ+F*U8IkltSxmRZ$?l z?YKOwyn}%pPDkEa>--6({1Nyk;mh4y;wOi!{8{2Ft8lhDZTxI4te7{cgNI z1Fd93@n6Khw9oAk;Vm!X0$a_dM|&NemX90?Ep4q4S%fJbF@u4Ue)oO=73JbF&E6ER z=KFei`RKh)h*E8HJ-jg{t$Q?FB{uS zKY?^W_Eyq8y~F8{!sF!tC~$xPH~?n?y^~7N_5T2gKeY4~nudvQb8DpD$YYaq6NE_A z6+q4BqmejMmu4ZmQEUk+)oD&6?Y z$I;A;UoLF~vd9NKkS(B#Bh{PMzbv)kF01kT#1~9ScGqNP_4#0b$2nu#zoGvC+Lz(4 zfHm7|4-abp01YiCwa`2~zl=#&9!TCy1`JZL;CACR z^w;fUrRrWI{h5Df4-e@U4K(TSlUkBEqa2?z#@f>sB#;0B3CX~~=Dey+8{x(EpM$(j z;Xe<_rs^^^#jdq#FPd2Ani7RBfMQA+70%=bXetVT8w1=wY5g+7UxUB1CY`7zAzy+E zJmj|3VLywt`3jP8*BLDM~i{H1fUg z^8H0pYMkvEU-5VOe!qF=dIx~gFNIzp@b0Vd+FPF#UF!N=+CHo`I9K~R(lxyMTu&F8 z2=iK7QW=*Ei#Rsss>3TxkcdE4~A;qy5qj9T*l z0AG#2ACdW!ru;0?ZZ(5#s6Mmm}ta21JTJg912D*1u5x8-C260)7fb z4Zp(Ag*rBsBtRdwC92DEPpOsnHhzSCE3**pcA1#Fk@uM7pU1y?_xoCAS+Ardcx#JtCQ*?T5i~X9)Bj`5*!~zcrsr9d@ zejR)Q_%HBQ2(;gYpA9sP6;(4O#jTh#vB&XD`=n#_Z@^b+5fPSk`ABdEII6cZ7rJtU z<(mLwIqV0owQ)}oQd_%ve_H&<%ci4dNMmj6#IvqRCnVG`JOy&u81?D+isil|d|mL5 z!`NE?09l;P9C{X`tI76fR#R9GFaQp(X662?Yqm8HpX}iGlRuO&u@{JJbM2CD)R5yDm_kLfL0zf z*H8RRd}*xT_>TOskVZMj@*=8FmQ5$Ke};CbCIXo)yoL47#sqZewa5xN5mIQp?FgWeUO!xPycN(Kz#(#FAJcPoM zKqT}y;dst7j-#H%aw%H+`sz}bDSm6uEeOd9QJAU~2nS3KG0t(vHJNj%+J;pY)43`Y zOh68}>PZ+SNFRqA1iN`4DBX^8%KjMU zqqwuXxkZY2$IPW$IUzvk0Otd*AB}I#rA8Q_0Fa@01m$u&XQegt`*=dnD{W!885w?2 zfJywnoi`=aEbpnxj}G2QH!aGiVyZ?n_884-Z>Mkt~t!L&(~yGJ2j)K>SV*r)%!Qo`H7Da9??Fk$EoBxU((51xN=94oJW}VCOwJ=8`Meu4MUGI~4_-#CZf~ z<;vsJr(@Qg7lx;n=le!Rk<)1j<&3K5pdcTZ@CfWVtvgQ+T}ctT4U!as&P=MHatU3* zSFhCm9GX~Kl#G%ZUcCHU|n02rZ5dPZ`1W$-&1p z(dgb8&?3LLU0+p-+6f#BqYbf^bx=-NMj2JGNB|aG|hDCg9Z$NnO? 
zi!Js;Z)c?2gV1He%OD*-3lY3p??H;g$aY?8&0|1L5bwTQ3Y9PsNDuaJsy=msfE^3zB7b04P?JVN`YR+P^ct z9%-NL_us#y%pBX*}_ZDalk5B~rKAMh*}mp&~0mi$jOyeOq6@bvbYnw_hX z0JNG$IP}}=Ur{_o6{Xetb^EvezW)H8jX6D1cJ$EkEhoX(+T70$m!_MBUPwR=0qAm1 zJ^ujCYP5bH*v2t$w;9MIC#UK3uT#=~9DGywQLS8Cc-P^VihMf^5pR(-?+@Kt5Wzsk zW{pAliN+2Gt$9Abt7=*u^22b^%p^_g<+3&gNXN=ge&&T=noc`^U+@L&SFg|L7IgIpNxdiz|fB;AxxBBM255ayN zhsE0cwS=)p zKn_RV?ZD~J(>1{vQ+1;xtp5N-{{RK+=cek%Yn4dQlrN|G+x&@Q{h)p%{0#k@ej@x3 z@I1O)9v%3R1XjK*S@c-$F6Tg0D#vo?%!n$3g#@TBO7Ta<{{Rc= zx|Nw=d`kuF@(?3eNFr&K3CIMO7{LcQ#eAjvQVR_$!FCq5LMLrP^pBEY#X>Ggz&_w} z$2qTh{f7QH>YoI3O)FdYPw^t}!rmyfhE$&VA1#t}ED1*wBP*xKpn|;OzIQyy;f}-N zY2j33I=1F}UFe+P24 zw)%~g#O$pc>UPU+!I4RjFzUPz!)ZH(dVh!i0BDceGseFR_5T13TzJ~z+c&zzHdh*H zYdIy0q_1>3ID#q}- zx^CQKSj#tJkDp3hAC_ksbgKP+q-7cM@+EenlC`wA#T1^AR+EvYDx_rx2DR4x@6%W7 zs#i#*_E(F&)uB|ANVw*#q&n%1=py)C`#=Oh+GtewFjZ09w)2yJjw$rrlve`id zVO9lH2#TZlLZ}1V74Dt`vC?!{0JRv7*GhYV^K^Mw#+-VTk3PRQy?B$9RCHGUzpo@h zPEt+uI3J5lx|fN(Rj5lZ?xMD`((R*D&=y%8yJX=FetG`h`ge)6ZwuK+uG}nyYu9tG z$(hvdgtFszL$r=P@n3)ZRk&o(bt&SSM3Vmi#B+ee1D`4ujAM`QXYsFuzu>0x7VyP_ zo26trjo?`K@(7AFB#&@u;h>#F-s^Ac=0xQAtdGv!4@yrJ{?a}M_-}o09Qtmru3N#W zTRbYpSlQIFh9I)#K?i9V$vt_mLjAaY8GJJMJ>Uz-ej9jVEhg(x*Wt3BSZ*Di>{`}& zWQ3MtLNka7=tzt%cC!O7>}~K9;%~?Q0Qe|}!*2w9De(99zNmF^ELI}oI9a@#X{K@v zjH&{pMTsmr9Aw~Ed;b6h8vU96G`EWF`~|G|GF>A4YWHsItdnkqtzfl<)f)^XA*Tzt z7G8KgYp)e89h&&R5AysD+A`&wIkr*!f2)7P9~@|!OmWHJX+b6Ke%&mY$p8t;smE>? zx8q-aXrHo=#cz+_41NVC!cPQQU20l)z-<90)%BYNme)^;Ywcd=a5Dby%`G~zk{OqS z>7R`LA^0lmz+NiwH-oNj5=rblJ3pH&KuO(aU>8l8z~zdhFWreAHZ>QG}H6w?a}o$t2W@8{d^i69f-iV;SJWc$ zFYPz+cU193;5(Ym(n7H*?4g_hqqY?>j)xWXU&3G6Q}%uM9j8Sf!%u}eMxQilp>G=4 zOL4$FnICae&+zRP-jymqB=CCqnw&0T5Z{B>Hd41&hq%3{{XJTtyH{? zJgQDViH=S_rm}SjS9x9e8Og?dbH!;%a<ru%r6z)8l;gt0|RXZz}BDq9d zG5f@Dd)J=+(jO8peir-<@o$dwxeRyKT12BwaluQeSz(2Tp;wSM{06$aO>0fKm&~=k zidBkPBb321xLFYGWF!-YR^X}SPXq&Acl%ZNGEa&h1vQTbTk5}I7ZTe|HO|fAIHH|o zk%-_7B9oGEyLTP|qLoT8tr=GM6Ib2o*F|>p?e{X}PHl3j{np=_vc7t$k3X#D@Ry}iHIs7;APNCtAV*dc*hL?KnY2oh*>RP{t zH2(lEY*x3I(+e#>2dl`)khx+879gXe6YD<#uY60PUU*yK?uB6`y`9&KAx&OkmTB!Y z?KzQ>V#)wANFtPho@Clj%5&3-ryMlbv$mH>z4?+&-@9>)IyHW4ZkfDSwS(td*|fH` z`gT`xe>K^o>5U&tvGCT7rubh?gBIFWpADP}7=6@0ynS$LTf4mZJ^GYltwY*3;cAC_5NaA1 ziDmeQcWW)+(l&VfwCztr4@rT2A`pw>x;GG6**`~Nj zVY9QhFE$kXjj?mE5_x7I9(m=ekB7bq_|M`O+Vu~Jz9Tw9Jg$kQ=~5|om4mkFXTd-^ z5+e*x065JxB%^ny_5FUPs-Bjy+UP&D--SFa;jJs+mxVOTyr*S-};&pan&GHh59Ali1Z%Ui65oHjS`XpkOo5`}7z;)~em=A|k6OK*gA= zIRKtBk?qj>{uK8c@)J?Gg4qn8FkW!PZd`Q91GY&203xVf+^qOtIf;C2C>Y=z^#ouJ zI5|Gub4hV{mlI|}tc|-ryj-czToKTmXZi9fh1@dBENuh%eCOq2qpvNHPEQ^AJoe^~ zAvWwXOKB$CA-$Q&QI>d@C-kDTuI(X`b0yppO2->sPNaX|G*jECuTT21?^bXNWceXh zkq+oW$AWYGBW?olPZ{r;lgg49+7Gf%6DoiVY@|5!$>>HpaB;{drEBQk6}fw#v)Ef+ zUCLOh+C~~#GC?>9r*?WCK?c1;!k@91$4?mAE~efAn@F&cR#AWA23EEaM8@Fd>cNfGW?-$JZ$pR=@oq_4V0Ovo&(DUt6cG%KLlpiWVWX3(_GRqo55t}*rNN$)v zm$f!yV-?~VZ_!#ouz9xxWrzTQjDUSWBc^?+lHMe0_W8M$3g^t;2>@_O%LANb@Hqpm zRJ)SR{(Fm<+9DP1Joq+=BgRO{7(GGVlY$4mG_D#V^mo?L%Jw1PA$IQDC}I@wNgUvN zlau&VaB2%Y&IHj&@Rm~qc*+$yIR_<=Anc1v?)MdHsBnq@r+|U?j3qyG@Y(Xk@6q>6+^|>n$N^<4tNmB zh12xeZe0^-Qt~k*fI0(#*P8kB;H)uRHT2hZkrYXUO4|Tz47ufSarGvhz(9yH2tJ35{x#8l&p)$Ii*Jeyqb*I>%y$cYC#SAD!fNgd>g^6+o=E|zK%;}F~@vp|3-0|3Wqv74R+b%EFG^=a7 zK{S^W?skq$Gv*+v8%gFc$WpD8+_~k%Ww@M6c!uAY{9Urk$GPW-!wGMp6; zs^YNx%`RK*t?t@dmM^r%X9N!}>fs(Mxm1kAlEE4Iq9VRY@TY`tyhW%c zw`*|Crr5h1CvHvbx%sC%vUc%>2e1ac+ykwMroE(_yXyYr{S~gZ=zP8{(W6rI)tB}E z01bbiQhh5+*R%^MwS9i&hM{_)?x3|`1VI#t;Rm1!4?*0q?_6(zTUYxf=BW$5+ucf6 zBP1!<0_XDvy(eGNZtZlN%h{(JVzrUUWx+U7Nf^K<@UDBpa@uHq71Q+ZC(m_+NWwMR 
zs?K*DvE#o%o|Wl->Xk))-`C~q_?!w=DpP)K93RB#@AWNbO44s8-T3i?4b?2AzJcyc{vU?jC$}s&A$w0Hh+&=;fzKfE#~bkz;q|_R zZuJc^#079KqiFe#TX`L~r`~q;N87-zpwPXiBP7Mq?3Qw_4%GJ<6n$l1blbn0XDDuD(RYW6D%;F2zbL( zu~W5(+#cU()I1B}UlpdQ6hn2ErF~;_09KV;?2$#Y z0!&}sJbL6;&p#79Cw<|68yO?BONX|dA0AsW7RYU+U=lNcLC2>R^vC=Ye%fCa{6f@x zMRlfITF9`V@~o3$m5wO^V60BkOMrU}a0fI>v{n{rw%(iTb=Oy}{{Zm)3#$a=np%AH z`F;I7&(z=9yWyU_qx=xL_^IW_vEOQuS_s}vvc&D2E5zKWCp%mgRsd~dn(b}1TU!l3 zQJUGoQL4dZv=8}b0-!h^nJrLwc4g3h9qIAMZpylSv{K9zb0lIgU2)EF_lNsR4RLn* zox}JN)z*01&7S7u%B(iKF_D7;56lQELG;CVv)but{eQt4vbDdi<}Z)Y?sP2z8DtVX z_tMV5OM~+;cE}?=39o>^;G~{2@TRY0qiDJph^%h!1h;c)7WS8GD_e$ zpOpGn+n*fu+l@EFFMAA_I!}Z33x$2(GB=i0SakL|>tB%GJHIhK#F980A-587&V0S? zlk1b8#=9_d?$n#Ln*P57DZhH%f3NdCnEwEScQl{vANVWpgLRn|U+mf!h%Uiyc7@WW ziCBB)Q~c}L{{Y~$z95fQ{jjucHVc{NT`$1S&lAWBp}EwhVX$;iR@@Ve9OAfd_%_GG zGirbEQXB1kp#W$f2erGyk)5p|vWZB~L;LbS6HosDg4THT^k3UGH0yXJlIA~$`Zc^l z+70a_v(0~SBSg>dB`Yc%02M&o#}((|8qZt-`)-xGM_;y#_=8_1!8wB)m@pHSdI_fo@XAD1Ru7lN#XlLdT~y#dtc(E3b*(AMk3)zSqM##-D1VZ_15j zrq1)W2mLm=AfHexjPaMnEe_!z*E~~rC9~E%Yd*iKq)eokAp#p%270q16M>Mb6r6A> zeWG=}ub(&G+^zQ}mz|4r?71G>en~&q(B$OscZlChz0iCqZr4*=_>)Mol^P~70c#OR zk@oab;&~Wnvh?DGcRL^C6Mlm@$5lDk;X|Q>Js$y({5Q>@#`e4NBj{ z-ygMGi?CxRZ5qn+$&I8WY!5Qw+z*=}N`t@*K;|pC)ROkLNoc#hmHz8@6gBnsDI$3-V^W_!`bHedwp|fJeN|(HnDwR3P&7!24GO^C3h|W2L*R7 zK;4n#{{R5>E5F+}MZ5U5uY6L}TGQ>{X1TStv5MZ}Mp)BmE(@-a;Ut4JOjV=Kgkya9da?E)vGC-Q#*s+=a|s7c_$s6>H+-XL2Z6Zu z0=KUuw6%HCHC@m`A{bnfSru{*UP(Wfxu-R}=@fTHMJIGh#gG;tZeT-y4$uJ4Cj%T; z3})NAI%OLz3#RHr@ao#sM5-lS?xoJ>CP1L)fJ+F8?bnVgo%7?4YS8qyn}cVlnO}?h9sPj2zkRQHduYXBpaXa1Tt6z*JHhT0NIj!njhW%2pUS^!^e79^a1@DM;p2h^(qan;BT; zpNt%ypkpKh{c3xC47dvf0qzLO$UzKGp(j3^5zRXV%&)S|aH8pwJYY0Zi2w|}Hx4)> zKBp(GO>3lFNQO3wKsVw?Mf?0`fKC7-p&fD4wRH9#7z+?LcbI+KoEXkeUfnn!PCe?) z%-Uv@8f$ooYy}b%oa5Vo21f_IL#UInN)K}y*Tc6l#ffzA#GLS*Gta5|_3uSVH;5;Z z_hFm1c@$Lh&&bl&>V1^|0JY!5-9E(WUk)wwUkBNiDzW%eNs8V?UBrblw&pyNdvly% zSDyHz#r`MpmGj+r&%{@ny~L6qJ@qT;B;~V~0gC{ANDIa~V-1?54EenBnk7f##n>u!=^zw&T=cC+4DQSU!Je;IvlxgZ9NU? 
zE*)3w?iB7SBYcduar0z);C8{zMruj53wRi}mJ@1V{G%lGzy$JpZ62A(u2}f5OMCJl zy@BP%-zwC*u?w8G0N?^Udi3U^I_os1`pxBBD;OQ*=t$%+Eu8gGqz$BW?OG_aH!W;+ z5o$M6+>zu6)eyRZ2+n#Ar#KnPpHFJ3bF84VlIrb{ACyQ@_W&5+V3N5lfsFLexUOM* zReKYnO*2eoP^%Y|Mj5gL5f|enoaf2I0fB*+O7&Zy$ah^CB z?M;JBUGc+l<}&$(z`1-f4ZVTDB;B5O`|d>gE+y z>MjZiQyA*352bjA#83D()BXuX@d9tR_)EZkCh%jPQ21BIR~}j(#bUhsLM}Z309SIU zLYFl-&eFBD{{VshM>Ocq6qK#Lr?$$!v`k(hyq@yLeLBMCD2pR(NBE;0Ve`co$Tf&T#Dzn=|krM38d z{{RIi@PCbLl}HvoH`5E5*RqjuWpkYL=1dIr#cZUetm(n4Tius0ZFP_4>~qTnh=#DM zE8hC9ZAr(9dx+<1ddYgIXxbd41f3z(#Kl2RiGuro7f3RPII z<-VaO?MwS$wzqL*;4N=hy}9v)*0bUrH$l@buH=&Hb}AUerde5S;Z2ICaE%w|ZOjcs zRwBH8nsQ6kHS)84JFh3B(R&C}sHY_t=W9R7cJI5}Lqk#VH^d*>W5@m^_@ClERLQLP z&t0DX0A1AQX89wX_q;KVes*1WX(YUyk~CHyzkz!ttDkLEuv z_}hW_WP4YkPEey8ZEZV$UCwIpsa8%+U-3DAiPv*nXji&*g{+b*K*-1EO89|2wbh4${39KXpQiBr{#LT6}2Srxx$0ELxyOYB=FiUl( z>o(5s&K6(@{0=>9GF>Xt?lJ&b0)xp=amQo%)>UdZEnUqie(fu-y!l_k5#C+h*xhQ^ z4{<#6tIabU7t3ex&IcWV0=+v^(C&|eucm-pc`nflq*un#yagqCsNC4kP1qGJ-+=7( zZ;Uf(HgXwlTTuDPhibOe4EO+?4Zwk(xCDWmSF-q9;opb6Poc?eb7d0TqbL(D?1d+& z+C~mCIQ6cKEmz9mw7-{;7`J&Tc0Bjt#)k)o@APXl@us(F3S6zEFxhC5+}=D42w0LN zc-D4yI|O^cC4nUO^vzSi@c3&}*RFhj1?{xjHKDY=(md2=w4O_dV|ZmSvm=#`SxZN_ z;l|U-;BM+)4eTYe`z^KI^iVRCSWkcX}VR1gS=6ytI6T(i7YPs z(pja`-Ze1FLwv(^tHCJi^V_)OlU?WR=kQWLg}y4*E;OxCO!hX@%W7vVB15#OW?)p{ zF+xb&fzXPx@XJlpJX7Gsdo4N?)%-bfx<0<%b19Qgx{#eV=0L!VMRU5rn>Y)16C7Tz_w6OWoMKThop@`Zy5)g3N1d?&UHRi$AT-4(K0I%z@ z*;Y`J=4Z{n7GTu8Kd9a7o+Q=dy3}{-GGvhkKyZU`zyJb4#{-4wRXk_#Ncg6Bf496NcCy;JXk>zQZd>mI zv9fcIO7;H$*>V{E75HNJP1n3Pb8S83NRMzWODK#o@a0Bvv=Tai3F(^76x?Y&9p0ao z!l_f8B%X)anw7%Xct%(wj}oFVWFxZ2#~sHNNnL&%J)f-}rywcZob(E`{;$QHImO`o6oS+IWh2 zu38wr&ATQ!Bn4VHnf$oj!zomDJD2YcEB*?F{{RH{@Q=n12zY*P*~8&)hAeztqh9&< z)*celqP7=0X44=NM`ar(e8d77CE8mqJjMg%?ZU}M4ma2S2b(x+V_`!ZN`s4TrR>tt zTYH{2{{RNvNBy7sSp0ACEJF#V{3DXp9FA9hrDYfcxX6J1Lb+e~DE0oCtbW};5WHBH zh%a>2)3j!p+bslE+Fh!=Xjkrn?&X5uvEb(dyU+MHyG#DX)<0`hzRDA3E2?_a3i37XD*BJTcKlQ)|Ftv=%(S+{CeI47@QKPvc_{s`Id zzd;|iFN*wU;frGqkKun0&jy;3%+j<;eW{{G3y?P-v)alFa86GHHTEaMXbtDXT^9CR zrV?FP{iZ`Ig)Y-b^AddtG82lqZ4{sN{Uax>{zqV|rp|n|y4>7k6u>|IdZnz{#bEi^^1p7siwGuf40%llxo`VigI2>1r{7(4Y;Qs&){6tIp z8;ErcKTe*?aqGTcwaG88{>$U^xLmB;?nWc+293iuZcH zyLe-e>i#+Y)47GDDq7CmFo1MBw-?U^kbI?`h+Y)(E9kF+{{XPhg1j-SL*h@1S5f%C zRJMtycrKw~VH;spRU&b@z-A;F2RIl-A1U_D14gjW>|xUMeLgE&Xm+GALmHwY=YSPJ z0AsKN`qrI>E$lT5h8W3$HO_ej%bfSHd@q1f3&CZA6TXSg8OC)mY7uM?R%nkv9B6g9U1db2~5q4sckH z@veKrp9(xjrRsK<9xm}E#oh0TZY|C82VoR0vo6@J(0{6lV%Zv~#zbL{B-dr|d&N-r zN8#>|qS;woUus&8mjsqtWsE!Lm2)OoD8NNha9xO11C|-hbXBP8LB)Stg(!O@dh=h{ z>E`EA)HVxunF-408*`kFIM1i!T&>TO4iLY5o(^ z;?gu5J9||@vd+%Z-C~X)I^hs1fH9l^GDodpO(xW0uKRQyM$)^`^UnzA{x|SQ1ON|AhM^~6*_~&EeiM9Q_=lw(f56`lueDnxo#8i7 z!kc`>KQaQ$GmL<7k(`X=8Y^E){{VuSd{xn`Q%3OT!(R((RIc?{vYOAi?-|W{{SLgM_Se)(#lV%JTgUdn|6_gFi0UmKro<^J7gRl zm0s^$-{!*(r*R~%0AM643Cfln7QhD^a=r76{|L{X0zNwZ+|IG`+UTc8;&`_9ZyaP8OKhQvQb(R_aw2qp2`+WM_B_L7a>_@ zY=Y0nAmDS;mEdv(7BeX`EtSNmSnp`2h&Pf*&Iw`(Ir*`;6NN%lfp6c2;Va`h#-B-;xz*1GZ6k6KBq2SSo* z(lo+PJ|&2SLm`-!E5=4nMnNQT!5khkD%)8_ar4`W)dYZ&;Bpr#04gv+$mxzh3R`$D zZc9fEoFzfV?8I$-mfTYFHGD@AFvGGw%w!zU!D9axTjT>InGO2*#ODE2j@ zBJ3r(WXk{sH-pLOcs%1ZZr&pVZw!u6OM>edEMFNpBuCw7EgHMb8CFq_SluH~p`kYZ2e$dMzs}Na~5!i#zL&q4+ zU`;;DgoH}FgjtBltA|i>ImZMNe)qQ(=)bbKi%|IIp*6hGNFtSstWy>^Rmze9Bh#Lz zJ#mV;1wn-pg2(0VJ7T#{gCDcar@^lmU3lxl{{R*IZ>edwpKG394eB?j)^gm> zCij^_sxIK$ut?{2N#drxN^ZJtDaBs*UY2U<->!+H*vjbTGRm$J67%zoA4}8|N{w1;3bPI|6DXClD z$j~>C_EW6OxQq;xZkg|q#(ArHEz}qGUuSq(b|4QFfF20wK*%+XsC*{)S>g%i8n44G zA>>8?+`f|%yJbUV9aJ|N#z7o{D{Lx{u#Ya@pLypsF6ZR${2W#LHh$Ni6n-K6Ki51j z;SUygi@+K-fR7;Xq!$xIWvwLA8D=&x?q!wYXqG9PcGQidR#UrorvCuJw4VcfcksvI 
zN5tP2c>Cdoqoe--ZTObk`wv&v^a(6)FCkdp&QIRq*+Z3PJ3#Xtfo3a({R%$=JR5Bh zO$Su*CX`ELe`ffq`4xVVw5=E%blZxo{{Ra%T+(bW6Fsua3{sFi z3+LXtq03G!C9A%-@2lzZ+{#d?DW=}v3-m~*5s&-kysyPS@NK8;BdAK( zehTrg#E%Y5xlC(q;wUd7To8Wk-Mox>1FKirQhal~(&bqEP4IWeRsowEqwwIq1KgV# zCpi2lkM^ASH672{b&nW-WJp-3O?yqd(}^Ru`czSm)~o9^R`2g$=4b4vcDLdAAD(_N z{{Vw!JQH`QU3iPYAGSYpgPuSb_U&G6uYbY5pSPES zQbF+Fb~O58|)a4-b4{_*>$~SiCRsA3)TYoB?62+e}IH z!Z|h2iNzRAmsc3EYK`TqcRxSP*{Kelhe%eB1tgZl-1OIRRcC3`;)&2<+Z4q(naw)%i%}uQKf7C zF8F2QJr6`Kw1Jj<4*hUW4_+(Ve`1f=3;qeg`(l2~o*DSh;vd*w!~Ph*)orC&?DTzRmO5X9yji3IK?6?l_M?D(K$b@Wk8GOw z?dreavo)7FE3H@cl#D!~X!=i^W=eaXENSp`b-GqhNr*+gw`=;Pn6+$JuiG>B zkNuK-D|hhw_Hftqe-Bw}_X6hmWbo`Vx?Lm<0$iiEF(RY+&8su)GEW4R@cw|80Ur>wHe>1r~E&2iT?nC8R#=D&e(MPY}yt z_Ef#TXwg(h0A})`iW_KSP+6GlEXOUhaGwT$V$Xs+;xJ-0EI2 z@f@FPpY078XJIVHNm%bB;w_2+7_Kj63iyd9D=jwv0D{s>PxuDaUnMk}zxC+PpMT(y z9}o49hu^Z##2<|s*M+rBE?*E|NiLe2uA3#i(^+Zya|D@DfNw3OmU5(gt+9wEzLxM; zhGElw8hB?+e5}^q9n<1w2dgctNVoPaezMOmOA{WrEAM7wJim={5O~7VI^gE)cbqHekt(Rg)Frhb?*`B z_RFQ(iS;8r;wiI_Yd4uFCo!%8Ag~z#<2CZ^DVxAQ=y@5L>C>%>l#I^<2;VcOFq^Fo~IO@Wk6J27ljAuknWaLI;13}5$Wy* z=`I-DB*L56w1`~Bk=bLY;v=bpXyTF(;q3RbD%Tsmq_``Etd zx+6eSL-wU9!2}1^|HTVaYdq{EF}Vr26_0wVZkoi! zUBy+q<{NWfFNspG^}n`K(3E|RE~ANo+i*Mb0Z{pT2_fx*E)SJTExXZQ-tUQ~dF*i- z?vj{(z%1zP^ID#x|oC0c=1n3=^zl6_qOfdbSwFHT)Z*-lD*jxgKK6{@sV;k(B*~j<^G#5DTLQ2@3n9$$tMy~$jJ)W<-f3;`#8WRUii()Civ^3_Q zhXxxTayKlCEA5B-nL`eI#>_uRFO~d(FjR4a3EYdF=3EF@OvjiSno$hz+Yg>DP){M$ z^Khyk>yAAHZ%?H9=(+|onxUqm7J2E+`Z%7sp;MFbJLGJ2Jv%>)^M2TI;oY zCBl!&byRvd6-P_n)Ord2sum6RA!oEQ{0 znU#o+68W| z0~iobg$En6rBiPqe4-{a`S}Cvp^Zs6eENPwdwMCopj0E>3vFesQEJQcc6!K@2!ioB z$JgYq?^XPNAc|Wm+vKuF=+mjA6Vb!V?x3mJgN)wm%vWt2fAH7tG}a>aYx<^%1I&*B zYfvY%;~raggx%uB)R+3z0k^-S>voKMDVlHe;!&G23FuLz(0_x;W?k>tx?z6B4wcXO zE~^7CM4R=`M?C)|r1a4gVt(U(sAwW!`Wdf58_3}OO4oa49}Q{VcCfJJxORU#598K} z&nQ36H-R(k9yYbh{f2)=ZV_S|R^2`ld8x4Mf;3fFazTP?v6no>c}HBP3=ejqNFVLl zvp>dPG$dl@KNudyY|)1H3ZH)wd#pW9#+HQ5>{GE-ni{KcqB$30Y`A_6P2+fHdteE? 
zWb-*DyYoctV$Bp5$TlU|WMm}nHgFv_KHu%c_Ry8f8nGsBjO*Cgex>;&9>!V=IBrtn z{e)Wh^P23njpWd*ImOtM8&|~baKkosqp%JR!oB}3MW*NdZ5^Dc{7}_K=z4H(jUuk3 zGe7`dEXDKamuR8+fV_Rti!_?R+n-0+x8{?5CEgEE5t!ru#wj|A3Q8XI>z$_mPNnwy zoSb9&=c)pwjoUAOvoh-TTu4JMaZyAn_K13()8@D4d-31GF2(QhdtZ>=g383-j}2pu zvWG=zWpPz8x~}gjAH1S1bQ@#wkb=>T%x72mdf7%%ftU0XVy-tXy#}bzLOYb0<2`$) zz1V&2*$dk$ehOxrw<@^&mY*-p<)0CXxfr3dJnb{U5#By@^Wn z5s4h>=XK`F%o%$DANeoM;%6~wTUb9Qc_JEgYnm_@t3pP3aqM(K%k*w09-LDu%#4JR z9n{2Rvmuh|vo%8t%GK6MuKktT*oKyO8kz3ixi&qdLrZ8_^mDJX?URKUGjRaI$ppc- zn`x`$KsrHR+%eher@+k&K9+2PPMR`VTH@Zs(1^R<@?{Lp?}hSn$1XkN;bba=kz166 zdM4qD>I4kNO*GN`mDzKZM`CXuZP+_hp{m0;f%GwLb+=TXjW0jg56kB~NrJP{ zM?0_&P0DH3D{|81+GJ%-&5byUeoG;9`^Y89~ox($NN40>Lm4bi6s$S5dAM*1Lx5c^H0 zV7ZCO=i#s#4F{pNDV(lOXe-_iR}RXA6_l8xu88|O7$gqN4D|}C0iEjmW&c4kbVn<- zcjd~X_wm~@3vy(xFgP8v>{s(!d=CMjS3|$&i(yKMhp?!h)!XJ0{w>s<{NM3RGTo*H z(Q?V{?Je6lK&Kzy7Qc~l7JqmavItXm!+FC~uxxYtuw#KBW7r`W=A|+MOE=Ae)c}c) zR*%W+bN6%0tdGZ+HCbrUpdG{%spq5{yaI^>9Q)Dk;J&aDLpT+T;l2{;5Q%UoRL}U_ zoVJ(uzH#i8R|Qi|mPZ9`tiDWNdY1ZSm-BJKs`I`1Ob88fHiQbWdfZPJj&T_X8yH}a z7ffLdMg8@rM-knL$QK)S?geI^MGzt&T6#iyEok_k|3xP7d9Ps(v6Z(c5d;bJ^sz^) zzHo9nt^IUnH*5PNsH$nr3fpbK8X z8wqjb4L5d8pDgY!uF`O8PpHoF^o&3g5P0sEw;$ahioaIyp$9!@OFO{#Op;cDkoYzW z;J0R;?j->oX5DiUC=YU&T`f9^(GQ{{qNLtU_j}Lo43k3e9B&nO3b^1yoAx;)>7R*5iexn1)_R2)567^GC z_jA-I?*IcN4muQF`V;R9II1ZWnZGu<^nHn*G05A4EwnT_47dJ#!jf2yu7pE$()btr z*K#=|zhBRBY*jqJt_Hyacit-lN>Ea;A1{eFE%O#D) z5B29Abb<(ixr^lLFc|7^cHoraHo2K8$~pS{2mXs#Hn=0~#lqz*Qq1Yxi?6*psbu3d z0^ssXKP>uYo3|C6`NahlIT4!mZ~K`TW#~!emN8WZj>YGVOB!JzeYHGO+v_AM%Eo?& z!7>(2kemesW;fT^onjK~bYX9N6odPLdiD{JxQ5=ZF3ssNZqK@6nMV&J8->Eb2z zzTv{{GXT^3n0;!qq)4O??RToen5x2(mi@2KKC=6KJR~Ylk9_Bf!X1bk6|nn2Y@F{2 zz2-LAwFtkxiryl4&b3gQ^$F5zntxkE;N-`)wMIs%Cv&YZ5|YIN{F`N!5_o~}a&@M9 z2UNJJn63jkAJU98M5UsN@*fl1v00vL$vzC3bD5F)55y(4PvSCsZsW&a;Bwsk=KmZh z0j}5)mg{mAhR|xT{Lj(Ht*bj8{==1hBt2N84K|(BX|gl0`_+{Z+H@;iR@$}V+v!Ny z)_;NScgvWzU?8{0Kn{2AucCrs{HhK7@jb4PC@em=RP^-SUTa`CDST>sICG0*rdwch zwJ*4o>*3vJN9@L78Y%Og&2Q)cTDOyNs_!0 zo_1ddPx^r{TiBZgzp9k@it%fyK}|AYu^yY6+_k4ud3AUWd*w6H4XSaOJ8@xHN*$I? zVwfVPg$(ES=bDgLj`XxkT)s6i-K_JC6OtkpHi5zZ;8!h8ErEOTj;sYZG!wS=BjZ%# zWh|%`(qI7b7~T^yDTUqAIxj_d@@{%iO=t4?q=OCmRGY;V#RCd}vTwwR>gr3$!NY&Z-4KeN;>y!z> ziFWo}bS>&5iyRL$1{1g*tYs&pZa)r?ZbI<(Q+RGav1L04ant>*i+TU%>yntL0mtLV zQjQON`mQAmBFKq?=8+5ZC*zPg@_aJbWMbSexTEV{Pt(j>8VqqW-t!FM{7XMawzXnd z7`5HkHDyy*UtT_HwK#GtRoI`~%A92@L9X1|22CX3r_vG0%17LkVYujJ=kiOn_F$3i<+T7fiEynJY<%&(G6d`*-SB5 z@J!ROo4#?blVW$9_$+d$IIOj^GeN*u$xk6cF<`#Y{{p;xQxS)4&wsSCJVUo-4j9N7ESVmtL5JoSIV7HzV9GgP-sg2`Jf-! 
z;W$Nlew_X@am)S{U96f?j{p2J7Oy@+T0T#F7ruJ^aThY0G%%9xAHz9j7F|OG^I_xgT@)=KK!zmufi%jQ#8H zG^z=TWUhK!HIG$Bg74w0E+`iErv(i_vZa)$GwNL{UXONf;7 zO8*q4sX(kbY{0l8cU?(*hc_yp9Iy&)O84he%##m%nBQlbnp;HsLjEf2t7j%_7}sQm zwP0>oVu{LZQ9ia+;7Xv}tN0&W5&kmLFZf3ejk7}{9YF%-10AaSIc1r zTI7_lx2{aT4zp@6lE601`CFY&HXOOre})tUs=krPqOPgYD1K-B<;qAjCUMN3A5H5& zslhrqW}j$C^WuGj4Qg&YY*-g%ePjMX*{s-T<8p+Xo8G~P8R?>qMDhte{bRtJA( zfPDV`TqoP8#Z{G?_fv+^*8kLd#08m7vyPtC-#ZHMrb}r&FzMb|(!wsi=Q7n4YDdD~ zq=g)4mT+zRfUtLNM$cw~IrQgN87>4yf1+7l`fXIo>`ONMqowBr&Or7(tIk8)RCm6M zhjT{_(7VR{i?xN?%Xoq3zm~ExfzA2Rhw4>yC zyxOz-***zb@3W1p*E!P-wruo?N6Y)9K(H?3UQ97>08H>lNB>N?6Dvb}%HxTO(2KrR?@c6E9KS0G3!xpV)+i6q=~Ma5oBPr2@F>-H!qs8p zyspj^Jyw87MsI>}YSBLGlgP`VT2$L(oB&F?0NRiyVuMpa+{c}Xmpi25(dmu7E%OI`aKu57&h4~eSKpXqruaY%+2y;4e zw#gAqc6-WStxf_Ti=!x))&?_TAuJ`)Ti!oonS`U$p6&HV&*4+b*No7f!9cN7zTkjL zL$I=40^KnsUS9^9wQMajnhS>{(i&ewJ>B5R{>A0ox(fFKm&W(zvIiwDl<^EqRF?RE zKtN1scZuNVJoKTZj7ka*G%1$rT~{E!&oc2dy`s8cs-maT8mo_yJ7(r984>w?OjPMX z?|kfkz7K~*B0tzxncIj%rDg)+ODD)^Oq^1cHm79+Y4th`-7H%Yk*9Qj$`XoE(XG|7 zHwHsxdi%zv>7%~6u+5gXbbqeoMQu!Z8E@PAo=M>0mC6WeDP^A+eeGnhxA5;Wr*3Ub z%x~89$r@B2dHB3@=~bpbOyvd_swe9BKCRA(g{9bFmCG>VB?t7UimmsaJDCLdl73ec zE~fZqT9UkUUJ)LsxsN`DvYtMM8-J5sOwOiMWo}_&Du?DQdQ*g<^5yIsrYAcNRg_@3 z41FhlV{vWvwR~&SBHfXlosn8^a-BY?6y1<_T-Tf0k zYLj7YR8S)+d@JFBY5udk&oQVOpEOwxHP;7~J={}2g5`?P#BlKt zkJ>*z^pp<+`Xk&6UofxxaWtHOV1+ZIZEm7o5XEJ*KF}`FADs$S6b-yp5*t)+_LQRF zT&et9Qbb$jFzwjo-fSm|E1Jsty3SsOqyty6c*O^>>^)%dls3~I? z1?J9&xSWAS4JtKUi~htN#jnX~S3vl>_#lww-LXVc7i1@Giz3z?AD;LucX0YTmX^Pl zsH~TJjEX%~}1DVAov(ipwB!Px$bpAF$a?(*E*L!P`#SFE4@8ynP&XnqQnzY zk-J>Zljd7?k|{;Iz$l`hxm0grX7Y~whRK)XEae8I3p8paD-H{&A$^4ZfvkJT;UN%Q zxRHe3uCrL7;ZS~Fg&c(Ry>kCAC&E9Yo%m!UznwGbxaMeUuzs@o`iiiY=;zw7LfD!v zlN`9BG_(a1-+4#0wC_@meSUjLabqBr1oc1ff)4bBFg3%ZBOI2hkR$&0tz0%ei9y;! zIN=74Ucz-^o2lwv&WDhsf)~;|9U;WK!3=@f63ND~U zE(6#Gg_;;?+|EzBWA9G#F}j=Qk2|;%DkNM<324as9QSlzC0@P^N{Z}po`!Q>4G^GC zK%P{P!}{-zV_Xl$9nDFhZ(HtL>dMB3ka?vWjUOlak0wKHZgzam8Vlgu(qV7am==*j z$f<4#D0!h!U+j$%jOT06Z#qdCv#D!S`MM7sUvXXu2%@CM-MIce{>I$6T<97i0jw@F zJXSKEilZh37ue{@8i5LYF_bm1ixet+-4lkpqmo|>?hbisF^EK-SBc}VlK$Pf0*LD z1mqf!J|&&;3lsU`Q?Xtp`etRmlgIG;%4;i@p!y#U zj+FHNkRL!prh$Rxsu;Rdb~W6xw^;^X6R5Zu*1jb}D2q z9C4YYp93%q!KT)fObqAv9?3E*Wzy>e!&ZE7el$^j?72^FNSU zyZ@CQ#O-Pgu(_E$Ndw(15qH?bo+s3O@F_=wcoW{fyt{e*-Y$rG+k`c((X&B`Gmx33 zR-ys#HH$%T32=cu5U3(Sf`Muk0tNoFY^4zhcLJW@I@i+I%iYJ%OTa4V1Jfy^-Q5tqG*NFm-MuR zntL5YPMy!=<9sSa1gQ`C2~)aAdr}Bc9O&=RKi_6uCsKxmp=cQ?GMup;=-d#%~vcVO?#{Bc>8WO7{#y!T!doCtEpu0KhTQzK4bv*$eKkp=v)ki5^~(oR6zC%~kSFu;C%L>?Ygxh6+SPSQsu zdv@%&{HoBZ<=`c~a6C1%%6Yg4>Q(cFIf#KFiRDbYhMPXWsow$a~%eEGuc_zDNd#d9c=AU{#bjCs%uR*zI)>j(<|O7;*;E z>Ap6Ae3=OS-58hR4cV>1Ql%j|Wo@Z%Lp&5z1^fq!mHy-oXL>#;(SZL40`8+8ybyZr zGgrGNOkiJ%PR_Jj0u6WK>5(NKM`}fl|pND^V6vX3+RfK8NiJVzQ(HwrEL#nTs zC5~&M4(EXQ1#P}SK5b~b!2&_?FtUXT;3b^~DklT2XK6$Ese*(V9j-u1<}zdvx;BMc z=G}7)P~7-(NjD_C;ZkGu;eHQz+6SF`Xu$l?k$ZqT8*aB208v;HYIPM#qWJr-V$8L8 zy-v#Zyl&cq62vEPDlov-T@ULSCiogc0gKP4osq)W4v5`kB*wkqG>D{;To`(duDqYR zB3>&PkrV;3B#11a@#$Gp>d%gj0={~8JLd2e-rBY*Th)%v-rM%>MMm=LT{8O1qmLEe zlJ^<79N5Y32`;pr%;3)V*0a92ZR2X?rjH4(F8i#jT6(g0*Ir&Kk23TnWF!f~b8-6D znIcC$JDC@(SS@VGGwQX13&-*RH`#8k=iCs+)w_xJu6FMdevnVCCkLRBAfb1pPtxn0 zJ2&fNzI&7i)k67RpzK`>q0Yq$-Hm(uzJFgL7iIz61@aIy~#%fn@jQ`s582oK}AE zixq5v*B)Q8K*RGXQ;KfsAF``lPySH_yOumdvi^xUlozp>7g7R zfq&muTZ|Z(n|+I?!Nh23Y%&R^-XaV+!0Aa1d*)1se-%%7=UM{C0_ioo3a>37YJnf^ z54`rY;3dnf46y}TJ{V}rqcQRB3R}ot%o52+~Vv=7L^L^6*yv=8;zh6 z?HvL8A)NDh{g13SE0ccp?4*B2^9Bbk+`Yk9NKCRJ<`YBp=*o%?fy~YvBb-kO)*bej z@}Ta!3}5e_CfX!czzmfM9Baa_s@)G7Qn!{~h2vB2eex%oTC_5M11#m*`TZ{eNv8bu 
z4W|?=$}>A;QB&h&H0>VlcB6Eh?DtEH$MA|LO9U63!TGD}@5KV`1=mr$mtDVd5~d4w zw_n8jNZom5a&smUd6)hIbM9ZQ&JXuscXCUW?J>}+Ryhiu%v5JM)S4K*-AS?H9>h^j z6aZ18As*R}?Ad3f-We%Wf_b5nixWhL<{wUDTSZ-kZ|H$jUJ1K|fAz?H@*+ZKFW6fX zdK?aZHHxFnCN173cbd>~?o2y?r1#3+(8>6V)dZVv zDK?R&26~+sdu}McXEv;>vHV)33(*k>>6KimR(ES>u7y5C+(}3IWp=;m2^VvyTX!y& zV`$Ynh+&SGHV>r5fc+5UY%evRp{u`P>bEb_%$# zUO?zsu6>EDv@)`bniGAH9O@I-tr2_FO!+H9tY+o-AjPtblx!4fzBW1ehZ&u!8Ry@o zFgYQLlCC(*7>_YQx2}xCM^!;3w&b2;{w(?Sy!&&;boes>;#x!3+1E z%$UYX$nWP&m4B&bjUOlwvQT)(-{YXzbBNVsKwNv^+zerz%#sgbJ+^o+)l5_J1M>$z6K-*ew2?xp|W zzo_EC1}zv*`Q-hl@1PG8UOP&}zf!(v3s*1d{%yK@INt>>=^i+MRA_Xz^xYJ6UHLjq=wKVM#q}btq%&j)J2;aUo~Bx#t_7Q3vTs zJE0XP4@GKa+oNPlbVI8bi?ogPuJ)QzdN0HOU?>DP6vB=7pFh^M(Q8t*4jg)3BMQ+y zlZE%!OUy0=-;>u?j=82asTd}6qHcuu{tL!@(U{MkFRVPUCGgfAcm3oa*zX-{X_UBG z8Le$N6HbS&ZP<+e$j5@ui&2ZleunGu7s+2yKA(CsS9B3m(e|vjZORM#S#sa6XL-xUxgPr_2tx{ zrGhZ&G@6(d$4E{7$aoP-_E#qA;-xhv@_A1G2QvQM;hU%%b+C_iX_w-qLf6`NaZANK z7X!Zcb%;gRot|dmIIwBHe7_ksdfcU6?CjM%lrNJ~4l3|JFb!26=1+?l7EH`@{JokA z8uoE`1qm4Opt=;L`e!4GDBmbMJlYtpYvZzw`K_B0zmsx(M~g2ISbo7}uj8{kV{-8- zWPw4VhWRp7w``huJuxcb^EvB^t?*@8lb*-8og+xQ@KZ04jv|Km4@BJr*=3+QNq#QW zfo>Jjf;+t@e}ANKoAZCOMjAFPCV@JI9xM*Ex&I7Dfm+A_{lF3c$@q`5t5M5aVB5L5 z-tZ(kK^366?l`!-7lNdK|^x0c_Vx|t!Uc>^F`_xfn2lY;(-kF=;?gS1+Z1A1&|9G>uV;(ICh`YAwG5y zC$EZmElY+UqTOcIq>6guMNp&=SUxrBS?T;aedAg=!CR`h6o6)oQ6?BGd8yX_Dz6|` z)@roG({_Alr%w!-EIje(BXz=D9coW!rcLxr+`tz-88UJw{@ZPl&fLJRpD;pow@-O8 z-$b4yZ``S(SmRCO?%n1+AQ5WmKs5!yDT9wSMf7Z?LG}}AE zc!^nhhn9#uzAeSPSO4ivy*@P_Exzp)-g!#{zl3N2LIzBMbJ=mX`H9#|wZ~x;fcyD_ z5!sVqK3{ofIi50n81b@b7=-#;qgaDHXZMWFA@O?4C^ljH6VPJ;k}yZqe^5j2gS!W> zc{*E1Ei$N7G`C|8ych1vf>TtjwpZS*mp2V}IpSW>L}zpsShR1eca@f=?*71d zDmD#vDqpYbr3Z@X9YUc(@Q3;C+i8K-T=BOdrYQ{=)6>JGX`vb{4?iu+={6CgS2geF zk&|cwU4OJH2CqNVTfWw;luh`fWVKKb^5%r)!Jxc-vM2xg{ju?2a;;9UHmX3ac_=NP zONJ14cU zbMKFrb2ZnF2GgDj%S8z}~ zc4fhXDCrwTNiMW_M@7yk5XS531@*)d5|ekdVd*2&$$Uz*iTewykOpIO$|lJVl(F@f zjR|uS@wUwywl4z5m}_J_wbv{Q7ixI&9GpKTp9-h++w>6!x{%W2lcx0bf~-1%od%6R zHp3)5hMe@7zcUd_#D7V-Ap?@=?nHM8K7=nl>9@6fQ*yh%@B1!0$cRAIqppas(DqMb3HBZ~QKBpY! 
zRw)NtRF)|8IMmy#w0IPYUjFur{)AGE>o661En290KbU)`RbDZf;eLYkQ19R_tzPkPI&rz zmILDyG_GE3ddoexBkwBveHOI+{SG9)hX4%7 zP>DjV=D`Y9r30dp@mA~pAvpJy?({%SE)KEM_}Z*yvp2m8>6dnJ;5o4=0%kH5{FEDN zb@3!uOJzEdDs75D?wHBO8uC#6f=LFG%sgAD5cR8bIt99m-f;uvYFMa`MlD{S#Ls+vNj zR*?+BvqZ={8^;HZ5(@2Pr42{i7m|Dd2u@w%chr1>xmx1}4H6?G1@#Z-H!z?rH8lNP z&`Gc-I6Bp_i_7^-Dsqn{`2A#{e+l&-~$VaeE)w$Bq$&X9j|1h>%Zo{PaOeX-WtjTYFE&A=-Lx1nwY=Pw~a!WBC?NmOv-d#y0f-=`V#guyEL8>5h` z_J3*jGS3-cp(i%Pi@6YVqktMs7>=WXh*xz(MKAi`R`9@N(|7|(OUR7BkJdaT-OHp4 z)ZGV|QUsFTY0XAskAY%s!jIeM#Y0BBYxW3d@#3E=T1CoY{zt7#1^@Ke^4eH^H3A&4 zGl0UDV212F=g=uB)y|)*A!@2lCfz1&QPbP8Gi+$MTqcrfKz8pLwY|~+Jugt~F=`q} zX*@SbJM_Rgo2y{kI`Fiommy34Fd}!2Dyzr=vc~gXWbCKCbx!ow`Dz_v(U*g9%el78 zB)b}4EubDH136j@gVJI2II!#Cfj@1tZjC_?P~?lx|bDwqd%`UUku_ zQ!$2OxAAj4VVk*QwU)IG$kPFhQT>nxvywYl=+Msc~tJDFyC3pce1J@gdVBhBi0A8{9RV9P>;><|( zCj1xYU6Z}U!5gh!v0={J?~Z5WA3u(3Xy=kV>WWZ<(*|cuNx0njPF`t^>++Nro)0RtvHzxr zrR$~72Vgi#1tWm4+kN2Ka|3);5g;(^Ckkyi>UlWZ8d=has z$D2ji{oLM^$|_z3%cWQ29jyf!=~eNtJJMFV^YvMLE0b`oDUncu-s~I?s;T&LYrTCs zi~ja*?_MlDw`g6RrgLKlG#sxF9p%2O21QU)!Uw8-3 z&P=GeqbyX}{bc*-c94=ZdX70c>}_8Bq!%(>c%K6oDgy(apRK}OF(IlS4$E&pX>Kqv zUw-Pd#xgK1w2E-G2gM&XlyTG>xT^evgU-AjbZPH>zsVYB?IvySwi14eIR4E1zO>iY^D4L0$q_y_oU;42IX-5k!!^ zg zvOYaH^M4n4RvWeoE;X1=&yr;Hz1w|7P=cD&p$6VTaTCLRUU`Xcud#+&8EN zpWR8?QH~IX{jkIBcc)~*M`3EMiKXyd{p&B{MMzG$y6*oGB@Izglek19oUu0lhaZDR zThUsHcBhUd{%JWoWS%C_q8T5d%u_C-g^mxL>`)=e|cQa)~$Oh&B$m+@!N)hVN zjMRg+I%w6h2mms=8!FWVhi{sXQhL)hGI7K~6DgB^Kx4FL=X?H( z*;i^7_hGP$k$84Y&lw+2o~**3$g>OgCUg7Z<}^5lS_yu`3N)&MQFfkQq7HZjl=O#S&Q|(NO3e?Rm8_g(o5im8In!0=eF}do zuL&a7c(dbcGscp3{V-(4vENtFG#%r+DZ6^{yAAGJTwEuJal_Qw-Vyv9erP2_F_R%3 zfHIwsnn+dr?DMRiORO`h?HyxDW z3zB$!70x;6Cq><{AYt|&2%EgD1Rk{q5r(1%7+LbX_)XVhd7+4hsB=2Jl6I8-BHd6h zL?<`QK!GqLLm0b9?bglj%#lpDE>?#sJ^xW3_lvYPR>{(IW8fDxzCMx5F5aI*c2UT! zRPcIDSG6OiYda7+TPny9seQUZk zyB3)0gCfzl>lp&(!|#IYJ;+Mv5KXvP(*1*O)&BR+&#{0FJE%3;~?tTG_WFAuKoEpf^Y6!fSu9vZ0og&J2@gCKbS_%9)tS@wyecQ zylvn?IJf2w>`%$;yX6H2>>k^Kux(81XiaEz*p930l(6C5#DUROUarj_;sz`1zCL*w zl=Vm)|7)3xp=#JNiCtq4xgGU?(`3XrsU;A0J=92|s&YJh1!iLA=I`g6JHhMRr- z8Rdb$BX-PS9^Uz=*5dWe^`|)vMZ@DJ_-N|RW9J+F7g$PK?&sXwA)_c+BFOBw7aldO z_52&krz&*(1;yIjZZXua5e*XpFDq`y{wiw*`5}#de7EtKH{%$u%VcA*EOJQ1X7u=+ z3k*O9C4D)T*jw}6v$z*tDBElE{qz!{O)NxnIMF@oXpFT7ld*WcN5lVqOtsH^caf%L zZZ6cpg?Y%BCa|medXzEpTs`QswjxyJEyTrFKHhzkI?9wji*o;LJ!~y|Bh#9uG~=Ef zKdku8^%x25Yb301$EJifqQF>b{(Qgf`A?J>&2PgO??J)@-YEqKpJQE$_Y)A4moa8p z>8N~^<7t0`Z1QR5x-@nu&WCCccCfeEcIPKWhDHSN?^~~oV&CBOS*xAfrexB{jF&!s zPb6KOF+RW96qH}O%PHe}8zesQTNy1%T$x%CN0j#qS}E-d<@K~H!?mk$uugF4NXZIK zb9xcm`o}~}0rjU4s~cCMP!ZzECYdR3i{Q%#>mab~@JTtm> zT>GxWY>1)1A$1xaWQ(t{uV$vB)s>v}DkV3U6vs;vLp;~I87%(eOk_xP#Z->kHjB-Q zIGI3fyG4?qM#H$n>(@sh`KdJ^1D8m!?NL|0SY`Aa|)&!F{02ogKbUfFIGAr(00&v_Tk+X?Q{)dnWF6Q=LCNIKD(s4^7E$; z8oJQ|U;Tv=ih3OAnU&Ts`Ne$cdn1R)VjI?nm5G(c*bf0?J@xB*|5NR6l^zt${ttW! 
zQp_L&OR{Q~8+;kuMvht>KELY$mRuV#lD=OXtmS{3+g&`3l)sENsO#k%KgMtQ zd>!cuS4u~qJOni21WFu(ZV#G zNu1H>Z3N{1SQ)W?S`c&Jn*dQTOW-9vd^V^COFXA`bAXkUeYsWpVDKP~?<8bP?fe}1 z>#p}%1!c5fFcUqz&7$g{6ZgvSS0!qWFp{D6q}V%$gfH2_okNCgG#<6aWQ1w)_C^Ar z`?omX zJj61llRo^XS?OcS^>Owb%=Ic%DTb{B?!BWr$*h6KB7A-7>iY&@7TANUJ!qWj^zM%8 zeF6^>vQ>TuN5r}gidOji|Xf9~VMh6I=e(jLBvpm|R-qn>3OH;Nb6`8XM9)7__FCVA6U z##0&m7Qx5&T&_X89jyW8yWx=ty*6l4FHO(O)h(0EdFjAcj64M6pt0$# zJB+Rx?FA{%+9}2?-6Ck9i^u_8BtDe^<4sz}ayC2%iDR)(w$KUr5g@!GOFWfhrE$b5*h8Y%5T8KVYV`k}KBPZ;&{gY5%belwacgj|1)ue5^lm!gw%c1mMC=kK8(DW`&A7Tp-V|sSV8KCOY z@#SBE;m9W3F8FQ48)x3Qi>eao`*Ht-n~UZy`->1dge|mW^+ISx$W0LZP_KC9R0@X=^gJn{Vr@Z0aw9e=56a#lvTdY35#m%J}I zNj}Z+o5as?e(`PuOL>;sGmEk=C62Fme4qW13GNOYE&YMjn}B$eC@MklMiF@l% zV+!@Pr_NPyNNrN!?$9SB0oHS^PZ%BpiL`@z6!ls-TB@O`#Yw8@)sjghCKA>BW``Fp zVY93F!#a9T<6LbImp(iYA-!*6(o55(+Zu(MRH3`(Ionq=a_vGAdF6d?JnMe-RvC3e z&DE2Is9~&5wSty>l(E`7IbgrVX$Q&q7s4Qfbz!pWS5=%7M-!Q>R?$y5N`v);LyV#; zz>tT<-H0-L2v_ZshWaieNC*bUXhT>5E@Xi3I=%Zkf|cx#W5InD4=W3#*ZabJai<3^ zN`b`2-TxA^9w~<1<2_`OeYPm{xbnO3^&0RhJqzJk z060knFNYi*fBsz~?;(+s5XoHn+kE{~-d+XKoObBoRaegQT#m~()_=!f3iwp|x+DFt8{OgAJ z!SDy+*T(0uyzy&m7PtCrQ^Tj~z8>)v-kEmED5CemQ6)3OmIUilYa8r0|*T$P^2+C2_$eT zIlp4sr+QW!9sAa{grZ5|zTKXC#G1R8sN9w+ zLE=TX9eE@71D-{7I<%Hnb~5T(t@IX_aIgAB<>c!eQ3Ktb$W`io@#el|@Yn5sd#89C zMEGm)d%-h3@@e3mOT-q;UEW4{8RD>xK_iXMPn&Z!s+{f$>OeT2Iq_G3bUE$3Py28F z&{_?Rx~ZBiC&gYOg338nRI!aAP`M3^Y`29}jZKf1v2UjTo>On+%eJ`w)bTI4#-jqI+r zS_YdO9abpqG~?^o6!@us(^>pGRzs9x&#Rk=4(-AlC!yst23 zbGbka2w}7gfB>wj&~;R0B>8Q&&F%SrUC&1j{;s5~uHOB({O)FaO!)ERzmFatiVbU5 zzr52ewYl!>>@|%xHMmI3QT?7HDz4^;sW?D(oxqcjN%_CTzYY9t@lWISwc&pRcwXmJ z)bDj?F<99`7~=r$B;X<_>daMC4oI(~b-xMxZ}F$b+k4Az0qcjs^1M+?tk`K{=|ZbA z34#Yw$gT*$^7;GG50knX;ZNRJAUX>VPx^gsL zU=q$kkC}7Na0jm(3}c=JY($VNm_Une0GOkZkOp}K5Z#GAy#--bwCX#_{{Uat%!HX= zOwc2?NncSvXPIMSs3-XVZuvMM}9!?5(vy?6e~^~oDc^cGD*QbanNTN zrU2_3D#;j)RFI*Y1CloayaG=h02$;|tur}mdv{|S!V7^RWgYimU}uaR_9XM$4fp=P ztxC1lq{UvwHi$|GlyZ#689d};IZy_D2Vv<@&f(%{&itd2tVmEYzypt_Fh&P@t}Y$q z{{WAaX7ZW7SojRbs3$q-G1sB=sHMIwI;E_$GbRj=6|#b?IbcpX9G*Rk4h|_<^&Rh` zCuyyj-Wg;H=)-#qzBwcxZaaJQ7^>E=NQ*6s-W!K-NpjdAf(cSb1aJ>xDX%u9WLYLI zpc1Sxl1Ab44oJ>$I(~WL&5e$sc@%P5S``IId@d9mV7Bbwp5PKOz@^s2?`sg;z08?u zEg)lpBaJ}XtZ|Yvz!>X+o-tA_l+wT1;DE;aPtJZ_qts(3zf+z;^~lxJbjuTVZLfg7 zk=;>ktfysXa(0CSt7PMVa!+HPeuel~`#yMgLz2q#;|jxPd3|p3>7ER>xLB?>s)Og= zTrpDwMVSFr-75ybRv@ibw;E5W^RAEK?M5iqTAx+&?a`4*1yHnyf&dCma(eO)a4>Pw zX}t=W#%18<04m79T|m6Zt!Nx}J)@=ttM)E*-6U%}sodLa0j@w3C)6~~J8 z3o5@AAD^!e)1L8|NJpTY>;uFSjLa{x$01?k@ z_v=)r*Kg#^&#Hd#0$7Imi0(-_IOncvt+tnUb3V&tcI&lvmIod32Tq(-i*E*aqT*#+ zYbODLz%m8M9nNvbO#Mf-9KA(#VV`M|{x6$RGGWHlW@aGck~`y$xcsWKpEw1)5H3i` zj9{N!l1C(Cn!9)4ol8oD+-f@Yg}K}g!E_6bm;fQpe;)Ohe`L17S-k1XHg<+UHsh!{ zC$4%P)mDo_yCS)Y21Z+W?bS|BLjXuVxzDFuoO)GJ3a;IPCwvfv4st&~a(Yyz>0vw9 zM%tkL>0P7*5zqoX$3g4sQ(AbVU$=~|ooj4k1fo1vN(m#8o`n17^s3jenUD4zU<@{g zaL1gJ8P8t+^i`?6Tdv;7ZZ5Sa4Bdd+$I~=Xs{12SYyKF@`!%dL0ts~JrE-kBup?!V zWB}j{l0YXs^y}->ct7B$i#{e_rJsX59dU5sl)RQjibg`5v$3C6#P_mtxgor*)GWnVD_IUUmi!Xq`58>3W%H#WK$pjyJnL|wDARVX`ImSnC zo;*wOGspft)*Dv%fvsKYQi(#Vyq;=A%rTWv0Elo104eL&HBM{9xVdO!k(op;j;P8B zUkdpu}Z z;B*dH{m^!^6(=VEu_uPOzna_ShCC~>D=Ck!4UA-95<$SuNY84dlV0sQZlHxAx`sq) z8MiAoGqVG3(||b783U=vbg0wWc^Yn{q^w{>u{Pyg4WOOGoVE!gJwfDhNf<&d$r9w> zQ7`T%k~e>}MG28YMu^N?||6p~49C3bWy=4!eUSNvm1`!}*A?o}=9GUl{(tKLWMui6qc0wS63mpp3RR=2RPr!3!#|;9zHWJaon@&%9gz00hW; zWoGc##c6HIjCmTym%A;Vet1{_I)4cxJn>&$C8f;Jh;O0-L`G|sF{yo|@XNa#ZUfL9 z_cfh+ZDnT^k7H#iib%%fymnyiw~|jGhdBozgU?FcV(}6CMDoVhJ`eE^{1fNn)u97M z)x0^ZAYvdky1$jYWMr?HfqL=D&mFiG=iVXxmHz-}j}8KLFNFHLAZ7vWRc;iO#&A)X 
zk;v!1Ml)ZfcJleQ%R6d{a^()uyUS7m;eaLfoD4B12d8?D(#OrY*)94+C3ao0!l7hr zaM@vi0qQYu*>swDP#h-f6POgYH>Cth6DH z$el?ev*hQmKAczUMfZhE$dKPyG?s-An$k$PC5OyF!tM=$*a44P!q$EX{3-Dn{jBqv~ zI2iQKPi}wu_4+NX{=@$Ovj>QZL2dBX8(AfggEpCUa{vb$wv0Xi#&N;v$G;!0f5A3B z2kLXQejC<&QDyU$4wq3+9&_7#eY*m~ftt}{Xt!-w$Zjn&;h`j=l^mG#GH~J2# z8NeP*+>jo<#E1asd-K~hq;WJPz9dV7T@MS@%B{z8rBCEXN}4|s zUBq()%@{ou7~p#XPvOa|$)IaS33A3Wo|7p<`5tM4CSi!dJm)LTUpBiF$#!i20K!i^ z*%P|RKk1+i{OeD{{{Ry_Md7*D=fm2?&4jU>i8T4z-GTK6GOeDK$dFeV$mcwapT?Ig zI5{in+NNI4e-gMTwXyU+fj{7-zqePzIK{t!e{6q>dPF0E6_1bg5Q81IG{tf4j8~}m zU;Yc#{{RJD@R5)EJND)AwBk0(e`-CQ;OGa<_KSBcI`t;LDH2FIJbpcXgYd`UPv0p_ zXCMxKzTVZ3u%UbMFS!T3Z5jGc;h*?AH~tFOpgc!X_|M_JX(Gnx^=|;!VoVR>HtQ39 zAcMfKR`5^!9sB!wXeyTeHTW^`t6o(j&2gpQ*h=1@GF>{a*J><2iLNduo>^`Rkf^E^RYOR` zGZh2#e=UI6;OCHVM--oCX@7v8GDbgI%2+BhTHo(wbF}Vzca6W`ra!foyW#clcY?fY zAA>|vge}Fb-QLg_boZ^g=0;#LcCs#WwN!9C15?*EO=DG;S=DcDF0Usgt}ia; zX=as=B(#i6G5V8TkHf!${{RtwEb8y0d_3@N?yWRzzH?heh@c0&vbGW-{__u|eMj(P z{t1C)XKj@qvxePCEEt?d6fx*BK{()=;;D*+tkMk`&iEg|+E<0V zFQ)iwPMDiZJvKXAOO?YpRZs&q0nCg+X6KUG&syp&H1&kXZVD_=g3bc6x81-ef~Tp- z@A8q_x1iB%ZSD;7>DMQ6vm(bjrzDNU4W}J*j)y1l)3VcJHo{9S8rcpHSVwsv6@;4Ie>GAfb43P|Ju z)L_(HdLuZ;aRRX@(hdPE#kPPs1h*LWBb@fCQX)!$9%b5o^s0(-17{=yjB$<+{{UN} zgTuEHtdlmTzG9t{q$~3d6pxw7!yM#io_G}Z@O`TvwO#L;MqGv{3Bcq6LiONt+wi8E z7N!hw+em`h457Awfm@JIA&4c2BPX7E`_vZ}b6cxP=T54Ng=X3r0M65cjDw#;#zuRc z>*3+u4+7w`rU-m7lji8Eqz*?+liLS8)?59A$t}vHcId2RmE{BwLCI%!YyrUemkYq) zk&0<63jIn}No#D51oF!^JkcZ(nB!MC&wiw5r%dGJQe9qLM|Ua~X(47LVT@`9)4?Z# za58zpKA0xG8^d1&q=UiD@f+gjjTu%5AN*jp&w|uc0CmoZrlh3 zNg~t!#lIVklX$DdY@Qj?A%pF59}z`nSpa78;<<^gS>ut=5A=0cWob6MZdNAjmHfW~ zeeDddfj%zr?}0TT;?DstpJgDnXsz`-{Vp4sB32kkTeOFA=u1Au@ z@lLto&3@xj*Dr2w?(R8jnIvghfan3_fH}YbDC`AiO{HqL11qLHBr4;~P%r`Mf^$}{ zd{5&$7{!gJgkZLb+-55&hahvfh6->8BdMwvTHlFggvohhdKFtT%CP_wq3XFk$3fqY zDX%0|iQ&;CfV9_|owd5`3yhLSAmbf7ew8%P>HAarG|O@V?)ycIvhfG1I5gqi2*dCwrQIOn14e-8MeYPTb1Y}@=m);>rgTWB%> z+Z~}FFQ_lh0PH<$Csfq+O-PxhyOqEUGOkx|unXIP$2|sW3AHOIRQ;WqOEy2&5)T}3 zco`?J1CPRoh6#&YG`Kh$K;s9$@~g0$ML)5k9kWcX>=@+Bki2J%_wUYWq5e>&;Dzc? zHw5&^8OBdO$Gup&w<9dKI63ObNd$CnrcG11w!GVuB7D3uD`4ljAdUg!>x!#3B-dfG ztit5VmvXD)X<#`d^~T?q03HuoWYgXw7MRHB3Cpx<4t@9^mwH*_c%>l9pr{xGt_kOk zy?FGf16;cq3h*#@M!6a8066uc&cchcD_l=y=AS$g-OF%Kzg~W{R1@7_m&(Kd#{tk1 zKRPJav-`~xNpvAiPTu(%S1RIk-L(}?YNWAQK;lVfTcA|2 zZ#j}k#s|t#vcZ(&01!DD6eN-|mG5&>=2@X-dwD;5wM#38RtmuN$pai?u0hXAjx?HE zue9A5QZ^-;31nv@u>c(G>;3ORyRIO)d#h;K+5sVyVJ+4rLhaC&VsVTR207$pnyn?h z%xjAqSS+KIN1gMqWpl>tCJ7khl6Q6YqeK_Bh3R3I(Vq0n99DzmQ6sXF#2hGgVhPV9 z<<2@D)Ur=)HK&MI zZ&KI+064)YFh?gRJ^G&4X1J2&QrhWA%ZG&wWlAqO+RDmYobJJGyaUuz4z?Mj%0$w6 zHj~Do&2Pyy_`4r`zBV=x4u>&IpMumYNFkkNc+!7An zqnc|&XFPIEe-v(@o#Ishs8tFE+z?L#l6l9zYMs6Eyy*9r!7Z114erH(1aXE0V~z;V zE5)LPTL@Seq?^7%<(QJw`K}b5<`T)-_1Htv^qRW_Kb# zISB=nfG{uyJ7WOx$6VCyzT~Ws-8hYd5CotH4c7!5kT^SYj&tl~ourVfT7{ZW>lMN) z6=2!N-W&tZw{KokyU<2_cGqGF%V{vi*7Ke2*D6rAe3k%?y$4(XdlPJFc2boHk%l14 zl2G$R63S z?Bsc+hsijXP}nSwwEC0RZO!<+0RsJqBr2ZSt@kHr_(q z5@r}t!3uYD;O98&!0%G;dzvj-%&&*8tz`Y;o44KzA0dQbdW0YaZccd^IOozk-2yqh zyZbh~jRDAA+d?)tEW93b?gn`k)}q?HZELBdYrkj#2vAEfBbC88(qA5QoNH* zx5~k1brZx`Pbysjh1w26wqGFQKPdL^U3LDFy1l~My^MDe%e1>R*ix7bN-098zdRgd zdQmrpbe}B6Y~l^h!J|b`Hx=PWp>8w5&peJzH5iRa+~w!cZC=Vti!*BTADR|L#^at? 
z6DA6==Qspo13fAkF775ls0dy;Vh()0%*t@3PI7a@WPmux>DsypbU3EJk#4Ll*4TjZ zUPCERgeg@6oxqF^gQ)LGf2NmF1ko0cb4VR`F_nSHIZ=RHkXeZ57{+K(`G+!FoQ}ES z?FU;TeQQs$z1x+#hW6GuK@7RbMi>ME@{EFWo=yScHU9wFv)~7a5dQ#Z{3`IolF5Xg zNNg4+i=2r8P`@$A1wT-8Ub8ocucNTrXpe=EmW|ym4$#9G8*<Y*FAJ?L+w(l zMj~eb&jqBuTI7?lw<{K4SLVYmKK3w9YT)M`rD0t@htFE4>_7WB>(PC?Meu&1V+&g; z5!v2cEzGQVED_m}c_-u`5x1vt%4$FGPX7RfJ3Q6#*1KW7nNegA$OA9%wo@SIsL#vK zJPP`GG-Gjaw-7$A;>m(?x7QxFg z+Au&kJg!SOTphKt#8;2n{=Q_{G<=V){{X=?R?(aL6XQe<#x;17=SF4@cX9(n?VMyD z7dX#MS2?JE!8-mdMy~otjC?zM5r`zoG((2#mrMig_l9`_zTebz_pn25>2q=!5ux%( zO^LS-u_hOQ2qgU31aQ0xsc#e8Omy8}$cg5V1d?}f!85ok`N~u$0D`PZIT_^D{c5_^ z+wT7Wh7FH{wU5|8_Jh)AOJ&ito1ZX#>EPC50zrUx0Q*ik9Gq2T{f7SlX|_b34~6X_ zxnQpruV~74gPugZ3<5ztc=Z|m0DTTasVeGLGsL1bKWp-a*W{3YcA-M$e~Y2~&cr`D zNfKE7uHrxzcX-uNM;Tw20E`pgt9K(5)-tY_N&f&|p=E8)&m9}~4*jDwWm%-~jh&)9 z?N+?jVo-7Ew|0Fv&*fg<@Gt%eso~8(QIAmg<>GjC1_N|jR*q#@M$!X`C1qBSf^oG~ zMh5J#IIpX0yfbDlm0{6gj3TfL@~K1%kf0HUR1Pv!V?1NH2N}W0z^!GnhWIMnD=<>d zvPUlB53pj#8*`qx_5>QY7M`*fk`hMP!YiVwLl7V^!~Na4_Re$7ayd6GcQmr-SGTn` zi7YmrS&BiizRs%}v-%bTgVY_QbiwW`L6u$weQPkQ~M!W!;q-A%Fu5$t(TheR|ei#nDWg z%V|gweZ_WAht!Th;QQy>nrdn(Y7s>KN*sZ{8)H@2x zmUY_90OT;^liM8R^c-fZz3i5#BD%OCT&XE3WCXE0NF!l zTd*kmd28$M1-mgdPs?Mzg2eGqGv`G$G-4}u>7{A=K8XViQ}@t*fnd%}F{K{BOV zN(|g~E2;El&XM46gq}6iJ~n(s_>FJzTgR~JYXn{)n^2V8#EX{Q5QMF*lqJ}( ziX;V#Gdi;!!-e>T@k_^F6t7cM@kQb_y}K8*vb2q3g4nn51qA%WU~ghuuOJ*&KB1*a zWhsi&EQrK740F2#frFgzJN7xx6tU@=iy)FqHX))VN)R*CImdE)52Y(zST-sD0BKy1 z?9pv2g3MJ}KaLgdDm_wLa!iPjt*D`I632|>sR$VE6C$FYb=nfsSNuF+%v`x zryyswV%^(ana2KHwjXrIYbf_Tk4~Prq|}oxI_f@cUf{Z)v=nB*R#I1=PNaL&<<+mF zb8s)_QIC<1&~wP(oRj%?6-n)969e{144p8zY_ad3!=dR+ot5r|v%tuTK-f=GI-iu} zo=GGTkEKdYZaZ~Gg`Tr~i@M_AHqgaG%r;yb3~f0*@fOqyt1GO%e#&<)cfQR z2N}&#x{)Gwjn&Rh?!SMpJvctZ5uDRp6qn2@xFmw66NYbmj=4OJyo%8^*oh|Dt*2^# zX%8d{C2<>hh`4_&FAc|_CkN9!;-fl-pQ_4IdpkBD5A{~+r#xdIebe~=0F7l07sK-3 zjZ!Ns7Q0{)ORux4$~oZ+@XP`ABywuhC2hh*4XkmNC3YylIPcJdfrIV*h`Fb&t^Iki zNyAHSB$hf9(V!PHz*J>Lyr7pE<2%)qj1X`~Jt{b~Bz6;9c(&_k%Y>Q>SN6dOic}07 zV4qx$1wP{4k~M3YM&dF8FeQdFkT}l-a1JnP3%B1OF+{4{F}gJXFvc=6dh{H2?apgN z*2E<5xiaau(C>IPS8%DxSTVG5{@w^Y^c-=;RlI{%Sb>16<*+>F5skohEHT@;82l-* zmW*y_hBksERr39FfynjuM%g`{JYU*eLt`3@(Z-A%=?X2 z@h&{Oof}YP2wUwD+pMt>!7Ld`&U$ptM?g8O`;Q-Ze%4Kwm&U=Fva5e-J-FnN_niXq zf=SN<2Y^LwM|peZGMkYq?k6TPLNF)LAEtZKuCH>1ie8irz~3VR-!UT{IOP8TDta<+ zuBL6iv1NM@n%e5w)1Y;jVZHj%S7+2O74dU2Gh>Egy*p7xo&Nx@ynM{%c52dwV|z8P zR%BqITrbUn3O3v>P65U?_sFESSntv9rqJ!0PDEx&*l#641cEm0Va`{s4;4=0?e2Gr zyT%IWjEL&EB;@c{aZa3#yc6lbbVZ}niMzcf*v@=~Ny|AIIK~b#I%AGLxvN?sbS}3jwArLf>&rGWyA&IvZLS9!fgtzJ2qTk& zwxE{Gtf>vND1>g7RCyg(1BLs*07y6)!yjr}?MB|l>KW}Kf>|S%%Yp^m*xCWWMJfRw z#7#XpK>rKKr#* z1;$jcA#%IAs5`n4c{N#eAGgN8+S+!@g+n1SGkI~T{t^xjdI6qCr+R|fwMMnho2SO0 zKvXkE1g_&8fOx}=Gn$&zq9*#6OOpele@uJ+t};IPI{DfaSM zM)KKSY6@R*GFr`Zx*^kU$7Kb19Sa=tIl!i^5k6*U!DcNX7S{544#-N#{{Y9v3t_TY zG5frL&5{ToE`PjEI*yn*&ILbBYv!CNo@PV=L(Ky)QP>7$ z;A0u*l6&(~&kgHHFF&)S-?3KRWePI*=kA`k1GhhgE3`JW7S`EJZ8Yhy$U#I%*_HQX z^gB*Q2;c*s%A}4}hIn4x0?YGY2!UQm7za2c1INE^^=dNk+pn2svX&&xvQ5Aea!1X< z>~qIYtxFZ#31om7B6W-gMoa(+#?V0E_vefcVU>MGw-N2_B#gr}j(4F)F36yQbI|n} z00KMIW=VAGAkqHvNL_yXzakO<3zo(P0qA<1Q|_)Jn6xNRhIJlcMj?41=Lc>wc=X0` zj8sdkeToKx&N*e1aS*6>Weg4uHUdE8ovrWMrtPAYhl}?wB(vQWMBH~PuIR`;KwfYO z#(2ojdWz>$Hwpfv1hB+p<+zVX!E4@HGf=OOaJY%PBDYOl@sF6Lkr)YHB$;HbwpFC^| zpl#R$4WY0xk~zmr_p3JRYi~4(WQZh)m1W#p2d^7g;0`$}{Q0dt^Fpa{6|``n+!9NG z>n2BBjzAoaSo59%;; z5X1&O>aD%xI7?X9e}?m5Tz- zt0p#pyx}>@@th2t=b+AFhT;je6tRx|A(bu7tAbeMk~swA?Ieu!&P6LOqcm+H6GFDF7Bwx4Tz43P_PWJlW~6v5iZC2&fdbmWopo-s9yTAWi9 zc!W^-z)1{ZBOA8dtC9DK3&%{7Itsa{X^595{Vzz07{Nuji*d&7#0-psxSTHTK>CU{ 
zEBgMwuhhkp!WOKsMG{(Ap<)tHHmj3>t%f*Yc^r&qAOpx7Q20@Hh;H=ag4uG{C=pk9 zTo%|cr)bYrDha8~?=%uaZ)F_GBLg8e?C62IfLmzBK5mB{J7TS-pB1*jEODcm*xLk= zzDCA33~-|yQwIq?!eejtJ1j4gNmQ&Lw2LzLjv`*26*ptm0NVs9;jCIKX z07~bP^7O}S)N?(HQA=`ySlG#Ro1_qigsre(au?emE5jyVxZm6JnpP8^06dm zj26Jj9Pj|>dJ4HC!w?d@`&G)A+D<_|4+p*nZ(ea#u1((Jmh%!^f*i%1?aw5xbAUz( z^;*n^Oi&_-9OmgVO|H zW2fWRtr=x|p>1%{lHl%h5n`Z%cLZhtoa4A7J?ey!0+=bkXC$#1Bh!v@Ip>}aYBdH& zJOzW>MKYrWKPt?5&Kvl$3C9>beJL6#twCkF5hG`CWKuDl4lsB)&#ne@#Yq;LR^++V zQKMWaNd^Puhsp}%=g{`#_NM)&;u%yVFpiNldsLpJ00Q+K=OE{=N_M!OgmK8l7HMtf zLQ5o)E2(0AN6q?U1QIG2hTeGua$Nb*GdGZl6B2STMg~Iz^%w*k)yLBzxSla`ZtT@z z&)TD%0-z&vV7~+1j&gf(UH<^W4~PE%4}1~gO*6(GG1D&mZF8$##ckn_2WU5umb<)Q z@+03mk`@(>Wt7CK&$R;>*kKu6b80?UxSjz0o9uj9;JfdNKNNgI+7E+tT{*6A^)|FG zb#ns%s@BPjNEygQX8Bo|Du&6f*TO&WPp^l67<5ZN5cm_sQRupB+r7=@--z|=OWT8N z5E$F+_RO)`hF086k}-&F44|_r`^SO29iZrXJ^uiVziRDD(^%AC)Q#P|7y4f97f6gV zeU{fwfsh7e0Bv?tF=Yf5D}~^GEPQQ05x~;49}8M(S}ZfO!)z^-NjS+u(zbp;MiaeqM zva2r}Lv3F7VlcX@EsfOKa9NNQU8HA{7$0xbjGETD)^vE?44N!?P(v`nhBzNKF@x#X z)6*4H>zX8mK+>aFVaZ6*unFWgLY_08oroQIrEYClb0k)p2J>3$>@BxDh!QaSycN!R z@G>~-f$LTw)+}Rq;wSfOAI;^0*dYAb9X&Wdbc5l14v1x$H$dSbDun7t)PLpDq@Sj~_6`6?w@$n*;0h z&MLF%!e-rU>f750yc}e6+!78r^v~xT_=`)Bh-KTA0a>?X?&?pkKsY^*Jxx9JEh<(I zCCs7t#>j>-&fq~@XVU~xZ9`s#IrQnEiK30;^S)I`+C@{IhZ*4G9Wr`zQOluT+Ssk^ zwjt%fBg_5fPtghBV4VJDwr!-e)Z+5)ngWsT+5us(5z39Y2X=GrbK8oF^Gb-x{ik_o z(z+=yl-vp@0YL4ZGIPo6Rcq`|GhIvxd^-0KEb*+kW0ee~9{X{FpV!;ADTl$DoY+r3 zL4)%v$`q1uk+&d|)3$4V7#7(+)p2opS&0fnD%%KeyM-aJc)&aoaqUrDtK3={XP!%V zfZvPjRt!MkDB4bX5C=WGb*$%&V#Q|KFz{cF;w+D){7S7fXT*C4K zQ*z)jBcU6J=t<)zIqOqstp|71z?w~36j3Z>i3tuFRFx#;9P!(qagkNTlB{Ajh+}Y7 z+71RzcqCx-KA0l4bqzOK!Ml@7k95jH$V002`9UR#=K~}j{{TQ^I(DeG-dy(bsvS4twO_gVzJEQhiCpGC)KbbY@-TCh&5{ zz6jxuuh4qdm82Sc%Pqy;q)^6iSzySKvC5Oc7{&)9B#~7`tXD=EC)N$S7Aj&zR!}zJ z1}s5en2wz=bAeN3(SvUKmF%>YZ{4f83^Gx6jAOO{$mEZH{?ys5rbGsBG?WFHjJKDS z2OwjQf9YCwdNrk}ct5vxkhxN1W(S;ZJ;3Zv2pH$Ns+yItfD5$L5_H-_u(?vrwBv*W zkqRenv7D*iS2+DX+W!Dg&8^En?#^Rapl4)1z)Ld_1_n65AY}4DrMTUv+~w|P%f?Ks z@qkMfBryPd#eRTe>6*@a*07Cdmd#@~4iac$m@&h0LaA-5z`@5So|QF`y1Iq6NM}F* z9%N|hBMZ}ZLuaR4oT(gSS4+O^t0f~^b0zCXG?KAgGnNchNFV?XK{-6&08daVkJ(n@ z-Y1GZ%oGemHWYfu1hAa%hwAmMuFJk-{) z+oqc{N2CcMbG}XRfxUCdV&epL$t(u|`ctO&Ga((MIU)?IYYWFgFJPfTstb zBh-4kNQzxXSmn32DR1*Mj1@yWcOb80$T>OBI2|iW+S)?JpoTV5`?gG7ST=KzHe=?` zL6OdC<))W!ca)P(Mt$W>OA_rPxdiZfjymVRt8FA4x6rL%mr^0qS|n?BOex(Yfmn0P zV<+fy=})tUSXFK!fkZLwQ4}TACkL>?sm3|L9kG*BOQ&p!WYewWiv@u~`$jW{U;#P7 z1CE}jJW^h0&|1kI{KceD37P^KS2+p^5w1xbk+_gQF;L!tYm!H2cjbjiP~poiGOWdj z+^0Qz`s31^(JYh46}&Erl5k{*vV-Uj+<*@m&fao;qPms~I~2dRwnYjEm&q_?6cCmWqOHl;GS%JTZz~f^B zoyt^#bDrD~dh=PQP!{nExvwUStYi}`;0bVf#^xY&>73vZgVLe5n&$3hk}W>*1*j#b zZSi0ZfbBUP@-Pp6JyzgYvZVJ~WCfl=9kzLF!Ff1*?IfIY>yh*=Wnn$E66KuBZK&B= zEJi>wUJS4x{oLeWV*ocG=bE!TGf!-{cS&-{(h$*G5P>+rCu4HGPBXYuqk=5SvLES#c$|0SU~{^E<+KLGQ@B@QfYHTZ5zi8jmeFWiR8U^jZa+U z5_aP_^~X6J)E8GWJilj(dFJxvGZaIyPIqLIq~M+i+@W*aQ}o5uC3U*7yR(|ykaxth zHe)AgQUeghap{rQkZ=U7gY6EQ_V!I$NnI_;Sj3)7+;3^sc>@3t0KpuA{9SsA_fE21 zMQL|{jTsFa-7_KvJSYg*Ao8KHjAISd812=y3!96G?gRlQC_L@5Fgr*fDZx>VxC1Al z>zb+}x@(u8PI4kpqCYMK^B?8L*K9Gv5yu}X;I1-il0e(Ja`NaT0BRDvWw$N7w#2+q zM%@&O>%o(A0A#K2JCv!LOwui_c zk%?gf;0{^Ujsbnn26;IIZUgv-O1U|WmD24HHW0|)KaA-BbitjU7Gjz~~WKA=!i(?R-eoPTGQ_QuvZMsvLgV9obSgl)qk1mFhG zY-X+6=+LS%!QuO+`&Q?9j5{}y2w4FQh4df==m8{*81$=a%aUX-5sKXsASFVqS$_M6 z;NX+a2~*#LS{g02@y~@P89lR?xlFmv*SC5g7!2Q7le#k}^r)53NTvnO3

=u^8?!2SdsFQ<`bFM89ikqt4k9FPQBk12_Z@c>Fu&qC~d2ki7BA+rCL9 z3%i_yxE}fA++(LhROh=@jn?7ll&=FF#&S9zQh;AfpK53t zc#=hpoa|w_hsvM>oZ(0So`CvN78`_@gl6d6C%Iq=RP7liCJon^d2e&Jy;U5Tilg9TK5O_mMmgnrY{{U&4;Za0UM01B} zUDZ@@TQ0yH9&wT_Y2ONdFy7ihuW6qS@8+?&c0{;+B*u;~q(~J>ARLgSkgP}}a4Wf` zcc83uNSc?5^>6G82AVm>*Nt8UW{6-C3x5jWjm3xue@tLkQ{caWnkR;QLkEg=7{lEv zvMd&IND&o^k_c5&a#dRdmEe4*0M|$0Uxiwi!d*V&Me!szRu}V5E@q16&5Xn_EIh#Y z#~352BaDCrbSZNfjb?^5Q;e384p<&a<97#<^dK5b-F7bRAs0`H?-}kO5;p;EB@Oce ze&}K{*Z_`wYUAm#Nw!#-%Bm0}Sxx{v5J|}I+XwKhx4uubc~2AtvxYgy13fTF$?54& zmihd%8=MS$sFChaM*spb&~`tct4mD;CB?L{1p7)z2qdu)kUF*j9D~8Y_vW)N^&4n_ zw~{C0hGf9bdK>|s!~@*%Q(fu_bD2@hgXPF9z~><2AdCPGGI-~;W?xRbL`bf{;E&$1 z%Wsxv>VDgkf(&gD2c9?` z2jG5`>yZ<~^KkqAXDUH)-;7jCV2T9$6%_CbNO?HMaynz0NvoA*xjX3kd$aEm8FmA> zFjKVjAe@8P=b9ta?O6W$Q4}Kq#^xY!PfiK{06vuj5yviE4YBi;^17Ui@<-=TY8O}0 z2lAsRa!Rr18R*1tHsDpo^<+z$#km?#zq)vqHQ`47t<;h+k2VB))`1uCv)xO05Q-ud-2b`S&s7FVIrbN{QSzY07(3>2T{}V;+k>l zMVYrJiY_fz7h*}mrqJRc&N8f0@Jc4-<&)lNX*46)h)@5!uE zU}Ya>6Bbfb`D_jcR>vnj4?KI(7-nHCk0K7gx=x;eka-_on5IpYjm7ce#RA)xF-Wp* z1>0yKkU(EtrJXL9^+}yIZ*H8VhTw+1+Y>8`U-^_A5#nEz(LL zP`C@ilyGtw^ZZ!r$6Bi((OM}dd(~o*tH~X_EQG450F~Ht*BgQ4f$3bu%$E~Fn2KDo za8!~39FC-P$@=~k1;o?o_5mVkB457X5*VEPxFeqay?S=R&^5W!T4}Zs*%7C;#E`Qo zw-!B48{GZ3Qb z*&GlY$^#YbShh(AlgTF|-k)u$>yXSvrlMKAyhh6)QrYZIM*}CXsHL`m^gFA+4(hh{ zD)3p@7x|IW=t@Wgjm(<}ToOKP^~gNqo_kc#yhj7hV!X9Nti~wuAW05LQdpKa&O3bo zs#YE+)a;9@Y4iC(+hl1WBP5KU_OL@3x*<-fq}oX?(U*$l$Ugr7bBvsvd(lzJ;#*l{X)bjKiOU?v0r^1B80XUz zRC33^;#V^`s3mE3EY_Q&X}}Qe+Q*;-62vEAoG*SoZ$)ol_DyYLU=h?6k_p(TjJGTg z8<^ubKQJd8`c=BHmw%L(5lQB$2)8#Z7_V=-pzY_T4>fO3QE#dXJvP#4G69j~QLr%i zND3D@>yk+0q2ryp5YkU#U5#*33q!q#*ipyKp?v@t=Quez82VJnU_on%yw-4}64SD* zj5+8y0~z=FW|A{qHjoJW#{guUj+G+;s@aR90FO`mk8q&I zz-Q!SC{73=YczIG|ga(eJGD#n!sl(zV5rV8%Nvw5MUe4o5a64}SMrxgY5xCiX- zZj<1TE@!wWIp-Ka8CYQO3F+H3uX3$)Xxdy|wXCs8Br-r3cg(pVv7BRoM{sl8lh+h! z^9O=yZ103{suHUSP*C*c!RI3*0B0t$rn|DXX<@cm?rtRo!iOP1#saH0at1*-IT^=Z zDVA~=WmQ2Pyi7Mn%ejP@2XO?TCmB4F4?Q|kuwL7j+fcW;jbyiWPE<4`vz!hP@J`?kc;pO`#zEu?jybF*ceb;R*|_rys|1z;PB#Oy zely&*GuIia*VmD<$!qgGkAx7-AX4#`DqH0TfC)L{0Q1cWAhp>(vJvGMw$>kMkC&^8`$Eo3ueIFAd1j6bhBsiqt_}i{*f}7TBWcG~;|V^2pvcZn zuQYE2ib#%SDuiPJiTlb3;ZJkG92$?r7c$#ii(5S=R(9PaXqL^ikTbNhGUou3@|Iqn zoLXIpZ$a`64Iqb8TPW4O&|)*Z;dU!>M%CT{I1E7qe7GZ}RFdab+8g^oOu-2-uazd| z$pw^r+kqz-!sos)F=|~cR>@ZW{Z{E1NTyX;x1I@73+@|09FR%lfs7h|*@DESlg;OL?zuBUff!$Y+ccW#{EUS8y3Taz;q` zs!RK+H2V=7+{riD?oaPo5n~EK!h^L}0Fi()*cm%&yi=qMrEjf7ij61Re8MA@RgcO_ z>^x^5HUP&Px`{0ymd5hUQhTt1V6p>j9x3;OhEuuMaTv)t>JParpY{E82H$cG7DlGOWn8x7oPa?Grrd+iw=P*DwwfDp6~u7)MSSGCK`L?wAqfO-IotCN zn8!+veI|R0l(HJU>Q%{;%eqI{*^(uump^O#yC7w7rr0Va&9178)C4t8N-%GDgY&RwlR`H85>(9 zj8puphtm(2dJ%~Q{KtoTAt0YEi)~;qNoH(;@0y=941SLlrS0o96Qjc@#E}RIY?9oT zY%FJ^Z5ce^9%}uXU)Uc$))_B$x+5%bke1;D!||ZLE(=h1ZM7sd8gc8i`f!ay0-()i7t}`mm_eGA%Gy{s00!V5u5-! 
zHF2vKA=9oFX1i$bWO-y#tt2W`FCc<9fUD5%7(226Jk^M-W`+TME#<*{?uOnw+(eFX zxj|qvkV(N9$R4?>;4R&hx}K8L+ms=qp4LYv3|DcMHcXBQIT<}ljDuJ7<20`w%n;o< zQX)|_$IFpdAhF8tW^9Ab4@_WGjUoDMNnJJ{ZpQmaz^Gspg8%X7eMOEF~Do#dv0x)u;x#?51 z7L)$~r0gY1D8s_aqY8Hux2V7)JbpC}_AMu3#4$x~LAWwB4J9COH+z~rZ45$_ekOyJ|#y1rp4hikSCm6>DqVwf~MQ4h4 zf8oUE7#ZXMM(p}}X9BCW&5}Ox=8-A?09y(`x z;{zuE3=z+4o_M0+w#2U}KvE)In*mDhh z;HGM|f>^vG9!*LV{ zH=y&bXFH;3!jeoQYqJ#rfNa;$-UIMQ!LJhd{%?t&5WXyk^$BEGva)+ug4)6)MOC+s zQx}+D2&~d9kt=XYu{&rETQg_jzrigN#}{*WCrZ{|Nbvovk&90bT<8oAvvQ3bg5|`3 zOC&7dGWo0-iYZu81`lQN2jge_D%*Rn4frozvDExP&*eU;YJe@EAP2@!i6#w^kj#oy z7A*e&BJrOTelKY-v-m6Fo$bqMl8I9HSh*}NH5nu%uoA!T5-<+ZNmgkwYIhz}` z>_PUxvaT)&`9leIXCpZ3KJR?gQfh`*EEqA#*x3L$?l~L}%AYl=Bb$f`ky1nDyj+!G z$x=8O01r(1Q)IUWG;6k&ADK%TVhx5M^Z<~1ax>TRtsRKHv?DiDPcGwtpbe|A9p?b3 zyH znIb>Do-pYqP7^p7895z$(4M*jw^Au4Nr9Ar3g7^Y4svi89k53@?^UjEp@u!r0=kd^ z{noOb>BmaGf;*dUI@%?0$8s_$0kh6?!8y);xity3b^_vCX89Z<$`ydmpg7u2 z0plltezYfKOGOpDG6J!#17w!Q3j33ey*NLWPK|KjM38J{A1j6+0nY=Dokc9y1wcV) z#HcNd^1nAz@6)g3deRGn9I>p?DCcWBo#!1$9Q$V{(yI0*)?^n%JiCrn_8+>W<#ES< z$AS9QVRdgAS&Wj7pob^r&rA$s9Fxxnj%xguagtUkP!0}d+mgg_kPdO4GI;c=lE8(- zyE-^;n1w8-=ITKszZmosDJ$89v=m&c0PlXt$_Dl$B;%35_XnQ8%ByPH%ZT1M?o_;r z6-#Fz0rPdi=e|2t)?z0mypVQ+Fv^>oxHW%ziW~<$G0na{|CzH~OD2Ke-f7ZckTxFK0EybEVYb2`chF!{} zfY>?XY3HZ_0x|e4*p5p#yt1~I=2Y9W%ViQ04saRDl~4~LFz7Kw6*p`8^#1@dxmCwn zwx4SSxt~*(6qj?zWDPDANzMniQJnOwd!0u~{@0c*9SCVjG2TS1+@4NV*>TS&gWDZ3 zMHQqMKIGJjgR<^v$#bK|kfhI<=8w&{m&?Y}fZ03{H)HrfAYgILM{}veWDTSdc^+hJ zU5PHmj?!6V`@jsGWM`g~QBbvhzwm#^YAZ$S{$))beL72_6{evoGzV*_g%wqmbJY=8 z=Lght&{ecBMkA8W$}w)t*x02|VD%s}k^^;I5Ods7MOxQVQdVAtT}I1MTS*FtVVX%4 zPVR~jIUE&a1%?!H^5A+m7^!mly`(VR-CW5F%)1rV@#Ktv2o4m1wC%wlf(g$9r4&+( zxu&AUy}z!dX>|(?GV*IX`4-KOmH>@kU}KU=+Qfj}iu3*Nc=Wr7uNqk_zQ_a{qjP6! zc903j%KJ8g2@Q;s#S~TSm#6s;l_GoT?=-Zvwu0w}Y}?4tmXVtaj2-K>az+OPdXZ4E zvef4mHd1ZWV3Y~riGIuTf_`vg3^U0Go^yaeCWG!Rt+xr+D3_@!3?2J3XYs(^`eS~^|0cv z%jaU&jTV6gvDNJ)X{L#R5+W2V%kwHe?3Pi_IoxtGanj`WI!3P3tU5ynx49~l2+>0S z02?>~v62H0LBRorNEA^~cVxz*xo&AerOzt)x@M_6O*%6H=VWDw$u8(vf^m_6GBbhF zp*9j3ttNYCw!M+c+DQW=ebJr30E(H;J#Ys~D5V#8r~PhRVxFJZLqS&7`c^R9x?QY7 zNn=@9fC*gX*?OGvR385Tj)l@ge{=FPG{sr%QRQP3VQ>!40+4z4{4qrpoSTa&D-vEs zHO;Jc@`W=3%P#fYs6hna6M#X+G5MaAUMPOfq^sfc`@EFpasFWKsY!&zxr%z{YX?Xri1_ zy@kq>&>Hn*jyqeM#s*ApK~WVq0IwsRr22rXk)C+t z8TIChDRsB?wZ#~xxnAXNC!8s@;zRqtEUK%JIVX<3gsnCu)HMfNDQ(@K02PW=Qs*53 z-~sG-;PuTEQs#S#%I$i7pXNH=-%W)70BCAbIy6C|{{U1OpJ_Q8fzEiyCye?6YfHm+ zdj9~8ppiTWC5c3IGirLJvYoN8&z6W3l6LdBR2j%13_3u@l_Bq+($&bs&lmW;?*@qu{C$DNKsDBIoy9xkJgGRWch!uBC1+8w^1a~ z&{;`!I>yE!@<}9WU9pcqIT;722AOFb_K=SvVDaST8E|kQ_>w0|$jNJ4qo+Wun}W$Fe&9@)>oKC~P|v|i0~B7&vkNYp6ca(Mb4xjfNDX|bOw zxeS_>yl|{nY_A$ADGtWU=L4uCJ-Yf+;hyFMM!&R;RFzoTBe-M}^AOz*c;Hb*2`6tq z_yT_FAh>y2@r)AAj6nrqxFJaU-579q`gY=$>r%CXF>xF!q=3k%%ok`FAm=0w2Lg&I zTiv-yDJ1<0QtB4*fMjVppO64RBd;f(J-XwyMSrU3wvv<*GRn*HL?8e^OcF;!@1EZD zQB=MXWRhjo(x#f+-a>#Q|slNxCDd_4lqU!PzSFcPJ0wn zvT$$u>HcDh_l)_Wl0*VYnN}t(F-GTa91c0i$EWFvZJ9xEu?vSsk})K?0YF?F08}10 z!Q(v9MGM}Qm?e2z%(9m3(;K5X1yeI5qagGnrg4AwSPk3lSXC5?c^*6_H=9*Zd2wF!6fuj2kAu=S2U_gNuk*v|JmdB*IEDo literal 0 HcmV?d00001 diff --git a/data/demo/000542.jpg b/data/demo/000542.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d87e91929b68701acab59828a70522171f9d796b GIT binary patch literal 115536 zcmbTdcQjmI+&(&@MJIX}B|6bXjffs35_Oaydd5T_43mgngCGbZTJ&Jd=o4L(h#uXH zL?32EnK1bIzVBN1{&WAh_wKXyUgxZ{&sm>m?X^DT+0VZEb+rayG}brL2au2e03_EJ z;A#<|3%Eh@U;A&o@n0h&{cj{EBO@iFAg7@CpH4}2lY)|pl7iwU%}pxm|JwB(Ee$p8 ze-Hot+^X2xstNlCAbCcn1$dUnM1djJ^|`K{Y>Iuy)LoGAtUSmfWN z7vB_m(9q3lF@Y0SaPg0#qGn^~;N%jyBPu2?p{S&+qIyqF_o1G?fuWJ{(`S}e);6|w 
[GIT binary patch data (base85-encoded binary file contents) omitted.]
z<5S_SC_q;1$vuX4uaZ=f?8NY>yfG(*1D>L^yg{L9ejU{HF9-Mn^E$oFw~^ux6(o3? z@+KVGTYnY3sW{w04>Jt03If+FbF28jO45_z{{Vo@?G@NjsB4z-GR>{rf=G=QlO3|Q zcEmY;8Ob;U(|!nO6Z|>WE<923JtNfoNvjCVdPF#qEi-D27jAbyUvEs2xG!hRqIz|I zqCTa~y=1)?>doJT-v{j_@WOb<;$lfq9Z?7%gTdpB3>wJshs5#X ztLuLUonbVq3t7O^Y+#7_D5IuFM?R$V&*NF&5dJOecKXfr`fEOI;oRmjPS-#($_8); zLICN5TppqEJHdK?iL_4{M=ib1o2o39m#{MeW08qn*c~wV=kcs1HOb3QZ^i!rU5br4 zIde~c$msqgT6hmn@c@&>m-f*`dteJmyK1jEW+ZW*LC>H)$3H%NF!5%eABsFz;k(UV zI6OC}#%I=bwc%m6Rv|z?cg=5@FQDA`AxN&f<7Tzt9}wu59v`*UeBtDoZSK}Z$jHvt z`gH#QIj;i!rS9!~72usS;GTstL4R;pQ1L>HyOY2XT`=&@kEcvy)O=%hhvv1md}bR(VYYk2@~8z9FOt0~66S9Den&f!j24=a z{3`eb;J+7Xm;V5?Z|xDP-uPePrH#`-x~-Zo_K%1!7v?%tV{tse&EAamSPG%OC49DiWl()@yeJ>_UsdqaPLX}(! zPue5m{{W9aXnzqysCd>(Z7$DBvlgBf&@^jt7M-ElC)~5OgbXBWfw{i(wpocLyp{`L zrC+qZsA1 zJ$4an(gP%WHz^qYT{-m0?^++&I%Up?+Jq8=9E3cUA%;Q0>FJ!FIQ$JqZE2^AD?j)~ zG-+;itGk4QOwdx@q_NeT(8K04K){ha*y^u1EsR`LD(Z3emG%V~p0;rQK3wWgkV z9%kID-3~@d$F*`=FBkZd``|Z+Zu}+Te-v8Xc;i!m!Q$T%SV|pqxXAhR>oy!`2NO)c zc!I0)u1}g;Sz6vIwZm^xg{~xdK@hKZBh#S4z?`MuXuyN)7~>)nTy*pkrbfk1oQRAG18QjRIbXW%%Sd^-wQk^d*i>0 z`cH?o(;QMmHohODgO)Aqz)pcdM zEo*;q1*O1|!5b8`(h_)K!l3$^r>AM!RgRgf>z6m*YniU2o@nELn=D*~9dJn`X9JA! z>(BU`S$_*dZ>C#KEU`&(BRay_2XG6>IP5qF-m2Q`8dbO2=X<9JcDs&FNF)>g0FQo^ zq+@hLl3G~VZyD>}D$*WXus+pp`%00IE;i*+eSrjXoB}JM_-*3*y#m6`^vj~twan?U zumc^jh6k@+0RI4V`qu%dUTQ0(O{-}0D`XKQN98W3p&sDi{cC5z*4Ad$(^1!@md?(? zC%U`1P6|eL?oq+ZGmxV_&Up%Im7}2&)gMOuKK-J#F9I}II_9cxC%KS|OLkx$aHAe^ z=oLd}t^r`gf)#W1w}JjA=sMn&rNeQk#xFHE8fc+uH#0<}ZkUd{Nnim#dvNuy%zX>Q z`gg<~O6OkjGaG9SHAMGt#Q2Zw=)qFe5AYd2NQO^B&DCqweh_?l)pU(^Ply_+WQ}8$ zu2BveagedXz1BVKsugkagHt%&YKgh}DQ#+B^%?dDnu7MlCUVo24Jg`j zIr)grM}Lg_EC>bst=tLfJ4j+XK4NiGK{GDqE9 z55R%Pp{kPTFhe70@j{_w)K!*2f=31NJ;BC)wdVH!0JQIh^<8sM(qg=e?64%W-p!r{ z(ei+Moy4v?Z6ngPX8oXi6{u@=7IAAMbHMYJ;Qs7oLv`bz9Wx(@FAX zxw}9BGqa)-k&sCX$o}s+@3yh^ZAL|Rw{uOBe$ zifaowJ0lt57U#V9tK#>BG&w9)wMe#V5jsY9937{rBZ0NZ2h%n4hwTshRY7Lg8a1?| zFv*ujd4!8a6mkFKGou0FZiLYYTBQO{37~;EShGgeR-zsJAxILWQqr)Aq|d3 zRQuPJ_~XTL_=fH+M)GH9XGJAT5VDx@dd_>;<)tHWmdJ7Q_TYdcDGR}SzF9)!d~SYFK<*B5 zJMw*NhLYpXl3VBINnsJ^xjh*EgCAPxd?TxBZj(b5%9k-1ogFd=8D($7k)GMCE5#`2 zZ-=RNk=$R{i1f=F^+1NxVn`Lq0ZSG}2e=%Z`kM3)hVJ^6?Z1TeY2)&*ZQ2WocLfm0 zWqB>-Iqb41=Qs-_fc@iMUuUVuZDzM}%_Ok2l7)!l=54tP`4j&De46d_OeEr?FuXyc>O)D>9ele>zwH?zhQZI4?R0h{ zM0*v-%Eu$29Z0U5!QK#qSh2a+Ce&{%CAL}ZWwN?eb8hir(+7gVm5SuxoF7`F;>$Cw z_}@X3QrE-?@);XaSPY^omMaT8CPBxU7U$@8xIOEn_+g{z7uMR1p0^SmYRdK9?$YFu zdut~+1af3oh=n-747nA@T22)2{{Roz>7h@at(j8C!1#WPmGz&&mN9OY%0n1H-bur% z?_ft&%7L7mjAW%&7rwu*OC1rlH@2I6kDPwR;;jebHK)UUQ9S6homH0BD_|2cDFCXT z03ZUx)s%YIzx*Wl?DsmgoLb+CQ%$#pqPC5UGZ^E#P+28l>S75Z?^jnD1$P62O>BPI zKeJ|wec||iIQU~(wu-}{&qN`x5xURadL+d?K z&eqez+CG!xi|90aJw`~M!?*W#rBd4N*6B;zN)(;S7GkT(5+f@q47hFC6XC~(ekHGi zpwYC8c<(K4zqKu9W|BAyq2F^8^)N8UnyShIGR(ag4m0*_@Z>%`_($RY01f!X(irUS z#J0Bx>?Mk3UnxP}O0Mn5jH)nS%3=H!KaQ-lOAE`3>5P)wqZJo-F|ltl06^O@*c1$gksEI;p<9VAStj1* z&9;mBAB21z@aMxLT+($HzSA|9TbtcuMkZTp(p0L>B2vOTg~rF)K5&>gV5IL`>pItq zd^2pWbiWa45`ZkVTN#-cpmjKuAsbjQA3w{6+q^4saJ2sb9{4(M4v!Z23s8bRHXWCD zdTYgrER}*Rq0<;>NeQ^%6lGQgLo$WZb z9H6Nn{HJm85A5}y+cz17(Ksv_Ct`*RI)41R(ZLdbqyji6r&2JQn zi7`^}gSj_!%AUhH1a)2rJ!_s2ae5fyTIIRqpR^yty%R_ACDym1+7}bP;TRbKxF;W> zt^&(KX;)>^>^rzo(0YG5^>2;(vcauQ3{w;`V5njWkVgzZFZ0E4aK=VF$0f2sKnFaW z55tP~u=0M%Bjxcn_O8RU{V}a=);VJ&<7g!F?Z>I>?ONJylFpHal&cTESnyXF&N2AX zS=^H{Tgk8&1ydZcZgPH{epPnLJ5u7~&Xmi%0UMAME=r!f9Py0%)ynH*n{vWiC9v|Q zyV^%ZSit~x8KSPej;myRtBI|_Ib{I}+N7KV^yx)Mw0(-?W~P_%*Wq7*HE8}6d_DM^ zX{`8)JVkq`d8q#Y*~)MlIEQjDK{@TuKPc#R{{V>Ae+_M=ynTJI&F%}{u_eJHzkayn 
zcg=o#_$xrrKj9)CA@Fvh%cx4_)PP0~dV&4|y>0$8c*5S+KMH(Av$niaSV4A>h~z?})msua7Nk;nRFW0%}@=w~)}yf*bq5`>sYkN4;We zUkp4m;12=Crueq_k#RofxQFIsmy)u8J01f8(*XL{&#kNI-X7E<@fG@A#WLJ6hI?jV zv%W?T2d@=#{uf@S;tM$~b&YoBUJAoJ$Tn=qCw4Fa;2h$!R5~SNRft_py$`{yrFg>9 z?e+66n{}?s|ck(=5za56Axi9{wbJQSmEy@5Va4j_0>aO)E&!?N%Et zCdmfatnDEK5W^baD)+%Hl4~R5PsbfY_J;8kI_HQY{>z5K)onCg8pb0XmY-^zM+68s zSkwW|GQ{pVCb;_@T5S_iG2UpADcZ#(5&};d2OTlcanm)tljXIDgi@Mqn)f%JD%SNs zvNePV7B(u%4qH9WPv|;gvvnOOQ*A)$Z46NYq_m&jKVM#zN5g*_0^eO}2Ff_@5y)93 z+;R_L&ri!0u^r4lBh%)WSdroTIo`7D#z5>3eEw9NCY{Ywr!5GrybpEZqA%{`jvyZ@ z&mnD|ha)^^93RNnO+C%;?AdQDw`DH;d#+oMyD(eBb`mc^(2Kd0kWW~U?Hsda-@`R|K-RcCqP=kX2I^W0rs z%+lORJMMWMi6xHgp1J(0qwOOn&A;{LLybAB$3#_|!-rM6H={-oZuyFr!vT?;=bi~C zCzF%wR_%N@;rksGtm9LMV=0r&B-|D3dw?;(=sNOqOn)9}o*>mA(gdux3z*tb76Xzx zany7ij`aO@*ZvW!FYTsgXyD6C;O?vG z6X*j_@aMyuX!TuNMZLj?S-mn8({#o?3S8ZQWCNeF+ufd`xqlGrJ~8n&v#EI3#{MOf zQr2~SLVN8}_UHX&<>c}-lFGn$XJ%{K4;4h&*?0 zepCIkbpnVjBN#&CCgbKgC2QR;ezw-&1xhcwMfTbS8_w(?0Pc5V?zu2_MN z#i_S3&SYkS(C&#)y0;+nf)D5XtDyK(6xRMAyzxYkMy=tkJ+(VTaHls5@>>C(Ki9lX zj^Z*gTHP&3)YACb1?|hPfZ@GcyH5;h$)_aI07b;gS?;IEI6h^`m4P3_v~n?CaiZL6 zww4Ptmr|rrv0o@05->1G@5dSRu9EXmmrROu)9(bg3mT)L=W!X_!|Fjib7-_r{&Vh{e{A)^eJ!`SdtZ^d~f> zQB9{GP>dX7X&gz?X7K+2hjje{+;1~a_Pt^i8FXd@K)CO=HrSzQaaelZtEf$_>Ke3Qq>{hd0)dc=)F>b9;~4G5Qr0HC)qE`4 zumL0!i6ysF!Q&g3t_M%Twu^Ew(lK^5Bk`r>m4e4CrbyvY2T##uuJEJpLZF<-=hKT4Crk;ST6$#jjrF_1-EKJ&B*yooLi02(O!H`eG@{G(hXh2x|E9?fr}OhgqP>uLPmRt z_^MNUQ1KzueCE_%D`$!^ad50lBq|gvVEX){0A%F)16;ndq_xS8{^1ONXH|tGErEb= ztbg7-pF-b_b9Z;TjO_MP-ECN#5ym)Bx$EwGXOqos4p!~d!mONDr&HoDi`xFbf2nGX zdE|{kSe{7Zauq~CtZE1QDnR@*-mpAX;#=#heb-VJ#j^4|y+T$d- z`5*B;YP|QC7Y{5b#iO6(3Vlz?G3oyR>aM7JuS9UUJ&xl409W7fx_P9K7_V>IDOHa} zxJ`|paAYU&u1@YJZ6LuX4QqJuu+A_T1CORbsk}paTgG>Klv|INeUMKW2+NONc>L-9 zB#cF@Yj6O620L z*>5V8EBqjzmjw3*ApSMXq;f2T?Z9rN8tpt68{AmSD_N|oZxcQV#uPGwN4`cs8s@}G z(7Q)Y8D6E0pAJO@-lG7P7}nNHi5@r~B~nHqah^7i4t|Ea4Nt>5)s6M#r^EZjd3FVs z*@#gR6lM$$RUwBTPUikrf|HeC}_w($ML zjU?vaLvbSK_o|I5s}qijGP`;Z2Tt$S{6A~(_mat zd;atA9&F_|C(XOq^dT7XrnXz1hL@{Y_-|Uh{@D}B4Xn0P*lG)%)})e)I6RZKP*j3) zQJ4|OJm#S`m7w^a`#tS#C)0KN0X!lo6=t%#3R+0@BVSqpoQ7(od$X{8ELt{{Tt4A@6kg^R_IU z?kH8`BT1^s_ooe+ej6+4oAZ}9}W&+NA))xvCp5kV)~ zXAU8Zw5-sP*skb?N8Ks6i~(O6_daaSF)_-V~~8ztWTh2KKMUD zUr~7S+Sgk6AL366UW9v1R@NOx%4=66Ga~@ufhXtKA}k3i;c^aFC+7j8$t|{?BVi-R zB-mL`EMMiw?s@6g`uh1?Ql&NKc^Q*#v?b(yTl+oyLh#nR@S9TbOm5NM>Nj#5DV4G= zZA&1yPzWRUY{04DfY>~3$DsJu8~t+I!&B=TWbt^5MZ4duqHQ8rnny{QSEGjvvZ*KL zStVS8Yv$kCvTqYdpP9(T(DWnk46i(j`Uk@Q01+=N z?iw!=>hMZ!Zj#@6FU=K=>^VgQj1-PoK>q+_F#hSUpT|*^JY2a?a|ct};p5D8J`VVS zZ{SUOOU-9Oj?Y+<%IXa_QYa*})a_D4j!5T2$STW%7zR~&2k_wTYWf^r74YrHgA!1h zRsEI5r=wY3F_2ry11UDDNhL}S_JC*psHz8$@Zm@d?k2&eRfbXOg&6L^=V)Qx zywwHBq|w&pt-6%-OI;NB6+BI0;kUZF)1h0L+R8AuQdzm!lBZECZD6bpOMTYPMlN{2 zO3<`_4C$ZPx>Q=Og)N!9(-^CKktyBfp#@qth_Nc5f~tN}HkBD(57my5;QQSQ7^af* zL%g}Rl%U$~s^7k3=KvDn!k^+U1_mpn@fVLYd%ZdfdmG52HuEI2O0AV&Fb(aGtB3bl z2t5ruP0rRblUtrO`$y@%7Wj$bMzMm|$<*SqVQ!Z=*>Xai$oayv6~W2D;2znoPxb`y zrl;WDLVY`0++dWHXyqs5WXH;TJT+`brs;N%KChz2qTc=m zyM_gFLIa$D58WYwQax~cir}=58QNZGG3a{5k|oM15eYCsjff7Q{{XV!Ha#2bT^+BB zZ8RMK&nxXkkQ7qG06K+rKH!kwL7r=gQg^aNVk#vQi1CMuCDpaITjz|V4h&`b?I!>q z%lZ1%B)OhQB{mTmZX)^NLcs%_r;PT;0Qcg#y+Q9=UYtf(YDt_%3C|+~KER)TN2P4q zL8wZpf27(K@<0;HaQFpIHynD8#=WX^;|fa0%ua4oY~HdH>T$&_@DMD)fbw}Ndz_9t z`**F#bS6J6sfd6Ae==+o0O3vtuQivZz|uhjPpCK&C_YpXa50WS^=yI7b{bRwTP4fr z>%0IaTc+E;p~eqhPB1>c)h#8|jaejjR!Y8Hx~I&%9kCKUdj2$1wD~WgfQy|f+D2z$ z8X&}DJPh~8O#4w%+*{obm7gZ~1HnHJd?Vtktv|;SPhqLWGaWvDL}7nCXRim6I#*ZX zpB3mnC(%6n8+$9c?WNl;@BzRb2w)g<`h6=G;|zBCK96wTA&xymTGS>Y!~i#J1&7xl 
zfO=yDX1r5R(N3!}X}VvC4XoECzS15-7A75sbvfW;y=fW|s;?`bO*+mV>`zqJJZJEu z#c{WXuVHI>;*USLJCv~J8D4Yx;P|)|(6#LV zwYx}Xyn%=MObUKIyBv~LM$meJgJtHjp;I@v_oDGUIQ z-(AguB8&LBX2P#MbnvO!LuPW%65{>jde`jj@W)iv8&&b|k339ld=KIF3#;A6pvie5 zUCU!=8|7=480Vx)cQ;iH!Tf9ernN7O9x+WJd@XIF={ilf*)%^4+A{r@PPIdoL<6fa z%RHZX*pkH86Y}=V7(lJWx=q$V|;q|wDQ%&-xrF2>h-W2$sJbGO7-D!GNt&>fz zU8)6WEzqcZ5y569QBOH$9)`TP!?*TYW#sz(t?!W)$C%1uenOByQU_h6XSn9I_0JqZ z;a`Qe8V;Q(j`zfqM}K=OV<`Y)w($|@0P_evcl%YkE^g%gXO~Xqw~PE+XRK=y>UP$q z3rHhNT@OxW#CDdbHl#sM9WYq+Vo4z3o~51HvC}yn>YlmcJ9`@+u-x0EQj9WAt_qx-aomnU z_Rf7pd4=bU^$Sa=Wm4<}WGg0eLk^sG&p7p}ud4WuRkaOw4czFjm3ED@j(vW(rm#-; zlenHWD7245(RJHsZ~onLCg|=|#s_9@PafR}_ZY2hUjD+*M3Ya`=R*3$z{ykh;?m})7Rdo(Ek8sKM0G& zi%46ES)(jIeDv#{*yMlp`wFE9rrWqmtn89Jp6ka+CA2r!6R+>Cvv8^~#w9I`cQ|rL z`VO_5Cytut;{O0zvx)b|7ng3KavZS$liQ#t9{!c|dVb4377D2^hvQ)!1S8DAatBe{ z{{YqPQAO}~!WwZ~D;)+CgypaZ$QT&y>-y0gMFjS_oSj#zXUws9qgmDM5RDC%WL8Ov zA^qk_JRaRoAI_+oZx>4>mfCicJ4VPym*pHUuh4V+tLmFChx%f{3Pq&GzbRf~9dLU4 z^!(|Ez;UI_GC^#c0`O4)1D-m4M>V8zF-f%(2hA?0%z8J3{Bdh$#?MKXBQoaUa0fW+ z&whjPrvCuKz486@V^11u#S5?{SOcEClic;=pHA2xPA7!Pn7pi2ca_SHdi6eoJpL4t z_*&i=GZ|Lc-#Uf$KjBoda)P=q{tA#ugg9LWx9lx(is~^G- z5?)^U_Y=mT0t0p5_2;iV_2gI55@<1tc|6-t3}s0GU==wS`hWH7Hv7ZL6~wVxS~x~v z>PY*ej-I2h`4v&cN$NwV8E$x8x4~JRgIrx@W8~p_pHHt(#-+3PU#3~l6~n~R7+A%uGxf(E^Ip$Cg&9Q41Y{uRpde=-q5XKO*M1d5lgN%HLn?*@u5x;QG)EOSwPJLk z(H<>7!>E-{m%)|5CD*AZ82r!ksWsmUi8UQsLJ18v4tW{gc>cBPu=p!aeM;P1q1sqW zAW~O_A&xQAIINq$2*)wGmNfv(`^$Q;CkH=`X$)4R`zToPYrhPvL+5~<#|icS01lN( z>qjw=sxm>xALp%luBG7yb_gNNWPsQh!5QFwzlCL9coO1eZII_Hf=>W?bo?u#o+VpT z2TN1V7SOzl5~&27d;8a8;7F9|QRz$+HO#@}Iq4D-Reprz`(l#o!1{_V;Q=9+jiV&{ zr)X??bTKroccVQkN7Ih4 zugh$jdM&rvr1It;c+a_tu<7!QvE!}@fnx*_I3O@M``dsd zj-5E`Szio%Yp20`Cy8}ywJ}Nnv$ieCVnN0SPnRD-n(cftbm@FUsOz#^e2`2Y;$YwF zq+yVtdjK(yT>96QQWJ!o*2l3*ROba|T+_7pFFqdllfs@ty{?U=T@5M;*O1a&B!q*E z46u9*Ri6Yugsl8Ardw+luEOiWa#_iR1-@)tC?gykfgbKNzo^GEcv&OT?=JNz?!by` zX(Mpt9LN*|;Pt^I{u%92Yf$NTD*9HHHvQMJqe!4)IO0_bGCCGwPy7nF-#UD)J2rJI zCYg6i@S6A!#D`1pb@SWCEhCl5IgiaINXp>wR{?tS@_W|&`ux#&Zr@qAw}!&&Pd7HV zcCii7?@7_Opg8#(A;O+H1EFekv+4T&i>^)r!l|b?gy3biko$i!IrVDtom<74q}o`} zOG=Wyo@6qY4dzH=0Qc+=0)5A&MJiIOEq6sutSKXN;ywMRh5R3U_LHxZ3zt8{GqwHxrv% zCB5~Xls1}fy3t%|n&s8i&B)0ziJesxllPfkGav_%pT@L48|(UIg|4-!PL7&=_3X3D zXSXP`lN-pOXO4Gc>+EZogTyduTHc*w1H4wZ@-^FJ4nkz6Gt(UXcV@Efd_^o;wWo#d zCd5L4x*3^q@`#u+u;+|ta6eEh%yfSiJ)XXrb?nLI+Cwb1 zK4HckrCBzgKz%>jHDAWx7^2Yc?xjgqMHb)a@G;<$%It?7`tjP&KWEPkX}%wl;%mmtSMoG({o%E; z9C{E*!St^#)%<;@jZyUrmEP`2L7bHs0B0V@AAt6)p9p+F@fY@8t*()yT*)Z9C6~%9 zTPFYj2V8z%t$LBd&NR6;O!F2dDyKU>p|I7`@(ZNa=U^j(Fj*K7t}+ArZUE=jsLSKa zpBU-#Xt4B&)TFzM0zm3Vu18+@t{35-gFZ3%#^8zVXM!n_Qe!IZ0}Z!19R@oOPhnp` z{5tpso)x-*Z|>ohcNUUReqh)bUAR1Flh+5YTF2N^r0*u4$37zyQnIqx<@B8=Rn%>5 z?qRff0=6y&La-eR4yU$$huXWnBfyi|s9&+}CNdSsI4j8tN$yAk{N}p-E5kZ;6KvD8 z$s}$Bg$B~+Z(pe6@UEJ}K$V&}($yeAAqdXd3z3#n&rS)?)|Bd|v^do^ahgAd65C_F zuEY#C<%SkOaf}Y!XBE73x|@@0uG5)Yjda+SpGh zjD=|}tr?My0mvX@y%p&?CxR{K(<71#i9+K52*|+g#(gnGH5ota{s}zrT~EsI6?p5# z7nsyhLYXC*+47?10Fdb00Ha;YQf;&3qY>$czV?` z-9!tVepR$7$_XUah81zuDm$OdXM}&?o&Nx~Zn2^(P5V9git2PMo?BU!A&EHq*z5KZ_^oYhrrA?uXime61XU5YX~ z99Qm~{ut8Cb2%%+h1y9ikLguk!TKEN>pWRRU|BfieqAbgDt8ZT<+=D}FZ>(pe+#w7 zukjMsBN(Wqkdi{Bp-v`$@0<&HfhEBAOXtM-s*xaE}0xz~l7mTyKa!Wd8sPymCE?$>-c# z@*`dp1AsDk?ce%WH0f7+Nt;6!li2)wf7v5K(=Hy?&%u$8Emd>n0x|v2dT;^HrAWU6 zbbU3Xjl4H(hssi*LZ@nj$;WOx^}(;x&lG;g7LaM~u;N#`w|-JG%vcN#oUzB#k}HAN zKVy9k(!$+gih1s@P>9sW%YeN=9JVu)+of{4HIlP8hAs`C2FHjpsr(k!G~l8W8E9r@R&YXv+(!o-6aAh{AwYS_>yCqy)2~{HrmUFNTeE?;(RAq~jh@p0WeF?BM`QjT z)RwwUlt$k2%}OlAl>!`OV;`3$y1ie+H<~S!jdvf*9E|vkazW1kah{-L@zmBrX;-^s 
zjxrbh7)=~tXK2m>=egkUJvpn3OHz5Sp_@F=sQU}fWR@8=FkECQ9G3Lwx45TRcv>mo z*$hS`x0r%9atCqj4`qdlDOL8Uq97R~KoFgZI zGlj=|{+;WqeK%6Lf47)YA#)bOSZ4(1kUQg!zt*dd55&_&m`3>kftijs;PJ@nbHD?( zXiS!yIUB77?>M)UV#uoDS%3_~B#);V`~^Y(00_6Aav(AY*5N@$E0T7QSx2W~lj~iP z@Z8puEQUogTFM}PFsQi)w*Z{y*R@pCA^R2B5kTT=c^QWF1xg%zz#mcE{e3F=PSe32wNB&0cN3&b3`_In8=j5N1oP?6E4y6|3w4zxX;LH& zByQt*Q`CZblae?C)2B*h-hd~Kho2-2t|Z)u0mca+c2IhLb*yTn^(C!zIM}>4e6)$v zCf$@sK;s?Hu;V=R;+Zdo?KgR5+sum5!7_jda@+y`0P5qdb`oexeE2M{RExCS1cM`a z1A+bH!Sw4)xYFJ$mQlS{NYn&S4tDhf^y`6v?^5M`MAJB3Pr%j+KG6#KasWu()W!(U z&>Z#l;;7&FDgq_@DxgJd<-rFGM<92`dj9~0dN2GVcN&C=A!y@M8>b3#?YQAU9Y?-# z`BEPZ-Aiv~F0J=&W`cEz2~+^^81b~6ZQIj1=Zda$wCX!E%CEj2O%l8j+l(*{q<06X z$8Yeel6*aRrQXn@#__AF?8XK0f1IgnU z^{e_n!cAWJrf1Xl3v0ECVk84%kO(B40oS)VIH6W*Hs~nE$ocN;;a%I@Z8oHBflJ+dso)O;N^{sNHqwvEK`xRqX5f-Ib3ug z3}=ojLr?fkr>Zs8t&|`ui8A;s=OC5NIN*`TPCe=tDXojLJ|)*a0^b#NlTB^!#ugGY z)2|rz$JV&LZ{UWgRoc?pK_V6kthfXKGmk;R=D$eqz87g%U^KdPTVsu7h$HkM^4Rtx zzr9zx_*dckjKnl}+E&Y}N~%<+8OBCf{-cVxL1 z)q>buXBgwBKj*fTe-U->4qHJD_?g_uWsn8RhCF0@fG{vW`rn|M=j{FA>${Svq{h*R z_XvtFEO_Uj=yU7EbK0NmY2bem+=QChU}Cw6Ba9u)zpi?8#xg42VHw8rj)RP4AAz4K zf5Pjx8YECk3bM+j#&R-OJ-zZXQ|TWWY;@L-Nm zzhOUz+MT_qOE}<`7{>Xdh!&LRDhLMzjzB(~)<&=Y00i>*ZT63}&3sYkU!2B;L~Kat z3HHxE)iK0jC#zQf0Eap~y?E<$!1a%hnqHHp2&`{pmk!N1$IQerKE3|{!o1H~__^X8 zZt5#7Jsl&CGr1!E()?=Ly^1Rvo!`z{aIYvJ-u zD7vz>44B;%p`9BfF(5ue9!7c&zLn7UbM`FwO{bXUJ|4CT5KA1MPSB1sEfZ@e0mg4cqvC zNYp&2mJ~~ii1{)QF|+~6+{bZ0%Qf|6f3i=(a?Lz`3b&XvV{>JI3{Dtr1P+-!0Oqx{ zPuaKO9+z?ci{MQ;R4b?{3ef7=ZV7mlufCvM2NX7`xGI{z}+jias@JEN? zai#b^*)Cxqe(K#-m;f=f<2V`pE3NRQhMQ}qORDNRJ)D6fBG%EA0Xb4bZ^jPQ2aqwj zh*5>yLWNs9sOn=X?daL?1-I8Ow7fDHa#0Ai%z3=9s3r$TTs zR-v+$AUmyw5m)UkfCNqTKXh~Mc@?6>)mG)QIi)DG*c!ah-n)d~ZLl$GVcWChO)hUbA@ z#)&9>g=xCz4 zth6$_B)7J=X~N9MB(US_(wrleUN~Ze6=^W;&Acw1;a-_@1cq4@z=dW7jJYnzx)@Uw##e>U@(g$1Fh3csx~W?*(fX@XK*>EtNtf){+DSDPZgY@4!|(4wxCu6jlwpU&$R1lX59) z9v{87byc>EeWEciL9~`(kC>Cd^zM1AyAK5=>|5_44zB?h4hSk38~_2sH&A)N;)*Hr zJw~;ai7q@RaQ4xCkxZ8=9uRk~_8%@-k^-(zH~^8JyjHfE@ZH|ePJ4|`Ars2btYRX- zWHR6b_qkEaV}N~VqOwz#sJSa_SWkkm*uiOWBQ$Rf`+1q#?JBV-rH=tdFxXH?`A0Z3 zNARn|+Jy6qZ=G!?GOEhB5u%Sl_lfzs@srbmMHH1uHDYBI%x#Z`{uGU*irNXJnMj@u z@&LdRHbUfv^q71EA~I zt_CQgp*zGAcVbv+_c!*oqS7dTcuXxBViPJ@9x?N9gk-Xzu1%CLp)(cgUN7 z8%97G?b9dMrxa0A_po#`JVkR2j=Y*g07*O8iQ`bXR5>F#J${%SJ?THNpw}l`>-&aO zxP@J9(4G!B1-js5a6SEKqNV&zb_=^2ntp+%SldPnYB$H3qEzL!s5#H_{5>ky*y6mf z^Cw&e!%U};#0CJ5PQCL*6@05tG2O9XveG1g+NH5`3xoT^l6vC={vCbl>9^Cyo|bZH zB3Ot91ZT;~AxBfm=c^tG=s=>1bBef>pD4uAAhmezA=6?lcv+$`z66H@B;;^@U@}L3 zI#$hxgl1G-_Nvd;TsZjv=)hgkXYa_>%m2BPK+fh zn7l@`f z11y0<7UU9fp50})!*36GUrmyGphQ+k*AbPG<;Yx?1i1y+jl(U#+%eLME2EMLY-cG- zr|p@!!|XzXun7Unqa0=q2POi1{1ox9YkgSX}9pcT^S9utp1wDawB5ev@C zYdyNGV+V07aOGd-8v^{Gk~(0V(M4+*t6a~TP1vz__L$qvm1J+S%&^O4DGV;;Vv(!w z-IM^5O7uDQtq6QMXpXVzk;@RW3brp1QRZYj8+^7($MPuTasVADqOBrs*g@fz)kpSf z#U-LFyJWhzF@}NigUXF@@~Ig+fygJ0xf+RSb2O4#JWmXdZpn4zTo2tTLU$qBK4X#3 xOb&q2MN*7!aJy+-f7&fPDHM7}t#={XrLBvDD;SF%xjwyk>@h_YRJPL*|Jgmi)qwy2 literal 0 HcmV?d00001 diff --git a/data/demo/001150.jpg b/data/demo/001150.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e8c86a499d275416e7d9648972ddc3e9bd65dc0a GIT binary patch literal 88635 zcmbTdXHZjL*gYDgi6TfZ(v+rD>79s569EP3y-AbaAtXqZE+8O9qS9-m_aeOrp+`D| zCM7^9AwUxT{N8uw-Y@sVy?1BMo@BChB4Qe%>s|oojh&?bW8RIi z|2v3o5tERTk>8kc_hv!;{TuU%iEoT1y|MUab?D9a01_Hf+J|B)WOQ$B$RBypi+@b{ zahv;j^=}5fi6b5fTkmj+JB&=sEUdhI{EwdqNJ>e|$jZsTc&VzUuA%u#|DA!Mk+F%X zoxQ_*M<-_&A74NJfWV;Oh{&kum`|~BscB!*GcvzsW&g}CC@d;2DJ`q1t*dWnY-(=l z?&faRH5_(4rw%!wW7|3dbE0sFsk%>yWjiEa*$m<9j@jE;-oo-o@9Z&t#+HAz069hw{U 
zTUc6%2wLcOkFw7H$whK?-Phpe34zGl`_EIQxbjdSo$k7E#e9ms_wh2><;71YOUWbQ zhOZqwcS-AxNxvbxKn<%+${WeOMkn!gtnoY70Alyf?Q_nS5Y|^U-*SuyMhzV=!1s1j zuyH8VS(9*4Rv1mmC;Jfe#VqG1MVs<}9o$X7GWvTzZE{6NM8Lri)_(+!K$#a<2vRWg z4R)U8Je=gt_iBzdUtmn7J~}3>O{shj!CHnr9;1_tT4KIWpEjN}6Qd-fhuYcO*~Vf{ z6u8gSO%LSFYF7@VS7+<86iGQ6vOkb*Ns{~&0c>n1<(-WNLI%77V|p914KOh-51Vrz zIvpDmuW(>1=js&L>AJ!!u|?Ah8n55#+(+5{-rFODAfBwjshxr$pM=gD(S;wif+k-} zxbZwe!C6>Jf3Ar*$m~G&ru;7H_KzkLn)N#xCUQ8W(M(Zo{N)W_mpN!ZFML(Qc3(n+ z2iwsxJx{C+n~%PHSHAtYcl8pIR}bt>B-TZ%$8RHip>#V)mcNnuvcFkQQ3&Xs@j1us zIQnDA8t+stzm31#wj#{}aPk{3DMRQ=G3)};!#bHCmN2#Po49DXM*!LNdj5u!VuB`6WfNQ`RYss?I1D%mgyhkzQTb3o9yt1tI z8W4*S421vgAU{g_-eA-a5cN{vg~;tYp~_!AW2t|ib1>%gIeE~|KYEdL*G>uCTm?ra zhx8i&!|t33Pm0#oHpfWWe+xtv(V%VVzFw61f|gVw4w6!Myu9 z9sBg6zLdCIiee>x3VH% z714$FqITtd`uwb@2+V;IJ;Br2c|Y!mU%V8V`+b{QF|TvY#BR+f?x4+$tdi|%Wy|gp z(XWmcKMW##%ft0y4uZl`Qx+lD0IMJQr8-pR^}$wtor}*qsZVKyj^Tb^>OQMWKT?!= zn;TGu$h&{!@HyU~X&SZuv7q;ZHxZY&YA{*sDvV}vH}6qHD4*uf&d*9^T`xAmL#TW9 zEG4e2wUMx>Yrvh#{PqcvN_We97|n%Ex(wBMwlLl$Vz{!x^QqceKkcg+c&BjLl|;y*4|5sA*%0G9fY zJyNasetkBsN5s)@o~p2&k$I z0Qts{)TO0G`g^~r-^XPsn`yP0Fn^g>aNW4&oWHo^DnWAA`1=J@dZznlE|)N2T=IDG zeA?=A{(3ptWTswf|1LeZ`+6Pp#oi*AwL8E<5C4Isi%Tkn+YRL{98 z^L1JbS^s?AH!H6Gb$M=pc(P*&{ArI|x9;q4J)Jovbf%83&AGl{ACz%iJ^ z+6wVsZR_ksrYHC3Yt<*32dm;_YT2aTfB3?{#pl` zoHCH#8upWlVpm+dtcnV>Zl=e}v-EJ@2u6=V;_QofjK-1YESNUvt$Ce_RFH<#Z=RAn z5-F!jKFgKYfZy5>=MhM(rFae2V6}C+>DRcWil3xtv-T9IfH;>n|1LJW{)$s+3D)!Y zgEcMNvPS&xc{w)wfHBwoqd<@vUF4Fc)3%65rfvt~0!oY4qUl{}hLD(cLaqLsj+#Bo zhR~YqNgL_1s+I62q$a|A%d$TT`$5r}i_KKU!Q<#yEjVTb#x4`Z%2bRrz70L9h zE*VNU{v5dXaUW<}zBY?y7;AOofU!-ykafKK$d99Vfx9s9%a_J4q8@vPgf@6Vc*&1H z(-xT!Xx5-s6*g5?H_MAlX>f>(mi$#K)2Dhj!5%$Jjd-CNbGP2_5_ZJj2Vm=_MtR(` zkNj)J`iIN8qi^4R%G|F5xP}Qx6@4FK| zlLezKsl0S_S<_b1Mf)ySE<^I(ot5)9tfZ_1>oOzj8qpi_32z_afA%t2H}jkl!i>hO z9!O$+zs_s;SE0&}6WFYe$TwW4+5)Z|32^9roS(Hj$i~mch9NRGz;o8VMgtc0Od*^? z4)xJeY+W~Yg$m3S=&C;>qG+Abm~kF0te|}?j-a6=;ugDVCdoCLvN!Q}eqO*8_RCS(cD1iP~a{FE)c(ST64Sjkp?!(i}xb=CV>Kd`@JfUoCWG3&2VuroXxWu|2U zX0(KO9R>B7>Xnj{`{uhzDRB}girM62VnN>jc(d`Rdk)mRH4g%Rh^&4619tdayZSPJ z>&+UuzyE((I z0eAn~u|8#%sWYu>!2L(DYpt=bC!z%Gp4L8j(oCFh#~G3;X3p6=YIybbTk2i^_%~?P z^*_4_5HFWbKXs5biep~^5&sRzE7bkYc79HI!K!Gls{B;xbp&-VRk@J`unMp2a1FSP zdztgPp&1)5a;_^bTi2=#3G9lDeoFqvOh`>$I5g&p2q%p0OaQ&}D<37>GeP)Km>SpZ zy=^fm09#RljlXzuyzhFOZk}rdht^Q<|3D_s+hM{d!Sp+(k*xliYtu~GTQHvoij2I` z#P`YJTzb#HZ&Z(|O}b*zR>`Wbn88CG6jm{i+ih<@%#S~7Nhg%8d5Bu5iT8!0%s0v? 
zAH%`gAj1#8-b_rt(SeUv#rhg8HSC{aE8tZa=+xB!Xfwr4EXd~Ce=Iv9qn~Eb!L_Jv_kOlvRvP_0i{|C0vhP&6cI@S4 z=uXr(P8WiM@8K4v$OTSxuZHU8d=tKA+d5i3M#$QH4jbe^$00)K9sjGtCNtIUt>;) zB1Xs}JKE9gMQO;ubhj-knpjnSuI^eC^Aq>LJ>RQps(tiBpP&Uy+7&*PumoH8<+ zsrwO}Vd=ud{meq=b$LtpBk!`nU7bLVvom9t$sd*(k1RrhmcUR6B>Ib4vAko?>Pbq|b3WN3OVUI8RNW{}_ND8{D%R~MbOk8BeXwM3X>0v zITXT~8%}CVGx!07j6O3Ur?rA=xXsztFS-#8U{dp91F%M8qIZ9FK2zoV{M&bVae7U z)b6PwCx)Jkg3^vUCU>V^G~8hbsM6@y7~;=8g|$E&@MPM+we~NmgFgyMk8Q|dz$h?p z?-Zu8>hPG*ifhHsQc+2+`*hYDzF!tj3lnE(Ms@Mkeg9nbD8%1p+KrrJs~}Fnw=4DR zogGP#ENVa4;FH%~IuF)Y6mW8ni?gi`pn9n9+0``w_F}xLL9F|*%7@A08H` z)_s+&fqebLE0tnr`pVjCd>ZJI&`G3r`y&1+*Ok{xUVcG->`hpL!b^q426WQFZ>_QH z;yt*>MfH6-Y;9ef+S|CtrC-)Ys|X2&EPpSE`z`6rEbR1X7R)7wq-_NXBl4-_?Zc_b zt2tk_7yo0;%Rd_Sq1P*)Yq<4G;IHyZqGkx1yKnaV&j%@oR2t^>Ve^EK)0bE88*vZT z+wI<(rMESyrC2*YD@U-CDXhm~90d$({Iv!zFQ)%IUh{a&@2a$P+L*G%L0gURU7?xZ zWiphZ>0uk@anvKGTtz2+<)ZBV0UhFTnM^N=Pp8#m9Iok2(47I3BxUIk6O<5QGZvk!bc-oM+5OAmM&W!^7F>oKtY!V zH5Ja+dZ1TFd&|y)z8eaahcX($Bzh3M!SFl?g8h`1=HcZ?cVFYtgyz}mcBN4p_6#k4 z4S3xeRfSSBDGF=`~G!X`uGaWh&0)aC39(a)H3MU9QEhrU#R(v$zJV-rrjm zi#^k=R)!Ia8B7QU%2GZJ+i{sqsdCY z?~3o83v(`va~)5_CXfw|aOCCsR+X#b^Ew{j6dIE3+%P|n(*k_FTJ*vocb+JwdQ0>L zZ~a)$(GZ$kULtu(^$w_Ow`b{t?4(mwv>xmkHlcXeucj+?rDO0rS2(5MSCjFHV94b8 z=6UNpK0W0~mhc4b!=g2B=&YP_ZnOql9=yr7=&mK!!_hAw`7RwMUD zmRavr@=2c$qz)LS%t07m=9tFuuM3l*C1)FttE#}`v2_vYwkm@HI$rR0@8yTs^pLet zL(GCsYxuJB*XSm0A$kn?&T=#AMgRKSDa32m-pboUfk=gb>M@FQm_dt3a_y#$Qnuk~ zjT!HG$3XHEgLIuB&6SQu<%igtQj}y0&H2$!m(Dy8Xqa1mHfv3{q^lIfiPkm5XAxxg zvPs(xvSnh?f~u_>n#W*k1wSsldZ8WY!)%2zmy0dx@9=NJ5l^lkY$Nw1hW9htAXq%{ z-!y2*J^j*cV&@9vXX%R!$JUp4A441JLfw@^LDagMujSRL&;wo zsUu-bdoAkSN&)=@?ipUQkb8XkERfG_39avlrYTe4C708j)F7ICH9gciW$(6vSK1C< za0udYm~8rvG1zMVWG((P!Zfjaad(dfso=Z$?sK5fU)fXoUl(9b_T<9I?kTP8SC^(E zI*ywTKE>h#Z6Z{Dj>4Ji6QSGv+29zYmM#PQC%jNwKELkMCVTvWZC?`6K`?>gjI&$I ziWbY%WhftPx%47eSJLISfLYq9;*1qJ?)i)|o2j~V?&sT8Pu(LkhI;|+-fAIs?D9rG z{y{k<-F$RvrW|JMqPNuev}el{|1=KEb9kzPWBZy#+$Wg4ckEYBZa0 z((u30u{wl8OI@AT*kx5zNHS|+2YqB>@aAt{%8xY~>vHQ!^&!S+j|96}e(B(7TE)o4 z{JZ-SgdilWIKjm}@zP(wyTj{v7hY!Yk!#z1oFy2M2R$Abf_pyz=M>~~XULF)*o(vs-$+i2@ zbq&~?<=sxKUwZK(&3u;Qp^(JaHN%hIo^8bY;x2tF@qSMZ_`;WHK979uY9wKRbN-#$EPA5)b7 zekhuM^)vJ#kXx_b&|8VWp5c{mtG+s<}i6o+##~LTlqk}5!*fT{?9`lC*9XWM5|KQ5?8v0wU^@kUnNk9ANcb1mtOtRIK z%=`AA|=y zhz59tG5epw=JKWVR|70Uidv*vE*7+;e5Zi&8*eR-r(r(=r4+R*os}7@zjpcEdnKsv zIjEaqpJL?2GDOUS?0g4wIK;}hTP0ru$WVtkIb?i@g^NP5d~_{Lm#bfW=yrq7*Ntu9 zQ?BiLup5qK(yAB22$Cz9>$gv-56o4Q|5n-BD7R{Kn7qGvrHPY0I<5l?T!FJHdR!@u zn1Ie=9s*6PPX>2(tW7U8iPW2^`)6jGKyP-l7YmTB{FY?&1&Fw7j^o!D&o^Wx(Uv2gTHJ zMakN2tw}TW4|Z4$HQpWd=4p)SD3#9wZwm9jB8LNNs z^sgqD8Ms1`=%4PyLTjJ;1<@She>|ak93sZ=n$4TJhZhple=eHmVDrk2I_=JcVnj+` zaQ94=^Ib4W=B8>lR&Hcu2fMd^8^F;JjtABtJ)+Fmq~psuse2y1yS!~s_w9F4&)$tG z8wf!ZXCXLxpZ*T=+S&DKGb1{e*DB&>8pLAUz^CNj<`-QT$g-IC+mSf9PETBi4KB zWzsA|+AG(`-Kdn1Rx0D_t3Tele4XMT*PES#;A;tdIDHybMgJD_G}0Wi*4Y4668qyJ zW{ZO^Ux4-HM=Kd!A~Tw#+%m&W3#iBN(&^MR=4n&5dur(`ruZqJ<(q^jkCR-{1D)R` z4@(w3tC?4=b7SLht(DNA7rPS*;6}q9;7f7NNX|$O-eX>M*GC_een9dR_yKFgN^EaiH}{dA!@APK(8C*C0Z@9 zRFxuQ(zO)%UX}Bqxx*)NXm{Q33C!OaHL(u7@XpT9tCiA%YOB_CXF;B8z}ii=@#{z( zWw)v%hWa_pH=R%c45=r2Gk4X-q2BHbtmjPb;KaScvIkTJ8g6?<3u#lv2;CkNmk;x? 
zq&xB$SRs?bi+8hg?84#U6RfBu>C7()=YxCMI~@|(V)a>^WD8?FZI5CT)p*#Cb+(3Q zdkZ_G2F7Z#h0I0JcawOb*?Gw8-_wa2C&fM+v3{B9lF?spS)vnl0 zEc0VkS3_X2983{9*_X@R$iRowsFxFR687O)EQ+ga1i!V*{(6xtCsuC;n+(i`oV%Jg z5jlZGA=t=m?ZMo4tmJI@t?JBfefoq{DpLbF*`0)Ifc$0Yb7N<{Qti3YzH*Oz5A9Yc zgF%1&tCWn!_|C=TTlf+%FmMEuht1*dP38!6eLl7Ex1@oUF;y5fO=t)w5bWCBc@<|ngeuch1`R}xAwa*Y;omiRmUS-~Jq*N>R8`?)-tYb@<=PB>E z)YV{Owx-K$d~1xDY=WVUeEQbX&t1({O~QeQ=G73&BX3|+u7fqfc`}l_?Teny3SI5O z ztnO)|B+FfGT;6?n`}w9747UotYF(2mtvOPPf_|Z|EMwb9NFXLa1h4~Z{Tb(%TTpA7 z2Y;FpHJ|seB_NhK9(*$1nZ;aD+=*Yb4o-4K=MtP}5<<=P@P22?laanst0~CX%rVOR z(nZZ3T$z(QQ*fIniiJghnp0mpTvY4@DHmPXt#C0cL_S+KTgaV-n1%N?ckutNKbA0^jgQZfgL(})M z#3iY4?zK=Cs&3^s@k9!ha?ym{*)CC%7~-r{a%}YG{^Q7#AmN7^rNDgfdbsMRM}b4@ zy^Mb{_$5qp(Ecra_R~2Wrrw{@erFR3vlv_D5cHSH?nY@I0i7bWT?16iv?=E{zYLo2 zf`f6zuys}W)35x;o6Zs+lc;sa;zs%EJgpFemV3(YXK?q5{xv*bdDEH1D<`(~+@v+_ zK;Ey+lB4CQBc}90{p_222?ZrQYzdC1ctQ0(&m~Kz?9ipr{E86@&448DHlLMo#1rZG zvT8eqh?HFR_;Gum!-eEvOqq^wl`(7phr$PQTErCUwKJB@I=g&p4pm@;P4zIHqXK|& z%$auZ&l(&n+u5z5z{rEjpAlD~jp`f&X~T9+p<+|T44>wys>PoqYvpidFSA7xM88Wxc%ge!Sj7cp-(l4^T=#j2B!_PU-Sg40x0c?R+I@dyR|sw+SbTA z`7dUOpGn@K@ExoX16o3J8uxImR8-G8nfuH02N8J%0XU~Z%GX7yakVaZ5bV-p9BEwk z?#WPh`BWj|f>|1P8_c+}%dK0Fmdx{%ebPqB2?5u;uV5S-XWMkjOP@&A?h64VYlFkTNy(UsNf%|FpI7ekob~Obsgx@ol^`iaZTG?WBJ7&J5Q9 z*1>$RK7gCA|F|K2@vx5|V4mZrB!vgK+oyA*Fy#~rE2}6!$KPI@jEJ6z436A|+zw|^ zpz_F;AAx1IP3b7V8$)0|I(;PQMCYVP&gI9m&aI?}h9}H{` zy{gDbYjtc1j6m-GJ{c0BqDF7CUjx#Et^tVz#f9JRCkOn7!ZqofIhB{G1iO`aKz2=p znZ`+!drg{izZYUlb`S%{A{ZRjzqB1KC&OIM!SF1NfDRZZ?Jz6Q50&ze&9EB9M1@Y{jI@E%La zdaPTJZ@EuELFFGYvPHF?>R%9m*P*$3oXCOiNT_T%pb+=ntVm;#di5D@F9u%>QukUF z<-`H8Yn1y96T7iCC4&(FRZr=S+#}*h#Vf=QRA{RCf(8G-)2sScxr=wu!3BIRNIX0A zEIHQRqI-Umb(1CX031Nsn5! zCiV%}fIoQA9-NDxfQ|jRk(KaG50^U$+l+pWScUbJchc@o0S51wKJN@h8ZsNOUnr*^ zful-XEv2waE~dAOKII(bc!;K8WXDj@}ZCdC=2EHUDQ(QAiBe$aXS zL7m!nH56$Ug$mR5S^)rUb?^Osu?}Gg3?tsW(6V{*AMPCS70C_53S@n%r8Zh=$hkPn|s?rxoFXDE(SKQDbq_;S@{aJJVHu0HM z4@f>k;qIv3F~*{LIF`P~x1T8puZi2+MbkArQfw8?D|OzyA$mc*{*J}Fko$f)p|s~# zlQ2ewCNA-X@+$B6Je`m{FX*e|5%|0nl0R1Xk9_ebo>bwcbRz)c{&%W~m3n*JOKd!E z|2H3-qQ&ayH!q`f=u4RI9*|n&c#pt^>%#958ug8iZ{~7JE_}wwVJ;VnNahc!9OB zI%>=DfOaF27<-dS&|uj7OFC|W=q5mP1%J)IJz{9vwgH2x5z1MI&>pFSjM*Vdu{omuhZ5x(u>M2z2T?*6hmgS@xONy2fO z76*{rAImsg#mnl7-htX0+8wBgeLU=Qx7_t8Vw*P~<`uYgkf!sR z_rhq!3+!$}izytNx)t0Bgu^dSb=PKSN05Sbn)Wd!P0!GE5}yhln@S9+DNV1ku_X^T zE9q)XxHA+&d;GnBF#nk?+l=|XC|CJF1z%9<#hdymq0r#Z4ex#9^upp|%*^}*S+m}i zr5xi%lH^0>-!~MZo{YB6T_^za4mSz!y)`S}4nw-hkEXO|Y|*69Fv(9aBv+3QRc`Df zP6NRRXz<_E6V-9clfDq%kS$>^slMJnb8bz8B%j6lUREy@z#g~J*2*k<(>sns@@C*5 zGbX#=i0ltkkFv41!BboITp!%6)%BB}%8dzD6*>=_AE&tq@-4XWU0N(J&9wMR{hle0 zE^Cj>HADT_X7z}zBC&G-l`q=EI!_XGGh43)af!S6dwfvb7IdTHpDIY@VwAYu0J zZlOt_RX3w4Z()hGp&ak$QDrJJvcE`XHh>^HA4&Ko6V>*_C&q#JO_;)_oHMKR|UlTE%Npx#J|78Wf0(JAZYe z5|N$rPzyxf*xh?FJ$VoAzkC5WXEG8$uo+r=`^G-k+nDmm^&|~bfHvFfWaB^LPgH(r z>Qs$yw{4mBlN;3Fe&4SqkA}0Yh9KsF#8RDD{*|8OxmG=_*_9l~?bkHO&ga&Mbjx|A zM}X4g<9W4b6wE_JPmP>6(nf=3Jn8MU-<2DbDy6kU(;7O#d9N>7KX-SB=f6rnY<-34 zv>cdRsps#F7dFzZJIaVJ`K@SpU$OBX#USY?7Ugzra42DllcIwWHV}|ADc`R9efKYzwOp$ab+@ujyl!WF-|GkfR~uNN7>zo{}>HUrX3?Jjc&wB17P4d z#6{s7(87yyV#kw*!|?eCYnT>Wt{6|6Wd>ukbulVaRD5=)sD;_`y4qMm)r9|U64xIP{4nCk;oKUx6;)P#DG5fDQFGN zOXTaYD#Xg%o?|NEFY;EdN8Qv;kiH0|HcQ(lBGPAoh!ye$sWs@45#-aPdcHG(ZL-SX z-~g%c+pU-TDV!Zne5)bK{JmXVqI_zA`zRO;JsK(9T7NqIlzvmT+zAW8YquZ(i`7_QdQ}VrWe#44L(r$O(O4=>|J~Y6Py-edWY7IiFqH^ zA~ih1Gn!l0|7;$B(F+Bd?t=V~bq!bxWTJ~;;sY-@U?A64-JKZZ|Oh_ zBt#1b4|mi5W}0}~xR`Sd;NHT&w-&Jsco5Jm`mtYOBUJEI29rO{v=)G)HN%);AJRp+ zz38FE=3PnwgJ2?6hc7t2Yx;&x87_LtR_1jnV;{Jgq=a!LM1+g0H||-zCQLvY*gES2 
zqCgMGpsTuAbzcu+tc1?da*C}7$^MR;Gij}c#xo|L=f(ip<)A^>lqd5%GbNc|*1S&) z59lIWuXX2O1@hwLa$3N)dYTn$!_R+g3Kl}gOEE1!s{g13s?K7zKDu$Bd4e-wx*Z>j zz#4@pK*a~$AKc9Ag+YzsnB~BvYrvZ<*;o>%Tc2euBThKeL#Tg0Zm`q}95214C@5i_ zs^gPf!hjB{RHqb5zXU@NIJ%u0?1_Oc{8;%5mMGuS1xO>=YUj?(lV`)HlKdrrbIr=h zIrA>t!w0(DPHbff9P)7&T9}vh8HWR+B|H-13ud^mr(BFXxriBQBOe8_IMO>${WWQJ zm9RsHRtZ~(j3+xaW{@uBmbi0mjUvOy;qgcIxu8NF2z7sjillZuk4lpxv)uqEKKO@o zsrb_qVQ%jr9$E>MX$Ec2ql@k(VxsP>F8+n$_8_|4pXr^zz01Ane~sSb**pq*JL4&1 z)ZBY1FB4e2MdB%snMk1MLf?eP_wI@W>2u6--{gC*N(IdRtPjDTd;fd4^(%OI(@E}q z3zwLIF{^tFp1bPW*F1B_&d%i1N}@8LA1RrvHNg_mDWG0f+JwOLB( z{17jSIK1m*5-8cA@DjJ3(&mUBmD)%8zh&DDPgP--#U-lME=RtZ{4rq1!da;{xfuR> z{JE#BLUr1{Mp@ieVhhCVv|8G(AnriiF3B#;oA{PDuu@glU;CU$gIz|99k1@+NWqJ&WI|vPm%)+~;?EO*F8f$}qudut|>)UH*fw;Qq0>a@1bIF-kfR9dTc+Z@GA( zcrC8j16^UU+;^`du3h8uZWCM$3h*@8Y6n2wp~Gdxea;_> z)TcT)Yg$sG^EMj1P)X`1u%CHW3GN5LR-evVe zq-N3_2+&*GHLWW}dCzw6&bS?zF1tphGhhXyc!*DNkUJ-pW|&HBa)r~;PEU9CYp^b0 z#48OBlNwpUy+>c%lNw7b5|CvP%isw?6N0Kuk4c*!x`c`7v=-Ph$bhTvN zWZP|^Jx>x|Ar1wiMlZ{*C*8AqF>&w}x~!OyP@xm_dj~Ja;4|nW;8_cqT~^fA=aX-Lj3TPR0 z2AcvYA7MINO6<(P5IvzZ^8sZI$8r<*RbWW|7_?>Y&v6k+AcsVIsXzqo!w*V z3ike&$Iw9Yn}ae*eo$Rc7p&?pss6j2IaOhXii*0%9~b&Q zr807@jni=SY^h{HeMM`2m)Lg8y5e@RRd<^!k^`pT9N?11Ui$O{-5`k@*CpQdMrG97 zuJC&4?obn57x}?WV)hZfZK;A>iIp}&(DlZ)X;f5@>x4i-c-QWJTj!>xMm#8~<=y=I zfJCW1=suKs*IPVGdC)qm(NT6d_tLkFLb`~w{duh;iN3o=fdqpu+uoRt?Q~ocw zy1DIQ273O@`)P<>-0)G`Jz%dGYZC7rZec`~?&+SDw!v6n&JalVlF-UVQ&a3PK7(y^ zQzS4P3qSY}Uss;637jX#ox)_WeXgASopV}d6OwZlRc^Kt^SR%KcwTFAQLAwl21AqG zE5UsgvBPctDWn1+VRAcr*(Zllvke5gqrWi*?=*_@RGZcNZw-1yphg`Ym;K6>*9^V< zr_dyMC63rPLoW{?GrL=T#nacKXsoRp$o2DFJJ3HDL4x>f zsjH>ur$b7MaU@aC!%dz6sMh(~wh0thM}kO^EF=X^3r)V}HX2pCNK`ah-e2;NOirykc8d+F5(F_m$^!*l;s(gU^j3MYa-;6UQdiUa+m_5sKqBd9o@|*^dsI_Y&}8DF?2rV)e~sL&BX( z!Flbn|KjFzi!c|GSAKR)QRj+RzT5SoAy+rOBFH_bc;u4%qPozEe9T5d=nTuwuIoUj z?Mm1=kg3hj3u$&mdujP)%^0<|u}j`WdX_=OcA7BTk@ghmFZ&(_xCQ_bQb*?M zS!!RvuzM?KcyRIbMxY?9LTkent31za;~GDtLQMW~YCMG6WnvoiLImk%54aV`Er)vN zw&dpiT+Vo!Cd>#BvW_E2B9!fu(2wm}nNg`h$WG-PpOS0{;%v$tqg*QZD1=3nuWf}J zV4S#`kW1BbR@wRuL75_b>rrZTL<04T!xiOp{hry_jf>G=5}?+PYdX|cQgU&r%2WnL4@=KMlu{lMY0I5y2K?|7U+dWl?ZR1mvkR4?ZIL71q zC0aL*FZV%oy#MXvzj@@|5P>nM=`AC_&ox!gMZCu3!(7tb_V8vnTu10_Hwr}zrV?XM zWT!gE@{dfHe=m8*1Z{(CYN9^>{qFY|xz$NrW$ALCk8F|*(E=9!ObL(2moXo*8|159 z*pfued+G$IEuL1claBDr&Ar$!(IJTOS;|M%cAmGo@dyDHIgr-!hI}>iHbSWM^ z+g?uhyo0fZA7x6UNC&05CUN zPp)C|gXy_mz`aPz>Mn`{RM@kBWudO8cL}%gYwTdS3OcE|bWXy;P} zUN_!%$dGyR-!EE=HUCJ#<0#c(6gkV7`rl>psnbISkZ`~hyIn|u?F+9Hj_+59E?@&w zkLY|v7whO*FDvL08e{p~RHj;M#Mc~}K2h_|f2YsW+N-I}kJ@1jA&hc#Q$)-!RL$=u zmlnQC43cX}%Dt-U&^0Jt25qkE67{TdvW_5uVJr1PAqwvk2KXvE%#&05)s7cZ=>a-& z7cmT*9g_I`HXrl|vM?MA|LxsZ=Ir7Y-Rekp*X=nM`1k;Dx}7Ar&_Rb4xO#&QcAbZY zorWkCd@@96+$n7>HTY20sQ6j14O_ojDQbysj9IFDcTb$`gbX0u6ag>R$vfLG`wJ3u zFWdU|E!+I2C^ej#Dpub;%y=XlCA(ac4%L_^bRN;lSw&m}j#2`3UlC@e$9vpmWJSE9 zE-BfvvFDhd*o8`*C%)p~bWiKpca(O{w%I_AOeI`RbDfR&6!!fZ;GsJo^Bl{&DzOHm zlGm;(Z5<5tHLtd?uQ8tT?wpZvcowfqWA?&>d~vC;FKq;?-{ z-2U*dIM33=T))g#hOP5pjK$j%xn({G0-yH*8M4>@%5S|gB+Yn-uf4nxYXj*_-HzcbFSSrdJ0db#R&aD7s?Nf` z$q~WmR9mgXHj<2;v=b=!p8G!_9ne-->RwgEQChEbMek?KWn8uJ6u` z+iNWjaW%2rfYZt)(O&O`k!Bttox8cox;^Y-=2A!XE_}srM`l+xcm3a1-frzPJ5)Zv znq0lc5o2A;%;9bYE*Vc3%LKRw8^vjvw)jbC#vnkKvr10P5>~403t_BbhSv~~#fq>E z*S*q)uUX|}bF`gjD(FdPKU-9t;$IA4fmvchNib$o!d|_IJ3ue@@2CWf>_6Rl5Lh9) zhTy2DWBMO|w0F)iM#rS5m3gAPQ-?1muu0nbYc0hbWJIZRG%Z~NX3KvOrdFs9bTdiQ zPs`eS1pxcNcwiJL73wN|P@Ewv-BYKdvJu zo?eZ+(3$FcWMJb3vU$@(+IhKL&sd4(8_?j;`SSDm;zg9QMGuVK7(H8L4vfgHq(8Xf z|3-arDv0dGD*-qr6)O`&optj_O}yt*FE0l!;)G*Q^*sYKOgZ*}tAyD=mHD4J?)%PT 
z0U=b8h`yA-pP0)h9%s(H@7_pv$X`81EtfaO>-}LtI?GspyMoK2_NH>Z^r@J7v`|_T zX1Itw+TGA4*DDpAK>LF!iwwFcx6#Nr4dBO9h7UMPtb-5J*JZK#gp-~F^0T*un|_lP zTD@1#+WB1nIpE<6_(r|XMoEzTN8sJ_*Etyt*pi@2J$AgXrpl!;5@E=n)xA8kswxO| z(ZG|WA&Yx#5|_oM7;nT!BCZ^}TH_zEw%qbcwOID|kJu~LvA-oJW>Uj$?ZGN#S zS#_HvgY9&mTDfhi3M}d>-V{A{9JtgXA+382F!n1k7uCw%>4aXGS@mql#HH?oKjGM| zqIFm6+R>azbxtDg?mTJ3&C@@rm>wEE%u!;D!_79M3pdj0GFi_STG?s^($daG8BlLW z@ad1c7o399eh8#`d#{WTC|rbZy}Qt3T>Ng6mE^N`Ih~oU{C%Sh@mgn+h@0hXAo!H& z2$Mp(G8g`XD=CJH+C%5etxa@owvqPY>jzIAD&5Xv$XISQ8_W%K4ba^qkbkdqqh^El zG|~a8s*Lk)n@>BhEqSx_%(cZNP@V!+PET}OK#++>4x6dhoCXHOTUHOt;gZXy@m0_lZ^N@J&&=OLC?LR{;pn|I& zJ+t}{Bhb-wN`l$Rn>k_rgP0e5p>>7_R_Z>_U{PJlQGSGW!_-S?@f7q^GY_sKaHmzk zUS;WsS6&|?N9D$t6dlK*JI1EPy$;Ppbuog!pt&cD1lcMtsVDX9uU(kPLxR44?&0BB zXUD>B1;)Oby-~VX>ii#yuEL?IuMJNDQ4|oBZb2yl>1HM&0xHrmBt@j9VMDq*q&AVx zNsb;!w=iIIjSxnS9I);8?fVbz-90L&s?&fX%SPLfDX%dHQ|KkIR9-wmC!+F z<;L66m1rw34o~YGKf@&;J(vnSL>^P>ddd6yie}3Dv<%W?A=e=uSBd2AE!D1sKdpvm zw5XF@_LW$+!WL$^b0^mwjccY#5Hb2Kiwj5puXc;KgE*{S@2XqJH^C<)cU6?R?maC^ zPSG4FH*ZLpnr(4xh`}iJn+@YDN zQIldMdGycYtT3G3UvC+gxhOE}wbvKvy-8`9O9-sNQ6alpD#g3rkDh0E{eAVi^g%v+SFe^S$=TAu!qs|H2!L=!OqQLrkI|K>k{ z_r+&#E0t%mdSs#<=S|H{fJ|>2rjA9ikN!<7oOo|Ard3 zzduEerK`wVLOi54JnE3zLYw=~G9${Qw#iQ7LBzeE)Bpum)-8lZ8@6+}RF_`uU1;hy zVt;W-%eEmK`>XxjFW1NSSo_V0ri*R95+oc}57}d0Rf$?%j$!+yPWL7zQ+b7%1>^IT zg9}LW2e%bZ&|Rzi;Bwz*sN2vFny0Yr=ItqDr|_@!g!8vZ{NVz5c$FFHnIC8gVwdOt zCQoLw{*>e4rdnZ7)u9_rD^MlxK1PhfpLp&u7bJ;g$FNkKFA*n$Lrx91Anv66<^GD< z{^*jM7Y1{<;2Xev?}c1WCPRw8+#6!$xsw2g^pV%Ai3RgvG2yYQ%yk$${X!gf~6Y7D26MuV2Ap#BFXPYq@jhL3{W&!}W%J_8Xw13|wc z56hG=uw%U88lGU?ZEdTMj6${)uPzWpXkWj}yD1jk!>}1~KZky>n&7#Lz$Ju(dgn&-YyM1NY?u!3v@Mj6tN#+} zH7!-9OGEjHuU>hoz$JQ)`%n`Y1LefY zocFrinGFh7ny3HW&Cbi4Fl^rRAHd$71U|jkIHc}d_enqa(6luE;^>8iRGLAO2A@DF zAO|zu-bJ4I^c*DcN}ZSJxQZtm!Mz|BZK{(R%}jyt&(<_s-}^G}Vj6bQ88FL5jz@-a zl_pIsP5{FF1MX-L<0)C`r9UHz==v{;=)LEA&TiQwL?OQy zg61~4Xdx#`{{yTozBFkJhTLvt8cUs5awZSV{_aYK4dB1cF?d*~rfos#k2H>JNgbGf zJ_HfWa*Tn6;DjjaDXmN%LgBjcUKHhJu$stVCpu5bqqEIVwe1W7+*#HSFT$wpV(Z4@ z!8E@c0>P(W_i8ibuRRlJ3qW6yN2<3jAJ9i?e0;I-NHlU2w=?Eu6Y_f5yFgP12Lm9IDKnleUGFr*K*o zz3Edge)qYxQ%`Q>Q72E%%IXZm-}SrnzoyFJbmZNPx3H_=Uw6eh|IgGts&|1~)D1V> zppxJ;HWSTH&h1=(omz?NA6!c~HSekcBcTiKnk)8Mmx?Kc?;mNK2$_fsA%_Zy^vSWp z6S{LP11$p=CmJr{xiUb;op#YlcCXxgHCst>VMf&*Klcu>Ikv~sg+aT@F+;I!V=zfrV=zCXHoi)6XzD*eGV$f! zQ>F0x-4*UqPynN}x9n~Y`0q1iOB>ut-}ZsAtc8V%cAlv-fTHw##L?&_A0qX(&6((! 
zGk_50r>DvFm4E$nryF~7cMPA>U0Est0dm)iNZp}#@P7Rhv$q*ExBf6V>-g>}otCOH z5yTs(oIhaS$0eEK-4bs=Mn#re1y-_Y6$Lkwm)D;kU6&U?9>edm$A{gRP^n;|@zh^; zZ@-TBF8+sO4GY8e52UB;5|aA3+6jjF_!qP{20;ovr(LfT7H>20%RNs9P*5+y9e#@p z4?JqgyZPdKEW&y_zeNx6Fbb)>%DO=de8I6zcmhQN{cC<7B>}{IyC`K2@Zs?rH+9uk4MDJV~iW zr2-4;cHK6~OB+oI>7bvzZ`KdC{t_k3;!wJ2-^lO~bl5wf)BEbOI7046!Q`U`kq*af zgKG(#c&=09hbX6g)x}2y?>BUyt=oyz&lHosPgiF9vKeIV8~u`fYcxpys|{>_JiYzh zC)lNCH%yXxKLdt>lBSxqM+6bXZzg_8cIw`x10`gn=k_(4{mb*VhT@;f`d)R(4qSmR zj%FkE>U=|kRDLT4s?bVKGx;K3e$8=1400sokvypu>m=V=lC$bALM%L2->^Ar_0irN zE-o+F9S!8S5%62hSNgv>?ub<+=ddpFb4xJEE2|83o<2LnHE~GL+~mUZg_t~-Ds<)uJ1EdtaZ-P}_&6x*=lOr%DqNDy_En&=An-8p8q_0c3~3@_A^4&cB%YIV z1Sx)?FFdYgf1cUca)9SbT;gl2u{R4Q2L-KR@C%}FHa?wLz?u{498#nT!x&{WN2SG- z+j~>zBw@#n$WQKIT44=V+jlojIfbiy+AAYK#Q$R5o7+aiueuRhE=M|j39G<7qR7L8 zZiM``P0Nw^2Nahi?ycy7*6D?~5}F)Bo~{7-5tpOK8Yh!=lbW|m_kH?DTy$xf_NQTo z%Wh&`YQ&wLl;12!lul`btYsdJtEUhNu%`)l|Ac8rqVS(u66$T9%Py*$KibjpE8BaIIcu=!U0QT zheIo*6kkt=Vu@&{7c(l|vE@T90V}7?69Tz^D7kTC0At^A7yexuCT8 zC+6Z+tMkzmM&Nwv$%Dmr5gZ{(oo;4An*{kB&Ys=Tv6($h(a8k+r9;odD zkQDZkR2txFN1RzN8C!V`4kvg`C{tH{K9`-VD)n%$WT$vEQ5Txyw~jf2htwzB?)&9a zbnsG$vs*$VAajlX_XkZ@&PJe)+anah1C^)x`OBy6FSUX`thyt?*LJ9Z za2fF6P3xqzhqoIy&OKjZPc98j50R@>3EjGPVRWYWnt6@MN1x6JG6f<3_fmXfhaE7cE;Y%ftNuw2#(Wx%5NU>rju28o|Wj06WuVs^A3O3 zI;A}Yn+Ut>UCBTFN>8vyIcaY zjTJK+%O;p)^fu|{PQ8FX^CLO}-{A(;pq4G{8rY;nMaX4&|EYMi>@;q*>fD-xA{rB* zi>u_NOL|-3k&p_`G0X%>cA@-+E=jGUjh{BBBhOTpq2T(sQ^pKI-Zt zwwX4n_Hbd)HW8K+n}qjA$NS{3d6k#dI>D?`V8~d=n+-AeewD41mYCTcjs_rX1Edl; zQf{F0)XPHMR#}?2v~3L*4OPU&hMg{2d0z}sQI>`5NQ^qNVLl**?ONK>U#MNPThJhU zX<0Jszp|0I=^huZr30-QXwDXG1!fvxyAuWirzZ) zUu-zwu}fI(FM4?uzg8P$g+%i9LEFwa@WMhGdORh0hfwMl38G;oNGkq=Wp;z7PVuNihEK#6xDafXVg^+ zYbc(4c^y2w`7GksT@^(`j8wGu0TE)#{i}Da>7LpdSZCdi%h*{{+2;P_lz*J6mch5w zjg*|&V7D>uw-Z(Q0S&@DLR-O_3$AY1F&zT&%dj`oy+8I%9=t!eb8l!^N)X5(jI9$1 z8oMOCIXBMmD6aeaP*Ec@O~=U97YpmHcj0NM6)#Z?yYW;iu0!^z$H7F|(i_7;i4s-g zfu(M)T;k>AE-B!BVJ|B%?x`z%>9+oZ_1!vz3-%J?QxS_~2uxq@xJ=CuoRzpYNR)_| zf;mrZEKyfG85n(AR8V!lVzhqy`_dYrKu|*eB51cV_&g>_>;5oJ33EBq)mvCA2M<8+ zH@Q&3W9(3_=LTaktu=SPHW+q(g0@6)98Vg~xq8#I5R2p(pF6fo;$Z&gh#zA2;!4sH$<+Fu7*a4ssW-#}Jv&>I9B^`A zD{qab{z!!axNoG={Y=;nU@}fVEHd3a#BIcSI_+OIsVpK__!EpLx-3hYh7Roiz!>Vd zcOlVd$J*bJ^}t}fXC4#4K5+>KLB%pI<9uD6vTkPzDBD$@%K3q z=CBzQH|_yzg6cl+s{~)*`AD~14w0Bp9CqmuN<2ViMT<$oY>YB%>v&^G@qf&hrOu@RG}t1sot6`2SducFSVoq3Qi+C z9$AF;BE#gZ%ya4dFSRLOOpki1$_zEn98DcW855X{Y#RBlRp3hI^YU$Fg-?fit7(6y z)+hJ)w92z@j%B#H4;yDG$0jy$knMW|aM!eX$nO5~NRRj3rijxY0` z>0xazh@PNd?nBr0=J)i8FKa}$hh*6^{m30j{qW}<)bhRZBLNEmymOacxJO}C89Y0Pv8c zFqZMj$|%`5ebFdo`N?yB^pfj&Xy#nrKmwdib3fl#^K!9Q=u&R8p$>EC-lH`qV*Zt^ z#rwJ7&MMb{H+>GeF?%pJBoM27AC0If0!0@eR@6E-Iw5Ib{_c{XV5w^ z^Z7ig(CCk5+msP?syLwHQ==pYM&SFU;nJ$I_`uWhb5ySWY+c6a9Cq$(*Z1Baw`7_L z3{O)}IwRBQdV~DLa1#CF3{Se+e3Kn0f8!0kN~~f`dQ(Q}^{49TQqTvMHtvptmF;W+ z%7>81;=f|wh+5CvqXq{k%zUdGPp|b27ssFOu}vcn5gkv>E$}8zFR+HW8rknBA31CW z&}Cj)q70VU!Hobe%~L|{L`LV_TR(wtOk-2KzGSJ* zf@FdHDHc`#(tpkIhv@*S%6fFynPrGeO??BD@iFbq6h8)02`56*#_~h0b#6Au+}7sR z_>q_5(q9uFHwH&OG=pHtTm-Lp3zx9A;o6R@5rg$IA^Ty66=nSAJ5wNN_u%<2IH zOvhajZCv2s<)=Sn0jRittzSp+>U`a!M`!>QA?)ok?4TeyiYyn6&*h9OXk_4d6v+uG zE_6?R;Zl{gV^8z*J|D$*GNOZ?7Op8c)w9RN)-bwiP;amt^sgs4r)8Aw+$)Si)s^&i z10<<9j`1GRqtP4VD*pcgx{Qh@gw5ZeC21Ash|@e5fuVQ%ir4&3OW^6IuVl|kMxaJg zcQEs9+77?WU(}kL3Fp0sEnk<~4%xf>5V_>LC7Gt)fi>J4se-H`uXv|W+v!jy!w<&w@4##f}LQB)Y_EIsT zwy8GhXIc!e0>cB_{uRqJ0@hHlWR9#Zf`Hh`@NA)x4fQYQuK5pt*_xdz`H<=(ewpf) zyhY1_gq`^bnWIWJXd7i|Pp!ue`g{!Qkx?CBe=iK-a#|EySx3)NP5GMI>enGX3RNht zgKO9$&kS(vWK{3yX5j9`4znrW_gJ?(wE4)31CoqreZhd3o^Xq%vO7I`S*6Y4TPAce 
zy)pOwbJ3(&hWMX;QyHzb$xO0c8y1uWj9Zi3quDO2S~yrp`+lC0zXrBY z1|C>%lU~23-HJUq4M&mPUG`od(D!?PUOVYk6LtjcF6u)Mbee6usBS+@qugRKJLOcg zD-(M_JQ&i)I|U~AGj}OeKDc0(OCQ^Z6!z3k9X!6xImxXm-}f>>r)wEkgwt$#os2c9i4p(zRCN- zcyV;<9bjOKJ7NOq4OxN0suP09Sm_{y4HQv_q)9`IS6VmX=ziY+ijYc5zNNhC5k$=> z-+bbl8d!8-Q;!}ZEEkuq`m~gk)e-=QCAvby2)Wtc;JhL(S>0X|w z^ZY-6O8J&AL8g#bfW!Ooz|Ii^D_`(hJ3xXiiYre|XhuehL#d9vr6W`t4-K`Hg zcBoI`W8S%&%Mb3XQ+H8z07x=Kq17|@0mwZub#+}7yU*Q-OCPvCdSkqT4B=VMDWzADej zQ4c+f31_jq<2M73naiJc0K=b(V8-T$R_h6KZ&ih4{~1iUY6;s@s5U8JE%u#g>b869 zgThuVE)M(70`ZU%yoff=)lt8B;aYp{l#+qXyK4 ziXq0O@m+U0w(M8~6F`6ELi(HDP);4&D(<*u`j%e0j_YINX@CD%yGSU$3+M4?i3c4K zNs!3jQB+#E=TbMHx+X_Ddb$#l6^#Mm`cdVtIp~}#J6Emg~`hqD`|zhkFD7lSN_ zkT-{Aql-FIqIHT*r>0M@x9;3<1$>$)(S=j2-L+IP@!z@I*An zhK=6(3YI5Ctb%Hm>+A`4)z6%WwP4E6AP&e%PL^28s<|Du%k_(_B8Obf$R~LBc=!e9 zVuObJbJZs5#Mj{G0hUB4n4jRsa58T_u(S)ro+Ce4+Sr3bja=#&`)GFV)vze{=Y*Xbe}<$ePh&{SL%zLZt+W} z2kWzD$Hr|=R4qgm^i+vNv zt7CBIv7RMtN+oO)gCi_^Rz1eiE0`wkw7}OG-7pJ^z{jSae)X_0VBE=-FH#mRhc7!7 z4MN-kWpgy{J02^o)4lo}$pZaxF0+N}y&g^>1glA@%QCqvCo!vtapqt*88%JLNz`Q~ z-A+!rsZCyUHY)DjN(-Q`&f+=j%54B;NK`k1Tb2m=h#1Nj@)& z5zuYQ>Zf|V10C})_ym&1LBJldN@iM!j7pU>={J-H#A*A5$MZjFa^B?eKk0dto~sJ8 z3TCpGls#Dd-JzO8VgwnW;1(_o#rx@)S-2b}Yr&i$MX3s4dlHR?+*GDogHRL1T<(3&3J-B8AED2FaJf`S)s$PE zv{CS21PQ7mn6N;uNaoOI9(P0mmiaJ}y-peMEjePA$p5NUZ#> zsqZvZ?mNjto12{(r5DehP%G(3khc&I(k(bHqMLL-<#~v9U2XWplx59wqsefHHD8aS zjr3Q*6Pi*3PI{R&5EsyaJK9e@#@X3LW?q!q(C4MuZ3jY$*qiSW$%3;f3jYDp|0dI^ zJ)=bDk$OOfX2|;isyMogm4i8U^Q?oHjo;tz(}-vl66RlhPlXh85i#c(6!G=v%0TE^ zBZU6_r|rkZL*6ZKVxz-aJjp>&D-Eb+5Mv5fhO1WB?bTsO+bMT+%fUbUZPb`;%Oxp0 zKOeTSq<`ymY3St4UzzJ)TV4d+A6jqW0*iD1(2vpQf||0DW2RtUJY>~A7_RKOrtyJY zMRg(jGxL|Ap8;jpIu>RCbPMz*7cpAIEY-*0+4TIigFR-&&kYD>odBhuEU`%2I)0}N z$4>$0tQXOV<#A1-je)c{y)FwzTwk_1LsKT4X7cKl+draJ% z$CcFPFJ?jB%fH5*Q5#(!OC!Rz9Ryl-Tll!w5(xJoFQ)5HLb{sU7Ate>8y}7eT06!h zYYx{msJ`$0E876L-kM@Uo(;hGy*}o%6zwN~J5T)0%C2{iC4@Q~jPe1U$XaAimq9~q_J4p!kUmu2Hfq$(H7wTv zeYzP4#JJ3!>m(TYw0h#4mi{&Tx=dnr!LP;;jT4){zlFmY`CBVed=9z4i(ARN3 ze`3i|Jmw)SAuVQacMUrg#WSw@*_{kY*5X>$523LfHu2qi{NGOa4HQtx3LZiZbncoI zRgg3=N^5p9LE0?{m$^{9XO{c)ox^|x8NSSh@xh9Q2>rOK!EW_sLkj5$AL* zqpOes1AHjodw|C3e?!JJH{?0D2hc_8QrP`f*uTYpUS{zKH(ZmSZF*S4CC7k*Gmh;Y zP82P+6Gt7&M9nVB&r%QV%8PtR%RPn$$=w-WcryG}UseJ7e)7ToWM3)06wQdNzharZ zJHnv!jDMe6eoQ9~72uDP&08B$sH6R0j){o2W8v-W-yQ>Pnc452V%S+)^-8+H1 zj5*9x?nC9M-1mebXgf0ZJ=$d@T7oI9DReKDmYYTbB~a2=T&S+Hn4V5TI4zHDp`J_c zmCTFMt5?4d@Z=N52s8XyZKk7_A0P^!xET%HYBE;OAB@IrskYt)3#nL3=|!%Zv6^aJ-u92Co(ndXL?FG6)Cz>li5DP1T|Sr zjavLsLcuboOkZ4Y=a|LVd`-Frd_Z{MJ^W}`+3GX zM$uTpUmwo1{NP^i<#8tdXwDNdCY!e!lucf%KM_3;UC z?sOdPEjIkHX-a!m#Qn}BVekN7k zH!~qVi9JidSW8P7Sfp%|0DHNUM z*VDLHU)x~Jl^(Yr;3-RAVNb)_(q<0Zudlql3%*NNqUL>9WUJdUftp;lTxbR%#!De+b)+S`&|2 z=758x@K7MItDP}0%QB0v!M}G)`E;&5cLe+w@9}1G^}ELtfY*E0?Nga8WR1%%YL$DI zyqhE(mj98}U3X~cv~O~ySQz&8HH$CI7MPcZn?IJ%XDFeFM`A!inHcNHavS2zKg9GE zGV@}|A>L=#rcB)#8fv>0E?Jv4N-eUWPy1p+Vysd-(5uYV@6zB#b(t{lMeW{#VZ;YF zXR=O(tSY`^T{vpLb{d9`YGp3-@z8D^-azVq>vD}MsXOL|Nn1FJpn zy?qezAApg-%h%`08z^8pq3Z7+I)E8;PCzxmry`ADG9N%6N^M1_QVb6&cdzOzk+-0R--pZ8u@cP| zPEuP5VPvg$s|OOF<&I^#WSovElC&|${)j5taZh@z$q}zZUFZgC;3nL!wWuZITy^Hi zy#r3CnO!Zr_ve}`DLW!3TC2!tatSQMKMCF9=9*N1FqE1Tz?g#b?vl0Oop_8Y4wd^6 zrs13`(m$eXiHSA$Zysbo{>kSI$aZTXRRoGL2jju9PPnj0ItsxKJ9eg*0 z)uQ+GO1``^;b&4zVb6B_-o(m3sx#4QA8UC^E|~>W6U&sJqjyp7!+Bn?Mu^7W z@BmHbsPy0i<^}Si)8K-|_0w6>giEjH8G^U;Kyk(<<5MDR6?Ld`g~<=N-E#OoyH~L= z{ns&V;_oyzz3-EZTgNA7Jpr$AbaTspvKHc2{LhogEttxjl*c{n6o`048GQ1fvwivr5+5Pn9#pDfY@15m6J~DXABc>uuLXv zUobr-_e&7@2SUII_w5e;%=r^EiD|8T1+Cgjs->$LH3lhS-I))Q=C55-&g-Rq2};K4 
zypJ7pg);|7)JIRh(R;TMzptaMRJaXpF^AcvZar{IOMCKiJ&;Yih)DkKSs=?sxyNfl zHa;`yt~UXGIo>h`pX<&Wjtd;ppYyU?t|OG`&0soi=7^XR<8Q?G-+*_5zYtOyhIcy+wj#Ci(v2cqUL|A}A326$+S%_jnbxmAAxvHndrdh2lzIww`&QE2->j}MpSa!DtQ>sy-(FoaZLq1Nn1qd>U*+pA6S1nHKD5jNqbFT zEk;xf*ZqrkwXe7W9+A-_n<;5}86gJ$^-q(tv30yo1#I*FOM&gWm^O#4#(2-$ zbwOo5@)zbGW4%_sts!;(*rvd5mc^uJ&D}muX@?IEcx}4`_gI^4Zuk$J?ybkc*h%AA z3DIIyIpV&>^O+&9Z-b}BKVJF&K&j%(ovkFP~%({ffR9iemzJhH>G&v!!p zqOmZ-y>k*x?iz&QpX^z?;G}I!_?-2%ru27OsdKJ@gEhxH+7Q(UL)no`a7Ui(U<#{PVF{j`5&3pLToff5ca6+%sD z{yRHTa&62Q8&b*qznPWFp8Wr7`3Jk!&SBQK!hO%QsHMu`fGXrUkMGxWLDf%hzjeuT zt$$lbuaFhoITsADk19~Tk0g*_+?pjM=B?emVWPGy#38QTc=nNk11*BP!i+_!J|U=Q zo8g#B&c*&}Q5xzEk5;(9K*$4xQ~uJ;GXD8D7gk~7|HV*G>J4M%9no{;eCEMfY+?t8 z-*@vSupx)CN+HP4#rskmU7QrpcX#u``Y_+68_D3A-Bfu(eC%@K^VlnRJK}&tmc?cZ z5afYPSFLLX_5_-YZrb#?)c-JcCoGbNEcb}0p*@>_PohqT()rA)WJUMNm{S* zJ2`5~U~G_r0M}`zXwQjzSyxLk_~4zonLQ2;n*B3fJv4l-(h2#T`_c%(#Fo2e*2%hr z+toL)1gy%lQhX=33K&}J7UctgMCkR4;E@hwHb9T7yP0IED2k?LLS0TXG4f-=D zDM2^=L~UZLCVf&ZL3K*iH}SAfp<~6qWS{Dd^uNj?X70JW5zcQ5&+MUOMHtP8H9PL= z+$(H2-Ork>O7Jl8+|kQQwJ6@%%=XXo8T)gH$F455IhM_i&}jR*&>8q^58k2+-2&uD zY&lY-{7vt2tr@6s3pP~JvW`c-A)Mns;qrSycPi1{B=#vLT5HWF$4KXgNd~W|yb{^> z(|8EuzlB$z>*yI6H3{hBkAX$>MW6g|vdl?DG%y7Hx^;6%cKEQu{bQ9OD+Nb1kuTH- z+Gc7gQK=vvZ0cc}4;pcDMi?4so0FoF>>=g}zsqrzt>ZAE zM#oAqs!ZKSx9)TVQa^0JETsRufFTC7Ao12SrJ9K0sZkCGL&fyBV!Vj0dkBEv3N#m+ zgG(>}h4-2&|j{XCvrmeL< zHqE8zy&uW>_drsuV`U{7(hvWNi`xs^>ujn){SjQTjOAKzBUfhY`wH}E87*y2<8CaJ zsu2`e&*38;MpfQ$IT!(Wis$Ceem$5WXBVh`B@keuDsfohccY?nzp`D8b>{}1F5>^` z;<=vC%e2JMOa^xDhvF>TP~WY>0Mjd8eB?HeAup=xrg7SvF!?Tf&bH?Y2OxW_-|&Dn zrN;|%vm9YqHYXWQ6@uSqeVS(H!q7Ft(U&tPl5+_&xS4C754qeo!nODT)j7}-JGwi-d$gpw@zrKzc|>|Mw$z2Nhj#GnB)8JRCewq zyk#(c;TB^6L2IAfmP*somAv>`l-QEqm8r&RSaefhuD-#c0ljRKCBfEPUe6gt30vb{ z1SUMpM?EQEOELe#EOx{@q*xl*$9c;+f`HSVgo8&U{7rRbC7Uuk&FJvXjo7{D@hS&? 
zD2#kr%44?5{(4Dt(rrioo)@M!S>kV=?b5%UlCLW-&wdH+RK9ZXVlA-Dr$BRHl7Tdp zYmb`U5e=gEp9IOzb?kBlL5Zqq9$n(n)s;iDv8q~wV@)sVnpA*5tl=SQQ`bBY0>mCl zIcUz!vB5>Y#v5oHt}xyDo8vXce~R7KuZpF2|N2Wf{wGsxiCrI8PJAw;W5Op&%!N>5 z`0f^ydnZfKvy)y)APi$zNg8uvXhHlwS{I~fGQe=s0eh=A>X`Dqzi!EJDzazS$8QR~ z3DQw}{?#vp(57wWcxl_8tgx47XK0nA_8=Yjav##$d{sUfIG4NdLbb&^c+p+Lsbs&s)GRcaJS8r^?U$(|RGZ*W>pQ<*lR^>ya4mW<;$3ap z+vKrL3F{^@*^7OxRjPDyRY+1(ZvT2Aumb8Fiv-qj_f*oa3jArip{-?vb}^V69ltif zPuv9lDU%%bzXn1f3HQj2Acco0(yh}&wdF2F?HoC<@MOLw!4NKOJ<<$gohhz!%q`1D z2&9++3HO1do3&RmWJ+FDh5>i`Rr{o}9>!sps(v|2=2=-{=|=k8qct8~%Jpi14SN2% zXVi5u01M`Kg?vinYD`3Rm)yYA9z<7sc!(nOvW|0c{ro=wzuA>h-1W!uH-0v?FUX0w z>cWr8QY^@&)zCDxeS+F;<~>+p0LRXtmVtM#Ah@tJax$6BF1mq5aQ8YYHu^mJz97~_ zeLuzh=%?l|Od-6))wva9KjOdiW=t(Cta4;IR$^@Rz=X%yTnGO=Rk?yF9)hAs%CGK8 zE&vR@Zt~kqXLNH%KIcmUqvCZZ4a4w5-Ds1r$zDU%cLUcN$*Ez_I4x71HskG8hsP-U za`Kx4fuF-BfuCu;SKyIwS;VYbUa^S#8>YD(m9D#x6=Qf4`zkRU&fD_zdZF5_15i5h zP>bvpTp(DI&-x$QYm4kg(Mt4`cmoH|&)=F1I+1RmJvW{l8D9a$kRCjT2iTCycz?*m{CLpn_YwQRRf1LcIpuGQ zoYTHHM6XHdA=5+N#vdXLPu-?lL)H?yn@D%@W9ok^r8BE4Wa6Hg3~zJY6John37zeg zuz=IzWL;7$Elh7+r6g~7Y4@~ExO{{XWsn|Y!$;AqBd?=2+B7N+8XSv9xU+zF$~XnBUE3lrr}9$t z{+@^tVX25IRz6rdc|$8h=d6X4w4c>lq}5h=uU}1*c2p|ot_rw*%qPU6r6FyvQZ#8X zO;_|gs+8#XLT#$vA*>;aw)+Q<#@SzGa`n6RPfVdZNxU8QyeYc#L_ySa$Oh>6y-R|c zyZ45!0>Sa<=KJ#e4KL&=d;wVw^Hxlnm4f1Arh50#iorG^Vb3%O+JQ~vMMDhGu0w%E z2%30rUyqQ|&$7&Cvz9mh*`*8cKw2ch`iHio8EXkF*r=aAdTPiiPoWr=if>r8EW^u( zHY+yneufjs!#AJ^Se;=f{X9q=P3CIGppvzRXO)FZ4kiuxvJsMfpZWEDiat#WCd*J_ zf-d@Qw8JAV9M(S0Ysx&TnpbjkCXOkw4}Ay;IU|NtaXY_O$R^&FBZbBF~*amPZ zjJR>JO!AF?QVX%r%oDk)7WsCj@Xryv+^eX4aox1wtr+E}<0fPY<5^>X_AOM617Zu( zfAK_(30b!p-E8qv6t!E?QOe?egYM=zV_^~KH*pe8)n~P+j#)62TH}sXz42EWSmM)S zgnJ?};}F4|+;^O6<@$hX;|H`0Dl%5TJZG;H@ou~hM&$!u+c_`~nClBzlT32HS*7o; zw#h?vxiwLfE|$*5P_h(PQts&eIN8MeDtT(w9GcF~KkOk*Oe433mh(?+`n8BMwL?qJ zb7JjfC>H07jZq~Wzjt&11Ux*aT{yrMH(HJx4iGs+L~4g86q@*EEu8I83=Rzu`W`*7 zJn3~agLNdZF5nU5@f_b(I`X7?d(L*GH|Md{09)x@(~>cc>ZDq<^7N02!H*KspG%hK z)b#xJZXSp>BiKcDaxaZeKOa6;*nqQ|{EA+WP1Zylw7d{^RQPf^Yuc7jcrq44zN$e6 z$*~;pbfTU2ADZhSO5fz_Wz2`z#Xl_0lszq85~FTCU$M;rCRN!ob?Iw?xU?g)JF@C8 z=&i|3gvdHj7(uDrXSr*+&Z64W^Nt@Z{#2W<$0RPb!0fp-M@h8>FD0XojjPVYZ6RCg zM~+JoeIUGG->wyt$k>BU-2Fcece4had|PoU@b~G^KCO~5sb)EP zj!tMOOwKm{MCq9Bn$Mf4Pt~`x_Dmep6kgAn)fk$dc-1%>d;`@NcbS9EG;j?`I8(I< z9he4m$Jj-BXnJ(-s_$5=b;XS8B5G)foC)K6b2yk&7}T zQ+sEnn?s^qOrkaXD-3`30e}bBa%e6gXXF7X7!Cr@rF%0i-8*!|yZl+AjT|alwpe_! zcJns8pAIHFb1_4~6Ez)M?2QAgVp|-?Ap`t$ zX%}*|u8&QVa7-fHZ)g?c__KA>Ma(&?Qi~QUvbFhl8;xM6@{xN3OElN(Im^@?}dPe zu5QznO5DPoCadBR!~TArVERURg|V0MU)q);tfpzbol54_j{v;Mc2A6F58ZD_mLb2< zuYukhgIv-KhY~sHwvhPqqF$8F+h-}kbhp1L)4?J!1mVi6HPJ=0)+dRx{hU^Mjn0(i zcO_@~bYx?Xzgv_Z-uUbZyt!mL_9RiA@_y!{@N0avlAL9?r? zM4oe8Lx09Xi`u-;EYOu-PYn-y^B=~TPI;$Z*Rl~Gyy17}_|kx-yo|OgyzWEVtISPR z9+Y$5bY68cT|@6{2IhxP^>c!s3s5Bcs;*DUF{K+An8qFaU}e)t1)3LJFGjrZ<%*bS zQ(8;tnjy$kEDtxhFDSPD?x>Naw2l7#iHNE7>D*sAg9}eR*{n@Ab@*L&%lxMey@l_8 z0EJ42k8v7Bk*Q|pnGMYinR!0;;*eUH@*|jmlJAJA##)e~e?#UD` z=mbjxN&{z9i#fP7J2;<;@KS&cW#jzFY2=vB&oWkj z_MRDWBrfOunslOsSf=RXTOG@j0-Vfn0U7)ExPbowk3n$0*Xb6X(Li8-t0j63!=c~B~ zXc_CCy*mAC<Eb&lo^R z^v-+quP5-I!%r6YzU60|O}L%i_$;hT5^Lbm*JuF`1$ifX|(j|u`j_(#Iy0{n={H{{r=KP z=m`G+92)!D-rq#jw3}P)^DK`LC|*O8z`-8pACVRL-{2DPc#}_;&MH>wGq@ZB&N2Ge z=$C=?*17O@n6aUW%m9pxf-(A@KU`Pm977bVLQkq_c@&$cDE4An+SuAM+}t?a`5{j& zob{;nY2l9g3+rTX7!e6^fQOJl2DbG*7XHnOM`;xCd11FIZf=?2V~z!9$9JV)%l1`? 
z6KEk2DI?q-cs+PNmGf$8TFd(Wtb6HeV>xxWF0WB8p+hg7wFnsndSvH=k9uyUVP~UU zptaQPg~7SjZuAK5l^iS?O6nV7$M!kz}`!Lg+lU zjfuk!dELn9PfmSnXuJF@T?sEWSJBUxb!BZVGH{F>fKER5&D7^SdJ4dkRJygEXslvI zNl(nN;1H*hFnA>Xd{v~oze|0xvY#*I*(03o2d~Y*#(1pz$mDCRv<1Nc$fIf83F=H700~xH^*)Ea zQ&4hoyYv3OU(?w;^4$q9i>%_h3@wR`ryB-0V;IW=jOUNewsae_sOhs_Ciea7VS{ci zc)%d^^{!G|DDCV4kmaJ8_bUuB{70^T`t_-x>l5j=Fv+Q=^_9SiJe9@|ZZnU=AB9zF ze(L4v{eQvP{LItV?7sdUxVyiK;`by2ovZu$m$Q$ zx*5DrYkrMu7*e2PX(J?Y$8XG6J%2J@+c4Q0f^x2)0x)}I^Zx+X6_R|@=GW7=_45f@ z+8p?vzu|zA!ck~di45NoOr+D1a|Ssi-0&MImRoUE4@ovyCTnz zJZz9c;bdIk@@v9=B6$A*Ec+!LPBM5O(xv!=@fbgt4agv5o_HURt$4qRd}$^9(TXW| zWxqdK(LwjO^?&$pKQp3~RnY$c5^A#RGr8DGsV5!liSW0>-;95?hmWrQ2ma3bOlvi| z1L7YO-79^MMvd{qk;}?gC1PKg^*9{W-xBH?&CHr#f&484&8~QxQ;SW~>PWWly`z7;HJl#)FU<}u{Yyp|y&j{{X=)z72Sb;y#ga zCGNj*Xe9ARigig#U1}`3V1N&mBMeU1)NoI7b6&UmIebg_vGH@@i1Fu-ejMF+W@(F0 zXK8yXiDkDrXyn=sP%_}*u-x(mZ+t-bRekYq#kyXH;*Bjo;Th8J7XI*DNUSeqQcQ6K z=O`Nt#Qy+v4_e&#m*U?E_&Y$oZxHy7D;Pzw0wr)=&N1?!<&U7RCm&j_DOJT{WqC<* z!EB*7s%ba5D5JsNm;S^cRut+m_pZ`a84Z`sfGs`zdDPv~0xuZ8d4 z4O_!Hl$IL25+PQKW#AE!^8<|LuY5*rAH;tSrypyC>e@Zh%vG>ld2s*^c6AT>QiZ7*WRpy@&Q7{h)jq`(}8{#8)1iy)H!SRIrDi`WzHym4oD7YlHLhKg;q0NhamCiud1JTgcXV z&Rv0wt5v#+cS&2(EnW5P-0A##@GZY>&jaZb_*+MlhW2er;v(KlqkQU##`D1$ z$OG29KZn2YO%K@5;e^et>K+Au5bC!pi_J@2yJr&M?&2gN6asny-xcfHR+*=GcTX19 zu_RGOpFDzt1Vb)KMs5irbl7nZwhHzZM;Y}Lt$cwtLP5V zT%W{yzlOdkvXf2mw}W(TO4d)1!+T(kTocYRGCvB=*L+!VZ5ziV!ZdsU=avVj1J=1M zLK*J}nn5E-nRDkM3`sc21D=D7_ODGTKjJ^U@>N+WT{F=B2>dqvo_-!rb*g+W_+z5z zx7SgGmTN0CjzYb52gX1ruf26?<9mrUNUU{V2+v_{6@<5%mGjOTJ9y&+?F_s-w;1Wx zylcX~Bel`58VJ@>a!V2d=N{{YE;?$_Nbe6Mrpa_sTYym1uy)9ZGc>2=Zk)7!Pt%;S(mu5XX!_Y;d=rN;UHOq~rLOY&w zn(2HuYv79?8AIcU;QKY@t&EUb#S3B-CO1SOdM*nS#dzkqpld!EwX*QGtNyn8SH0El z zlvldazOr(kt)U3RoBPkJXHW5O`$tdxpL`4O+ScYzKgB*E7dIh}T0}DJO!wtboco&5 z_(HJj-?Y#6fqnKqAB~bC4*@M9XJS2!g>(MxOY!zNwBLxH7mDD7w(xg`Bf2Tm5-qS) zA6@bj{{R81f3lUltD^X;#CKT|Cxq@?x$C}nQhD{{iq{qFl&Dgstr|+LTP}@$>L2+k zcJw`0FZji(;uiH3{`&KF`Nk>>(Vl+~i?tP(;(w1Ootoln$n^U*&Piz(b7S*d{{X#R z_ky;3KK+!uZQ?ZfOB*dZ2mb(Y79;Uy$*(ch+TITjX*vp^^EG{PZBFPOtf2gWj{g86 zMfumGd}wpw{{YzELeR1Y{3Ko|mU$QgGaO@KC;s^#op9zZKV8Mtiqf1ZQC>|(E?<8< ze&gHB=|XD@hf7Me;H~}IR)3W#KbiBNgY-=!#o868!7mWlNp!asdbX>o2qWDpK{SfY z!x_N+ z{3D?lrPt;<%_2Z_2#`B;@6Dyt4(I_S~`2R+?GPt&0R>2*4!vlfxsjv2m#3DxvvPnz0#+< ze-P_eYPOc(O3=;*Why}#9G4{U6k$hlKq9-SMm)|-`tskm-p|DoJ=3#U!>E}VWak@*4l=!PI0S*zS8L*5jGrF0kA}K0h;{z}h&~9_G|g$|Nc;_G zAY0us7-V?Wr-30+0Fa?742k*Yj|XX<8rBsWBNJa+iD0x2ts;Yh8Tt7^><`V? 
[base85-encoded GIT binary patch data for the preceding data/demo image, not reproduced here]

literal 0
HcmV?d00001

diff --git a/data/demo/001763.jpg b/data/demo/001763.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c80ebd9b01f86a52749ec0174fb4bae3672e9365
GIT binary patch
literal 73424
[base85-encoded GIT binary patch data for data/demo/001763.jpg, continued below]
zBK1d$R;5<1rK$3Uum1o9C(*35=qLDF@dimQSc39v+gBVNE3Tbu>`_wS#S+WBRaKI4Vax3(=!TOvx`eImL!m^N=joftyoPq1f zr12K3rD+isiXdb!v6HJI>5xZb_>OC7xZ3F)u&-TjBgnsI&)9?D5A4aIrN4#zC2uXv zCv=*P>MP39V+_b!Bxlia{VTfGHMw=0zp{&*!lMY0u{p;;G6$gMsb6@XQr9yc-HX$6 zF*|+Fe&Z+5_vWbF+{b9`Gs=^YNDsm1j)U~=(!9(?SuOWAj9jI5&5cK2Z4$?2u-P+2 zOOgKoEP(1Wlh3|AM@r>%QE_v6H94Y`hLh$ycp$et`tUz0(y`O!)viSM1(i08^Nr^u z<366=^|ua*d2+GZ*jN?baSD%@dG*0K;<=$tP4i1}%4!-Tr0@@gTUmJTn8)^ee3(pt zo$rzZeHSEsLG-S#RkYeXxh2XXh0jbJW7F92?@?$L7TzDxZS;L59gHxp`5CjhkFU0K zjAyMA#}K#L%uA;McORGog2&tZMRc^1yE$8JfbwkZfr>!w#^R%pM;s3P_pOB$0r69=_ah zT0(wA$|jAq{6#*=unpjhIlxjl#(l6k_7tnH6B8_!*)G zxx6lAbMqEO3_`Ha&60cc=CtP0?yO-FO5+klTqi=OJoNTGee1rPcGTyQLsZqY`^SY= z>&kNHWP&hPE8pe>WDeQmB=)KvExi8FA7ql;2^GQI4&LD~6|30EXk_?~@+d+_ z5!?)}#jr~5QTK7!_T%eW8pf>!h)c_fLKlU+wUB_PCnE&+Jvrl?R&T^F6xmzIWsL0m zlV&ztqm1xIc^v*;oY%~M9KUS;00QV&CL3LLIc{Jig;AD>hALMFs0SdgINHOu(=V1Q zQ=*+WQRsW$jyz*+X`|i9v!oGQt0PYR6-tkk^Ns-nJwKg(RDRr^K3^GpV!LL#hEZTd zQAuh>9zG8^=)jT)JqqI$_0PwDkC17f1nza4OW5z0_QhoK@T1O-MJhK8@D2#d@ELK& z>ip#KM~E)XeAhN5q?A`v)4)>q+`AMn=6P;fNU(O` zj2=1Z*SP1JsK#lf-dA&DErZjy`NdiC21{P6}aB;>v zisp^BJvPS7B$81SfP%ah!4#9G&{t^cK2o?R-1-imiKJHN?>RZ?jQV~wOo;ygFc=ue z-y`w;XlyOetszWtoDKysRCAMq*8o(Hzds>3&pAD5B8YA*=O7NG@G0~&O6+>X)3T~7 z6Q7p@sq_N3v_!PiZV=)*r(Qs1%t(F5Mo)Kk4Khoqe6T@^HsEkM45V}V3iPqyjVjkM zc#FmQeWcRmC5KOd+)Ent0k&c_8OC=oKGjh8u5-^#5cvEjVc|xJFQbf^a6Id2mQ{y2 z>IMNL8NofeS99TCgZ?Ae?MAzxC6=+L-3AM__W65$^D=Z(^e3lEq448S)8o|-hjgC~ z>N@gG8is4T;Ht}lK3J{Ll?AxY?rY!v8~7(!(l3RVhGep^ltK&J+1a$>+&3gU65N1s zwCw}Jzd6&1)Pbl|bMT5_xII z7~TFgOT^y?^j$~BR=Nhe;rp4bXHpDJtQKTocyNh;ah&5g_32y2n~OP}c)cz7oM-GA z@Ha;NqHi0;UN+La?H0}3%vm91S>(tCF_1_YGrUsw~HmpskMPT#&4(@BmV%eRQ~{Ky#{}S-YmAg@QB8dsImbJCvw3i zc^q;B0x|Tjef^w#FX8_H2>d#+xSlvB)o!8lF765`mvi%ibDlW-tCkU;wDdjvQ^Qli zMyz$`x?k6knek8HM}od4ct}`ytHaPW{5FLx?!-X6M;$YfxB^MR<362h^AqEA{wMfX z<1JIc5@{Eai+kO%m$L}FLB{+7M$!gA2N?9P*L(P;l2bF-$u2T>?-(An^SA9u;7lZp` zwIM)Dn}iBj4ufl80gyK1G|w9NPvV!0H1yLne-`LA7EvJ)M{R$+V~lV)2OQ%)G1I@= zpA0?_YCa?I?#u7(DIk(YlgUktfD_s_0H`?W*14~Y{{RFv`vLY{CqRyCM@_DRG>%0F zsVM5o{{Xf&nPsR+sz_fu?QMXGPy0ta<3Fc0sj2uD zF9~UpXtug6ZE!G!TM*B$33%*oY&L(&%)g!!NyHjOz?b7cF?f8ic3QgZ09&C z#YpSVZ%kJ)@UP(K_V5QY6_8$+!%vO7aWtY1D-ly*XOs!p9*P~elfk9 zMv7fIuV!gQmGnmm+=MI#uSNj#)Yn!foEGp0xKRB) zPhWn9o2w$gn_Dc!l%OrN=NxCTz$dnSxvCmbxsUyxMPu?VV!Rd}W@$E(IUY5Aa zzX?NeaMMF7sAHUQMQ#a^(PgyMiiS{JExz%)zc}WKUiI`TPDxvv^F0&7xA9GBq~2Xz za}zbZzG8+YNZZFa$6SMwYohoi65RYWweao1QRH}2#hN%=5FEvHCPaTwt0q)r1?!Sd zD~a%>ibUGUv}yB<3D2p0lj23Sw2vT$Zw+50te802cLS)$QVBgW27CHrcvfb%_!r|# zfdi)dTX>9##_yP%XLo(QHk|s7`hW0FsAPYOmg{U+{zUu92J^-Z;szs-SbzrrH_&2? 
z2l4B_KmD-b;%ENa?dSghq@G8i86(s63s}ezpAr!YkGiC9&Nk!@FgW9bp4n7U;93MMbW-3X!=z8&xZ9W0?6>j`>dpHf>5VxDCFlm zeQ{q#d_MmGf_sk*c#8|yVy4$N4%*iPguMpe3C5XX^ULuj815-U5u z18SOFGuq0eJ*>zX2s{uo$F2t>q3d07rzpu?>-}8t@wmJ+s#_0t8*6_}Og{r!DABBZ zO&kG3q8vkj4*>O2dVPD>zWg(|u-3Go=IqFlU_57PusQBBSf1G@@UI*Absm}E?-Ja6 zE7gbD-Wd1V$107F0}>R1FhNs*K?ku0yGtz_Qt+m&=2^h;ugWwm#4;Z_=T0+}gk_P)33b5m;#{~5M02<=;Pub_kULBZA;Xf87(vZW+xL+-b z;0y*+f(PMUkB0T_LOXPg3ZM91d3N#qO)9-k#Gqd;~s#K- zPjl!x=eOrs%5qS-q-`rUj+0RF3fuFM;fcXH$m`ScsI>WQMZ|JlT`LkY zLy`f&$X8mS%XcbB;6Y-2u&K zPYcOxZSJ2t7cn?osUO`ZvE*c$>NQ0WB(cE=U{sLX#z6q|1HY*0)7H6-Vo5D05JvKC z<)dZ=K;t+b`6OWR$*rTKF_x(0{Ap`y74*?8Z;X&sbAofgKA0eXyVs$vBU`^qTdyYm z;#|CWY{kRik{cbk$od-Vd|+nsUPa0IF(ixz;y@%2IQIOHTKV_m$H(6X{6B^_@ea8K zxJ{#Mjueo{Sx!%EtZFWo`?E$R(B!b?@N1i`7P@SZ=o@YP1 zX!2PB10h>0&rFZu&p)640Byhch2vp;EQCIxsi{O}$U^cAFzPmfdF%q7g1s~L-u`&cpV$@o}{A9hZRg*`$g+BG1ZLlw-@?g7F-6+S%*x z>s~G+Dq1BhNOP086OUilKZRcL2Cu64zfrmICb4$rdxtFU?~;2H_2;*#tmvdj?mXnf z6eMljcsT3t>+exHIZo$AIPz0SE^YxQc+PMe1Ft8a<;_#s<~H*d?s3rP+xk`CEhKi4 zok+%bUzmO!4OR)Wb4J6G0LySrC`lw&LL`a2%$T2_x5ow@1&9I+hZrzBUU z_$tp_@wbWN7ApvFRJ^JdHHJ0D31V>^YP~W*AI_qc?zb|VX?ZW}`u?r-FNJQdMwM%& z!)sxrX@G1N(1!r_`P=VIPdP2}=R5&{UXS2!gU04ju4)!>M{bx9-ReWih0ZWUQ7W{P zXCs6h`*o={iUmLx#>8z>Jqi2G`U=q1`~eQJraa;Vu#P1n1hv3-&m534 zPq04R)tgI6w5`(07FAJ&0FuMMQ}>DWKZg~mrC;0J#z43+$lSg^fqHSC=hwGtYH93b z;MT0;JUQ_m^T5-M8^jRL6Q)A!n+kgXN$5!Afzq_^em!Z{7Qbfx)466+-c$vHHzU+_ zIX!&^YWR=9+DD1>{gTbK)DsaWm2R2OZ(P@&{{UrZ8V07H+ZF^$pCl3_L?f>wImbU* z=!EX9$xw~%&R56R9wGRVa?(AUrJ!J9SzP3>KPe+Uk382`;BSb&E$~bWYvKzj$~X zt$~J8Fi7L|_p6lMtd0hvo`az9NBkqPhp~XfV@xw!zR>>wP!CQzkHl4NR`$zMjp5Yo z;*h8Z$+|##dN3IL#%pT(Tw6Un>LT(sQ~(Cmt}lg3*l z!pt@hdfA}T%HLrl5OWAdN4A&zk=4AtK`zIZ}4;4#T@|FgX<03UuGO3aEz~{eC z*sF^~$-7wj>*LjqgQn;=Fv+DnUS3MIzSkM_AdIO#hlA^11^iy|b;Me=%IUWks<0F0 zTHnC0831w$Ib|5fpg#5XzwIUCy(_{^JojXV7_*cieowtcD zuJ3fHq?X!ih7nynie!O~cB`(?ReErF>OmtSvaK$MqlALDytFvjd`aNVdSZ2b9pFO$ z05rOEY^k_($~h>*=vUhm#!sODu5tnVKA=|%ZuVzoz1yP?#TrG; zta3qhY4&w1&`zu~pS%I&obYj6N-4CJ<$_blV!}n-t;RS5sqg43)I3XV7Nu(>f-@k0 zpU)B=J7c-P;B%j;uQ=EApRwe8%Kc6bFsC>Gbo%0>MC@-Dc%*3Q8s+VUlEA^TNp4j~ zUqPOfR#ue~&1SCiDLFrWLy_&$ip9xaQQ2Q();uS6mUdnZ(CS*H zwuNPPC_fW+^XZ4=hEfcHiymLQu$i;^IDvqsjIJtKV|(| zYsnQ(wW`Mso}qBgu_4~Wp(8E^dF64(2D&fUtNTyF3F5CG+Stth0NDN%j>6U`qAb(h z%FREJ6apRCh}Q>|-_IP?AB^4u@jt{HKLL1u!Wwd3SpA7XwY4k)1#{-e?)X##oSXrI zqm$R!{vCWc@ejiN49h{i<0Tm5LU`k|;onmI|k6=NJ`+3Z*(! zCt6QgH>crmxohTmnXk3RW86~L`uz``ye;qx;w`s_VYk(-BD1-c`KOi%Cfrnz6e-!c zROcsdeZ8x!_B^ zS(-*|tDFvYZR6$S5;zz<;AXM)DD`gv_=4Ftt>v){(9ar_U%FT^1Gd~A+#bBworuQD zcZ_Xjdw4t?;|TN0&1$S{{3HFGd?l&sD|LCPJnSw+#v>}WA3QM(rZ-Se0|z+i#eFg0 z&kx!7V(=t-mZ0qT&BxkUZzDVv-~rD}0f0RPcvryB5Nglk8&D-vZz}+h{{RRKpl1MJ zH*Pos_4=C6NQY8_C~d*s*k|-si9q0}?0OtgC zuUPO*16=Ub`g%yNV;ke$!Q4m82fuuA_~yD{Qck9)hgOg%@b;v-y{t>M$YfyT0fEB+ew>bR>s|47(8>#&-Dj%! 
zU+i-Qkz``(<9jG2K?k-+Ty_S#i+xVs%S>y6;T2SU<_d9>!2_Oq3h|$ZpAh%`lHXOh z01N>mK0_RI01u~H@AW-)%UZHai+g7(s7~jQU^Y6Qyyw5^RV18DXBkE6aF=t*s_R$x zQqF`KkId2Jl6gH3r>|bvsu%OPi_Euy41utApxQa%MmlkheR`UQO7p+8H3dn5D$SPP zyC9L*Kj)?@?Yt=svAC5$3`mHU=NnE59;YWiP&-yrhcps3i{*Nl%PiK?8E+TN47-K_ z7(RslmC$%!QMaCW*78?GEC(yo9XT9lAXY8zthblO!J-O^>SQ}}md1MV#!hk8x=R*_ zTluzjxQ-atb8;1jxctsOy)j%43KF_H8*V1s_`W+E*zC0VLOf$K%mBdj!S(+D;=Qw5 zT5hgH0!b$YnX){_>(5h+^X*?b;x;t+ciJc$LCHDeU z_x2;x0~o2M+~sym>8Ql@jSo=)nWMNWcW2CFjFa!4F_YJ=bsA=(1;B z9)rDkr-e0HZlc1F$S`nIpzKa@p8o)acN%G!KpiDHE0Rt~BzEVwwRORyj%nTK&Z6FE zE?W;FL!5)0la7G#!Tl>zH#ZjP5m6$h0c?VHk&bi7#z)uQxqC9Na;I^~+77}%-T3GH zD@w;uiW2D^(~p#5MLltx@y|}<@~)U9(Tt5#Z}xjx9wx@%RgmC?^&t9m{{SYr4-~}} zwZEHpG;E?kq%-`Xw`6d76P#f7HPK0}To|Rg#B(ByF$8kB>5k(W=cxQE=U>{d_N4H) z!@Du0YHKWa7cdvo+BMv-v^UB?IXS^lMow{nan9*08KpXtvRae=(S9(s@K=DY{7d%1 zZwy7e$)#o@F^&t2ebCs>Il(vt_54@;pnf8Fug2ajyt%#B?yfAp-s@$)KCHS!&cn)(5};(LIsCi*X*Yd_ zliVhp$O^`R0M2)iGJOU+b*h*HD=tr`%t!L8GuvE9b1};>$;UzORh)%jvI3ys^y%(D zoh!YEvH%nya-*s0IsE%|r?`z}59DESmLT@*KhLcsfny-YAkOZD9G`wVW}T6Mz65_+gswUB9&O_KRsN!i-YN z)-`5gNgJGj)!P|9fCb-(n99iHqU#a$I?0@iMM9};jr|J{w<*uZ){r&9r!I+*n1mF^S z=LFyZUrl&R!qLR5btrF?U~e?S-%ea6m+Y5Y^K2oCMeis#Gv;cryqrSC7z78 z0E=V=!sXppkIS#A&3DOi=#K&}QN?iupQBz|lJQ&aP^6U| zagWxQPP9mhBd#zqazP(j+q8^IiVUPaFu3C*x%RGE9Wl_!TUg~?(n1`zRZ+$Yf3!Vo zKU?sEB!B3a4vyqy(}VhV_O6;sFEN@()PRElhB@`-l0=sv%VgeP%aT}N4B!k5bN)4x zj;67jPeacx@4Q{3K(>~t6~~ykJL4gSJx{5?{41C7w~sEhJ1?`}=yEg0M)U*a+&Yo% z#(x_1y;DRMJKz@#eU;nAWW(nl_ODiXFI+5(W-A0|cIZ4R?C) zfoUx4ZAM06K*M-wcL2~+3Qy(Tkf>6U@WMx@%xk7}>4 z%rtvOiC7uq$x=AT>PO*3Zp^0bqB+~!Sv4jwI*|cl0}yZzY#!C)KeT6vWbn3)E$)_* z$s`{vwvZlWUtylS4&eHFSFYNo&r0zB01Rp}-`&2V*9I6{FLFzVW>7FxhTFmC<-yPET3$c#pZq00Cu@B9q_abD zBQr`JI*q~d=v1HL&ISND!L19O9^1ly4kt}MTWguG*;Fg;1wr+~;k$vz&(|Wn>XL4E zN4FTLMM0bvxpx@2mg7j&ZmnZ7?~WM*U;)=@$pm-7`@Z6vc5E*vYq2<3+Av4@N!Z^n z%y#Wk**y<@bv6JTGuueJnLohW0m24V{%q8{o(-6&4KEEna4bTi)^HO_qc@# zE&+AlxP`}ajC^IDSc}im}C3WNZUvoy0HW*^y$rIc#BPj_UYrg)TA*<5Cfc? z0mm6o27CG%hOD(J605~l>KxPXhu$<@HJ9i*MRz zk=EV?1Z`A_6%|0}paa0<*6)RUQ?BW^GR@+MA$5#^CV9%b5^&sS4UT|}b$6$qYs}f(5@qhvBCk86Q%}v`H0=O9WpDW@dmqNZ1U+=LLm>9 z;ueJhFqneNu|WHyI628<)MKxhSzPM+<>mFwoT`ye1j{A60k=G%kP>plfI@`?mO1FA zzJC!`rD<6D9KQ=1KfIIIubFu~Z2F#vmQrfr?&N`GvAB^~1c5gKNyp4~b4DE8=Ly zUPoj5Klm>8?^sPzH!8NWlu%6#_2uAH%(U}TknP(E&Zl5>S49dpJ#NUgm-MX-%uOSwDMcLr8CBzO98`28!P(Q$V? ziA^0Jj(k+B9J9xwtdc7ccNkn}j!#2_#zrzL&Smk}!|xc}w}Ulnqh}@G%Lu(YUwG+) zMnNYWcdm;@_=#n%ERA_=$vktI)=i;FWjNeFyc^%pim|W!315hQ9E)3@O1JXcgt&kN zk|qFr$KQ?F9m%e$eDhXkKe-*zCFZl<~@efuHxk0qa>`7A-y?=pGCI0EAz~UJ@F$ymDeKQKx5|1#SrE z1b}b=AQC|*6~g>7@kG{tXOC7;9ijq3<^aM|w6Zuo2pnU-IO|$+$!N`$rwgsl=u4|< zlH5lMaMFe|0l*%Io(?*5+N>_AaX3g^kXR5HayNbKlh~3+J--^;)2?*Oi%W}Xrbb!H zzH`9H1dR7M{(hAllIeCfjAVpFq~H+WGJw60sUUjOv*odq-t859Ii^jhUL)N|9WlCM zIB3HBr_-P~_pXyvxi&r=mUuuBR%XKaN;z=&BmvI^ z^Mmh8;x8H|y9B--(yX?$ z1gSiKjYZ+jVQ#fkHN?ke@04WzWL2*iYd#W*mg~jqBhfqs;;kCi;wa@RL#Y@N5=rBy zp!Tmx)BH@ADiX#ua0wY)oHjdSr(yLK7^UJ4oRl9X4R$Z3rt9W3=-Rylx{Ldx3{6M&!6}! zj)i$+tZ3dc)9s~!E-qRIiDhmaDv-!N=p7DtZkC=yAyR z{Hx)g_$(Hl&)~}qZr;J3JA3wu^AivXIxs@xrV9*i##FHS)mK-dUJ6#l#h;sAC28+( z%4>HCBC>^v(EtiS!8s(5Mo7zd7(J_#@%`T2LO32*CNx;xp9H$EN6(-b#yb&RPP3?5 zfq2r~N#?SjEuPquUVSCpYC zNniT8^v+5#xs_?EAMAPLiUS)@bkmm^1(!c6k~!lasWpMCN+pmgaq?tgha>Up(-qhJ zM9?n01+86pChiuE^!en9Ra7c)r*23+4hQ()n&Pf4j1&1v;Kd;ZHYX~3f)8W$?_E}L zRypA=Z+iQcOwNRb<&+r%vBz%YA8v-SYT6nm(k3#gUolj%=s^Hy9sP|YOjUwN2qW(PC$>jV@Tk1? 
zk~NI&C5bG@u6koX=M>R14tT=ga&j@#9+ekMscC6)%+oPl-f=tuunYiwPCp;7daTAS zO1f?xjF=hrtC7KJG%CVG2O$0Or|>7I<5j1&mey_i+Z~%Z0B0ZMibzjlCz3%A%=j73 za(Vv%IQ?qwm2WlOr`YY3y!QkJ5ylycgZEEPq<)yHjJ|K(L%5zkUZ1UdKkVi3vJGPL z9b?2+EWi`?dwEMJI0K<0Zccav<29^e+cT*?M0Ve^55etY!b+Yc)U0BL`fvn0qfsI* zGrJ>p4hhKg?V9}t{gJ*W!KK{8qU&0vq%g}MZ!UTAPTq2J!8`-cY*)!2Xw$qSVH{Av z`{CuSt}RrqP6kE}LB;s0+ihAaO+^zE*xUfD ztOqzf5B08(X&Q>?;Hib32i+fATWN6ImD(@|QlNi2mUw222%_4%K`L9R9)lhJy{fl{ z^_zREHu9Y8+yanE5AHbQt!iBeSxDQ1fD>pZ9^$Z*1h1(Doza@#VVrJL^KR#R4?sHN zl6^MrWypL6Il<^EFDz+*-3ZT`slgu~3!dd>8v-{5$=jzBPEMG=JHo z=$~ltTtFnx7N{14GO=FFAA-Ofa7p*7{w47wnvS+1 zx4wYJq%=XTfmxUga_Tu&$F>jWTfQ~;t8=JmV@0&IlHLhWq>?(QB|*Sksl$w5+&nH z1=l&jBerqC_O4$0;yu=)S{)-!REaW7p?5nEK!1c2-nLCLS2A8iT5hxU%_GRvAu&#@ z0ESJ-PhXheWw`a}?N$xDUfd7t@S#fXI0is5o}(Eg`VU&KVXa);I$GUY0aobGfpVY12^k~%-1~Z0oJ-;jPr?2+n&(Y0MGL8t`0i<1HtUk_ z$t367rxo1j-x2LRV`UbTs|Zc((g763fh1?pcj=$Xyjez1-9D?>CCgIUTl%@dj|ty+ zgGmzI>T@GWD}L{+uA^`YN##pq5(mrq3iA&V3-5<7{hd70q<&*P*OJU#{%wf}bApAs zWqHQzV6m^JZoC^NpR8GUXikW-#?nhE`=UH$elG$ULDm| z^6t-2m7wy}G;caa76xE>^3=N=sK!FBGEOaFr5|~UIJZ`Ol+%8PEf>XK4`|n|1^t@{ zQh2{}DzgZcBxaF7?W4qL^zqEB5=`JUAB1sl9FgfTKfKEv#@a@I>Akc*?xH7jY&G(n;I~whx z)d*QDo$o1zCS28!WkAIg;OBvmJvtn8{b?lEre!E300WSi zz$2#~`6v7s!^ILX_e%#Y5S#Y_227Ytxi_~rN?WZeGVo&eLMXnZeiAu-@a^2EoCa!(xN*jG*A zpZF>MfuZPfY4@7_+>GHsjdb}HKwrbJUtl}e!Vw9=lXMhh06~>=&q2ZbX)U_8z$qOBW z0ylO9{m?Ot_YI79A2obm{in5!Z&rr$;p5!d%N(mKUEDbv&Tz@7V(X9M2ptGaPqN_<6K3Ta&bq(0%iO){;*Z6AJ!}dZkacZ`xjCOmqxot(eZ;`W_*F|^g2@B@qsemZ z)UYj_bRd(TYW9~l)55K0nI*7e#xagqXV(DZ*Y&8ytTzk;M5GlPv)M}y{kZ(VsP;}; zu=dhgBhUOHaXysm{h4<0%Nt~;OgBzJ^zU8nhpic9`$80kR~};yFi7e^J^iU|ydkBn z{{Z%!wuodTsBpO>9A}O)I6tp?u{VULju@wc5UwTK%2PSX!Tb-WY}ZXXb}*e(_dD$a zSh&@^H{?j?%!xO~>_Nr{AaT%v)7KUAC;SxN=<46Iex-k4v3Z($#XOO_0A4amk_HK4 z0}+md5-Z1L+}75ioBmU@Tm`|%sY8m+WmY&sP8 zFz#HHQjE&TbMlpSJwAu6Yf?Pa+9HFsc)v7$H|v*DLX+CspeWJr05;&uyEr?BM_m5^ z`s&ZYy%G-+d{Bo_xYOj8*2WgOnIQz6#<>beBOM#Qc)+e^;^JwW$axDK49Yt4IUI40 zykot6Ir|TO&z3sBgRXpS;){!@qQ98MZ45GOSB(QHVsW0X{EtfWsYjZs@;;iZlBE3f zJWuwD@P4xe?UuKBY+mZ#)IohTU^%#0#u9Qz-YpqY-Ot|VmaiI;$|%X06Xh`@a{S7T z{op@|SYwXgoqmD*LijGu)ocgrp9qOn-6e6=P-Wh18n_OJ(QU|_n9xl%r0_|y9{ zNu^#*dEgyd(()qZ*4`=b!3fCVut)?PgUa;h89UU4-O zdxPkIom0Dnx`3o$9N_Rrsg4KA5Yp{2K8avm}@Hpsq>zn5g7r=KvpGDjzP!%L|KdNI$scAf8Cb zL^x78#~BCuRe~daUKtmk;XGs4j`i8;e+F)j?>|?G90EeBZqL)N)3;jN@aMru4ccAb zeWF`s<(M30ayxK9IqG^+QP${l!&j$IntCr(Wqcs`V>gQRG}LE?WU!6#G}1&u_Q=R5 zpUS?Ew|fr==@HsnDU?MPB0IT458edv(BlV!J?jrm)U>M|A{#p`J*_P5vP^b?w>bj? 
zB#uwjhdHcupI`BQployxv^~D;IVw;f^(Qz7r+k{}NkJZ5o2e~JddI{qQ&YH#SuRkB zj}UrnUsAC%y&$uPr%O{&ESuS`e%!DN#uDg zZli0fRhMM17~zx?!w`AuIsj|Zbw2_4AI3UU8GI%!W-sS`rA8Po!UGZ5W1Mw3sy_+; z0B0-c{C{O-sarhyRn5Y^ozfvLxjcj13=lczImb$sFMG^af`hsG>H8%9&|V$*sqp&R z^TS%KmkA;uc;_BmurT||>P|xp5ssX2Yv1&X+h&eCgK$a^M&Zc^>Dsh>mjzNp2G$$g9wt@Ok{J-*nALTE=0LGEjlJhB^1oW7qPkoaCpm<|PO4ogLH0 zpoq>(DIk&Buaz>N{~(VAieVe=VdkCUYCN8T@^EaaHf`+11=k2rLS5 zlb)ZaKb>tCb#XPRn*Jh^XInV2yeT=)8TF=pj!B(|E)RZs_aoaR)oX|ZR~~dK7@@&c z#^4Y1tCm{SP-P{Bf^GS5ILB^k#j+-}wEHYdA~|H^2a-AC`P30wvN@VkSh>JrIT%0v zZl3jK>rqCGO#%KLr;c;={Hq%7>hX)Z+!*k@q34|PexJ&n`-xoNNo`c_bWEwoUcZ%H zozdY6t@7?5u6lYLewE4TlZ%Pk;z<5!^){3{ zc7c#zsQgd!uMqvA+T#BJ!&ghD%u-0dc&x4gU8f`;eg+R>1$s`i;_E3lHO<<_3XoDY z>V5wJTJe94cX~&PJU6L8o@U5x+48cT#grZfJ9E>mRhvw4R+?wVpB}$xj|U5ht@N97 zcdbEnw%|m&RHF}HXLrlDH%B92M4Zl zJqKa&IW4TbQ6{;jT1RDSlQ1`Kg_)%caH>7>GEPQo&b&i#*0=Fn&g>cYfv3iD^8$O| z@CeBV)cRu{beogBk8UxHG=WQ8w6)QpYtqYd@bR_>E#>pnsTj}m9Q)T7u4}S1tE}Hh zip?PcH+`U-`VIi=^vz!JEK8>99#zTvIV3f`h>-3X#xi)|9Ch_RwPWEY?EB-t+5^W) zqFAT;JW>ODaw8ie8OTsL-QS$l#W_18Ik-X`(s~>%#CN_X)zJZ#?8zV&_I9U!^V|S3 zPH|oKgZn@HZq#gr#+mTbR<(q``ls510B0mN4sv>63j6QiC+t!1{{Z0*mk_hjqMuP> z8hh)uR(BwF9PReU;%m~iFNcsMc(~*O$ zjgN#s9{e$&cz*jw@wbPsuP42oeEZ1>l1bNbU{~S5qPRaj^W&WlFf`!M;jc7fE?`@00ME0`keBm zSTK15f)sJPzwq<;{1bUI5&{Lqfj~nDw``8^YPp?{4wh1sLOGlCb3xeMM z-ngkFwmSq!S1vNDop*Nw@cgk$N4cWrxW#5<*Z~R@0nT!B$o3T@+d7|^o&f_3(Bm9` zjWJpx}O$G=5wT{(NCUKzA{~2a}L}uz2V3rrZAYf+5e$58gN)qa8hY{{Z#XxM1EJ zYj8O`h~vI7>(l=LuS(Z0KIrIt?TD%i?ipW~C)1DAant#C{{RlY5^Gwu%omSh7*2n3 zm;?-f4o72*`)~)XbicF5!HIP%*3@;~Eo6Bk^C9^+FvuhW{3)JJM>zd{j__~5`Mj%m z(o+fuvL;y0N2$kL9G+_mvvJZVi;c{js|i0YvEppd2Vgq+A@Vy?qU2uX8?|!N7tIHHWGEc=FgYLW8p&aj_1mr68J)%6rHVY zC!Jt-2g{L(3fMek_UnrDuMT`YL2S1!n}J-gjj&0_Q~CZ?>HaMEHEhi@X;CxDuq)v4 zgihU9MmhER8H!31D8 zPt!hzyD0RhPcHSoWns2JxdUk#&mE5x`CChdMP^mpQvrt?+mD#%zXSZ5xW+do?&mXY z;Y*8K;{(F-M#xLH-knH1eeUBON7Ap|>CI##04YF5PI18?;GW$@UDS2^Jw1>hA;#gq zxje789X;!u@rR6TA$1WsAyg!)AB>VVw_Zj^Ufk6sVw$_u=qzq5RZ`j&0G9!ACm`}p zeK_Ft2d#Yf`)X=Je+BrqD+tn3-V?km(gpI#RRk$44^9a^z&vwZWF9`AOMx28D>6B6 zGZ-f)36g&jdUUQQ_K%By!oLvfSC;Q2aA+5XL?@OWQ)NKN>@)5LKCvgG^fZK0l0O$U zT{7@aAc0EFGlT_>5gnUpBh&%&ejO{-bPtXH02cf;;hR4S_h50R$mE%6doOZ`r-?g$s8%=$&N)4`M+mzCwTu{1g78|$UXweKGI-^7!m>%KO>m`0l;A8Ejee(?Y<&^Z9(1GQkA zX|LojDUsNKLBKtUB!kfXa60?ewZ)#Eg}jwk3gJs&4d7sYKQIHXc%myrRd|KN5PA_G zL7tw3w=BQ!VYdl1E)GMtKqP; z%~dK%Zj5uILU39o5^ZKx-N68L_9Wn7_U9ex{{Y$P5AP$0$f1A*h7^&(Y-bn;JahWh z)|(`uu|u``Ng>GNIXLU|riHo4m1B(*92OZQ@H4}E=Ky*ERm{niI9|yW28b?Y@?5lK zoMn-S8R?&t4n4bOx_ui+w6(NrYa5vuARC#6&ip)gVYQh^{$F5ncVpt^0ZZDMhe6cwws?>+6w<#ML}k z2A6FlR&(8XvhGWVX8~|dMgZ^BXOMGO^nE(Y+BESGi0|$MGbU}}yS8UI`j+X)z|MWD z-Q@H|27WQ$3G4CnCPN{ha>*WsAK|C^f5_Sc<$L z$gY@h2LV`-^KeHs2~2DvwiHEkrdgco3sG$}`5+!X)z`*ty z$JEtU_!Z&32U3dm(nk`m8JutjISK*nE7u=NiEgFYDU-{|J32Fe5FbuBs(ROl<7;5@ zs-n2zSe@UOZndBgZ6uJo0jDozv9T zEc9l%K{H#&ILI5E?#@m>UezDkW|}WOB`hL2P(}-L(1X*8;NQnqN+X;|N1CJ&xMvtA zIs6a3R-fW(GqPJ;GBO;ge1#-*{X5mDS!i0+^b3tb+EM1)>IjaxE(hLTLF9T>eMecB z?4)^N*94%Ay8G9iU3^i|E)+qnTZNH`K6gQmna^D2sNU;$5X26L;uJA%&y_Q7Aos;J z7L7SyQ@WeRu*RhlUJ9Nfsgx)Y)h>_Ktm&wBQJ@M`cuP(FrnC*Qno~5LT z3|CS{(<#pIr<{9?=B!@)K+w`dEzY0>e-c!Jr`8eWg6KLnBoRF<6(`!9OvuP6w=fhNnGi?Me#!6U=ploum#7R9A{}5>`poN z>}%!E+I#k^ywEJ+(0nf|`KLa5s_jyDbSJ)X`q!m=Y4K*W{{RTx#*g8>JfTSPcY+4x z=)>11f<5cv{{V`fDvHCz4{_q#7I%oCYga-xgV>Jz;en{DDM48s7V6=-`dwbU0%Oghd?QM}na?6dm$p<*kKb9-H{h59s{1&sp%(c1x%ln_J8h(@W9p{HXT@+0dVGYp+dzM%Ca;O*O5O7yd9fV0j$lBeyl7 zr&>Xxc#v8vEB@#d^xSdz`qxAtp`qo~l^bN^Ayqde+oK0Bq?-E$Z zwohqy3{sqZ=TKRH-C@`9_w}y|_{nbf8a3XTaS5I`43cg_!;!}w{{X~S)m}XKeL1s| zT{7_`0Q~UD$vsa5bT#wG#GeJ-{g!EjFC;(_AP!WIPCdB%tD+I?ILP=D#FzIv4yz`c 
zENZ0*z~FI`Ivz((lvh#lyGAyiJ)RYAkuHCD-Ew_6KUyoAaBfWZDXwTIq2&=G$0IQe z7m!pqCnKJ7*mXayPZT=1Q$FvJo8<&&E7a%HC!TpcQ=+$186`kjHibjc0O-V@Q=Bi= zjT7F=w&rU>eo_0pjP>Z})Sh;Z+_$B8-A~nhot4=8xgH(STUxR-MMJhmNn^l1t)9cv zr8VJ%Ok#|Z>+(sC0g&~Ed4z+@v<3<`|hWU59c9Uc3+D`c|Lp{rfRr z_@3(DU-1M#Xo}r(wu(VSTn)_UsAebbuR(x&iu!9x(7&+Y5yG$uHe^7MsS%OJeqT%s zl4!k;n9Xxo-OoFINz$r)FP7JD#8YYV{uic+LrY8}`eQ*SCkI8Z^%b5mr^Fb8EyNCek!bI(=hOzY)ffe)D{T2ow?19AFHdYsFywp!^x) zEmm9oG)VC+!9;^qf^zBPqHn4Xo(bfmRDxuZ{gVTKBBWD@%^ke?R|3^ z#~C0-_&6kywEB^by*W9r&;I}(f5BA#CF+_y`WA`d`xt{tK$hl4X&yEQ`J?o+#6KIf zds`1W3&d2B*hl^1Km(xTu;K$rqKB$b^3rfKabMCHuc}y&*L7b z`5rEKzyWsKTdJx2^Ug+VpT79<@n2AJCD)JjJ){bo@Edp}oP9gj=e^I4JX3Z9T=;_aRa})O?TG^9;pGo+tQ?q}s-23Aye;VTQ{vMad@s zjP&I7u1EHs{kgsj_>bX!t>6y@YFFB=_N}MMY-hK#XyS;rRF-!NNWti;dt<$PPxgr< zX7eRh!6STp?(Kj(kMJJ!LFTFg7)SySlQ_X+fCoJDk@yqX))L0fS|oSS;Oq<}ok>bo z>9YJa{7hXl!M3_f$njrD(mY@@uowUnz#~2KezkpcTX~S>nBj;683%#M&vX3oR;7w7 ziDQT^Mou!Ll>m*|4BPvO4j4ZN5N5$a0W7apQr1OO5$?V`f7M8ICv<-M7j-K zG8RVPZn#tgxqZj32^jal_vg~4hfcRv7M^kz$SkUW5_rxseuF)~I+E(vOLdL#s98gw zpKy(Kl8Qj!ki7HIny$+ zi6bEAJ^OzlRwBAs9IHHx+qn^(0&|j2vmQU)T9Qk&A(3qniGE9hc+NmQG6~Pqtu*dJ zqZ=9ak?HGm1;N#xId?L720xjya&UTmPX~&>uTOOil&ZH(@s=ySzUg7e!S9YeF@y9q zCW6}Fu~b|V`$jX2jsZRWbL=|Omk{Z5*hOyAlZ}}m{G%KXsr@?oR`lv5xlfMHa!T^3 z@f5XPSNRouJr0*G>+4#b$(14i{KMur#?m)n1Lz2?%@WH00LLCFx6>KPp&Jnj5(!dkB_fWQl{J0pI83IUkD)hZ}8_&w-&x@ zT*v0`ctl=wtO4af7yxn4zcut65Y3_Kaz;=3P17GTL6X@1=pV|P;SCzV{gs^wXd?kj z9PT@EbN+i)k_a`A7aLOJWMD8SfwT;fp68F&x~7rh#`1S&ZN-W~rp9Ac+7}3+d1pN1 z)c4Q5Qr5H$PA7_MxmB(if$~FTfX4*-eLL4%a{znkZ`v>nd_=52)hO?eQ~FV5Wim|C zx`vukK5j`-obJi*(0(1ccETwoR zWy?*Rj(RA^diJ?|5o>y_x)Clg-@M6CdFVQ^$*lcf;q{)T;SHk36+UcXRd71}YNItK zFM2wkD*QwE<~$7|EzIra+1!IPau5TfDcs(f>BVtgHvPLiC2QcfeKu#3ca(ro5O~fQ zgUH4?9^)1CKgEC9r@=lr)9x2hu#e1DAsuC4z>siq2Pe?iz+beVz`qgvHt_`ZOQuTN zdp2OXc}#$T-PnK!wxtA5_v)~P1TYXnH0N_not zu*!@v7{I}-n;+YQ#!$kxH+JzDM8&WqZPAdtu1+{7zij(g&yqrwT}*Eg38TSHJLx{+40EROKEMYTaD#&Ie|+ugV2UJ`W%nOygjcGgiP|5RwE=F z5_cXw_{TkS`P46Y1Wg;p{{SkK7*KFRu{@tsoKpJLZso&=a*BE?E9bA~cfKY4slGSs zis^mv0&v2OUA?*7N!;^-M;snTae?d7s{OHH zF*FgKrI056GmXPNvM_N~Oz6O22y{i-Gr<8$_VzU{wMp*HUR8mQ!%8aNNTW82?yB>q zS9^Z*X*oPG4p0P3rrbgOD6MBJ)XR5RclE_lzj zDbc_VR!Ns|+yHjK<##SSb>jx7o=M*0`kfZ6(y{6v1pfeNZykI$zO~UcO>MOeA|@ei zt{XByp@2IdxWODZ=U;1Ec;8CZv>h|XnpOFAmL(<7eKGXKes9_tUUf|59G%(Qf>dN@ zw<5lY_yh5SQ-e)Uhx(nNFiP94t{(VGV`d|Pl5z)~#wZtX?$xWnOz3!IE_J^R;TGu_-ry?`iB055N?aK8lO{{V_&-gYE|gY$Gh zjd#ORS>rbF?_#4Y7q{2ex|(UGes4STGoRNI;@uuaxhu|gGUb0h57V`IPsJE+wH+en zc*JWf#CRw`VmcpO{Y86rr4)18guJ$SBPRr?$wBr102<)Daq#itx06uO$(kddHr@VU zJ@MFjitME=EORHzBl5T7uZb=t_>XuJSW%HxWJg1lVVsVXSET;b9{{yGkBhgr`W2f9 zZev+l6Ke%>STN2#Mt>S9I&HnrtfBQ+MlHUG`H;-hvQL(DU!0c5%zAD=QZNViRMmKV z#xhBAyTjylAy}S+*d8)^;c-$0JB&=Mhn?gw$RrL&soV07z_m*>XsCt9L!2oXQ=gcF z(3Kbi9A>_9?H{RfDQZU229zu+tP=nhL&+zp1JsSoKf0zGPGm`{p)~E5Ak4OcpVn2{3!jM^(eJX z9`9d#%W)ddab}CTt!>K^p~?O?Ic$1kHS}+W>|*}ZSZ!gLS=&qH$0Vu4o{Us<$D#E4 z(v-PnW90LE-Bi6Q58dkf`7a}X!QK+pHEVzD{T|9I3rNT-3@S@F0DbJ9Nh7DdciK*a zspvLo9mbZ5aB&;PImtawUw-x5d@R#6D_tsY5NdELsvr;|;IynrI2{S|1TsyNvl>h|19X4K$D)xfcvcX4}Xdaxu_z z#Z0<*@SeNnYH)~?cE>J2I3wmG>(f6|UGIjagGq&Mt|KN$6;Ty=4Y!<=>M%20Pwj`V zO9zK^{aZ$|BI@X*NQ$}nnU`x1z}KNlnuKm~PF~Wgk8trvj4bp~eW+bt2;#TXywXeI zTPFcm{PwRuy8WiKZw6Vem#g@K-GQ`WCM5*M%6AaJbR7pw{&nKNwfF4-CxY6`z}^_T zisw)k$v&m??$}j|#S@N$beU{-3F%)fYdY4i;w?_&SiHP5O*D@4%H&|MJDHC?3j%$W z6m&FH#KM!d?D{OLg|Qg=Dyw|4dpFnQd$+|O+b2+)#5dZHfmtMln-E<}pLv^r!AQ#j zb z+FOa4yyVy;QS&Ll1OP|}sTsh>UTe;)QdM53=(%oBh05tQO=|6T-rv^6*l%4nnJ|h0 zC7ABQwVQ$ed3%mzlMgpGM^a`W%Z(6LE86?Vcvm}zZ*|cW>cO;Mi z^r-H<)s^FDm0feY9Opcrsm~nxR%+HgCo3zOKiWlpS%Wh%06QiMf;x2I^Yx-T=p7`M 
zb(K+3#`1cqjFJyv004iz$2FR=wl^7DB!UL)jAyv?>Bf1f1EhB1KQY!Lv`D1mA%13K z^%H2Gb_Y;C#E=A4LFtTASuEV`j0;H1cah#B#Z)1+*8T@H2 zqKv$0GBU0RQH+4NEKlevMz}WiNQIQY*~lYr0i{0MhZj3zg0> zyz$e50)-s&&*cnK$0f5zAvlNt8MwjDNhJ0s6(5(4psB{-95-={lh5Ed^gL7i$8sk> zI|$hDxdm5~k6yhgHmoGB#a4zFKfE%30r{{p2V>j4B(g@x8YuQME)yrVbC3^HxYJ`0 z#LDhT{{U9!Xgq?Yn~$kc`O=vjs0oBZn%v3hT{^=lfAYgNmo;g3Mr3)d-gbGv< zBW=h84_@6xA#$~%2*a7%0DRlGj0|V)jC*6B=Sw6m&JoxU>$C!Kf=}iM13!&P1U9I^ z&3BmjVi8FpcWpnZ_s%JP&%ORzjN<^4^K-iZ->lzNUe7KtvTFUIvNSz45KPk`g4oBx+ zh8GnYc8vb>*j^3EOEyab1`XG?v*1XjAxFx$sgwxrK8G_ z>I)Ps5=4x?M^VlIC({F_5i6Uv_c2KQ+Z*@i;2J&a7No?sg*LHi1N=W6a^4bimF) z`t_}Qc%z!yTc}tK?igo3Gjoi44!+gSr0H{VWh#mzj08Km$mEQA(jMh={U1aWb|Yjqv_GO`zj+WoV$Q}=acSh!2bYh z&)LQ=jJ^iG@cy4H5lbP>tj!V3Y&kf=9M`^oXU`1Yd?~*pl{qSZ3e3E>GrOh279f}8 z4hcBGALpf9TWgj@)0F7cRcXCdnm-!;E&MXoz7=?~^TOUBngKPla?vr^z_xhkNjL=Y zisZpI=_p8qF+> z8ZsHl17RHj&rkF3K`p)5MzhTzWpE3T(1CzdxAdt5b59g!BRhii$RUuBG58va+DRRv z3ZEz`l6?tfTzU-TQ5?$($14(WkseNSg21jvew6lF5u;ZdGEF3GRH!Zn5m)_!NAe?| zO6$G_z}H_EG%Im3sh0LWgbu0*=j+9BxNV9{1pp{5p2U>|kHC!ft&arim%0avG`&ks zkyco4?cE%(7(0rPPoc*(bgZf)lNkq72|97>7x^EqegnIL$HLauu(-~$S)Nw?P~iF~((QZ~Wbq<~I3lU~7Mx6xX>R}lTt zf>eMp{p{zZdop~nx%fPz@7m-Vwx??gvd06m4j8+P;gkg za%$T>ysq2eW+jLq`g)4X@pg$}aif_awF@HhKm=g>cILFDQ8O9AT>0zbuYm5ayk$Mo z=`uR9g0B$ySY+}juTqcu9_0ah@e?P=$lQ=c718Y%Z@|(u+DRXa*6f#Zv*hmFKx6IH zjy-z%;)!K*8{$o%s04-M8Tm&&2Uc#_UiABA`#eR?M)FBal2BtLpTLa!nvNuvS^UeF zB}-*dvIaXGckCx#I}j9nxyiL{P7s4cnM)RF)Id*`((TbL8hkYI+|$ayNn1}pD?KquCo z#W=oOErv&Iwz0S>!9hE-#?Y&h54Z$Zs{YC!3h%xqd{~0sWQM^i1G!^>eRE$={>3p_cmnQQpA}r<<500%!3KB?&5%gP zAyfhUt6B*;^Gl)Q=eWPAWs>ChrK0_Q8ClF!-mY{7Lb&?RPZtExHJ;@P`Ph zdvxudduF|EZ-_S9L{izoF6iNjh=sb0bB}(tv}w6E?V6q>UqzPzANd!44}8r zwHFKLx}~%`2_Y4R2{`x9wkvDHAGEH8WFuSoqm>mv^E0^PIKZzE_~YXZ7f_Q{@vf|` za}J*XNaBe%5OI>AdXh>10QFaucpp^J{CVIwwVPc+;>LK)&vzb45U9%m3>x$0C?s%r z#{<@gIjJYm?RaWfiZ}a6`}f0MF7RiHH7F(1r&(jUXHy7om=lAvkEU_d^sf*90D^}8 z)E)@-)!O0l43gr5^!g7yCZ32L)l zt1G!UgameF2ua-Y)KZ|#N96mSrpW>C>`%MO+c-HzvOr&hI2MWL+ z?_{z60C-n!BBG^FN4c&=lT^oMF;x@fj;;RyUEYT&d3krKO4paNNi5fUURfl|Hd~Zp zPgArO{{Vpsu=6(6MP1nilo5p+R2IkMv}5tAh8Oex%mx!;hGKBRheapY0r*vwYd41= zHuNl~X~4iaBmV%dE5>cMe#Hp#t0nRlX;&l?Isvjw2(xH(ekqVD7w-P?_^d#fc8OCX2mQA4TRaH>91E=HuPf9;Qwvzx%QiD3N zEJtw>+bL4%eXhs;1HyTP#!#-GwqG4B2PGf(iEk5=XC04t;4hlDH_PBuv*cEaK(V zsWP*E=nRtGvyO*>^c7-HEctfZRh=B|2r>(2oO^SOpGu_0S8x=N3j@ytj-(FR=qkhz zG_y9@FcKJI-Ht;$7h*m3gU`Je(!mwXwcK<9NC0$D;TX7X;~b7UXFlDz=|Vb4vBe{e zq!S|N<|>Lw%( zPm?*uS%AsMZMi)tM@LmwcPg$F1>h5$V<)F>aoUTyTHO}lzR;0@<`L8?jDUWmpUR|# z=F?>ibGj)Ssb06FhT47#nX9v^}G3}aGnqwh!Rm*1q4<9oY=iiUQp~;b5BPWGs+Q;tgPJoZE2bA$Ycw3~)IEi~s=7^35yXLBw-pK#o`&mjE7@;!;F3z-)T;E=$O#{(O2kUt8RCx}P(k0b)bhamHS z2RZcTIimMP#Uzf4!~P_?(r+#9?OmagDHCg?gfJnPk_hByr=~ks(ciN_?aAYB2WocO zM~d&Iz0*&YcQY|`Ew~a7U;yZP0beZJNMV&ySTa_0z|L}|GD+b2dV1CCd)c6lFR@PQ z;75s4MqYrGIO8DX@m&;gGotioE;lT~<}@8T@lVgH^gm(#8vJzdUyL+}uIzP~Ct0n5 ziXh{0EWDHGe=6-G@pZqFC$yPZ@nXVEo6|`hFGB=+!-4|N#iWVrC>6t(=`sYr`yeL zQ6pe~x`#PGQC#h6R9=Ucd{OXjpW>el+-drqyl))Pl#q4Y2*DX9zajqsYLA3%@OR>^ z{{VzlW&1zNEu@kvW6Ea8RV#wn$sCTu*1u}JL*bjR>}h14c%nRohzyf~-#i-p*#7{6 zseC2Vbo-4j;!HN?85&=*TuG2QCvprPI_J}^XNjlnXg=urUJ=JUCTyv&nM7}#3 zao-?a#ASk#nK&eG$y|3MJv}N*JNLD|hUG$-vJ^1mIBq(AhN8Jw1c1!GRFWyl85v-} zW4G3kq-YR;g-Ik5P69fxBNWrwqH0G+9*68#?6u-+eM{iChHmYl0VJ@Dq+sOd01{1l z{P#v{0wV!S064(OKhC}!{{Vu0{?z^jxA;B(00`fTb-3;I9UF2*1hS0Gjmqp9$vHT{ z_OGvg3;adX{wBmOVYZeOf%4GIyIc{|gI?|9I3|7(kHOW!;_60AMBm-e++8B#61#25 z!2~usRC?Z@bv3+;u$~S_wkxi=NaTUGkDCN~3gf(Mt6gXo-e%J($;J+Mf1baU5lJl( z!+V}xu6SN&)TDHMpq>LaG*<1~$$dGxiZ?3VGQB;huA32dej`QYN%Bbi5KMrd;SITf z_hW(d%}Z?Yn2fSqBJTy+_`o>c=jaIv2ens%4bd3lV$wXhE>{@9$r$ti=BJX{ORGhI 
zxL-Kro(BLHB>VASGo_E{NOL=Bp({5pY!cb7S3Uh9V0A2=aNoIKBu)! zY?krhMvAJ*3NXhxY_Z8ciU&#_C=Mjpc9qJE9j7NFAEBvwJ0n+5mO2r}n0@%`&JVBn zSJod4A~(JY(fmanqO6*%@mnC;50;9%6l8h;PvR@(OL(4Xk~vi7NmX)3@Sfm%4slm> ze-Y~b9n|2TNz`VBN!nQ%#Hb8O3W#N-oztjOQz9E?xTCQdR2Fb8r+J!_Nrjr&>XTA80y()H_j z+DmfgIj)x|SpD1#f^Z4zUp3r(Xus5aCvl>9vr30kYpX$RZv<*k#I5qHmN@|D@#47a zZv^-nO(r?@Um9w!cIl%dM@KR>OIW|Zot>As0I zt^WW&zioS;ReU1&qow$p!#Dl}Ulv(QCy6csvKUC%lY{y;gTK zUA$$A?MHkFm9TTQbB?6eZ;E_lulSbu-|N>-UGnUPM#M>-gO0?E0DJLRSE+`TG?Li$ z^8Odhp^I{eclrTLjy~an)QA3D~ZE8`CxwVsd6~ru~E;^Q_)k)GDcOm0CuiG z`JZo0WFDvUrHy=<%fpSR5E4%yqy=S}OAXTBvMyz~}YIqUkX>D|WENW@~rzBM4b#k=!mn ziBU#JT5Q4ELK$#QKs;j^!2|XDs(4InRU}i1B#lV!0Aj}j=qUzXaLVg~z=Ci{7~R3g zw_mL*=xNEbAtBg=0E|8kILH9w-lmMn4xc>nxx<-Ck_SAjN_qDur@c91DJm&ITkjAC zIop%h`FrMwE!^4KMLd~b-40WrEY3mn&P551oK@Q!DI`l03^bdg#9SLV_l0A<~lG@S1^Ikucz^Vyc;kOqfj)dfK zNT&$jRk0j)@(&~vfC*`PH2hxa8yv-Puj>U+n1G(PB0yykxu#6H*xed*@c%*??srxfGP5>+v6M^3~CDR!U zhAH3oj&`b??cfjO1xLO}5Cld5#RO_Il0!K-!0W|N5~DC7JhA3>W4JEzI0wD}{ONTo zlSGptou=9Lg^860aG+$6M<*|%( z^rU2MuOFF>%D^x=A-9Z)uh55mf9HYyiujML8 zo>nm&yM4G_zn4FqPa?PP+2U3SEP)YD3$E1!dhwk8KJ=1C%e7DqfUpdBBxkRtd(u6` z-dnkDQDt!083mYe?r2Wo7^z9FrKFg}AuE<+bVD-^im?^rBRT8%Qmn2cXdsQsfXfpx zJOy3DKaX#vK`O}8pDo)aN%tP6zovN)^?HmCvnz;G4cav^w4If~K`d=8{2i1;LYPd;ygMJ-ER7 z*XfslE^l=W2?T(ZMgSJz^y)vtzY_if`1?}$fALF8@fDP<5Erp5+nH2tDiMJvfH}^3 z=D%fr0qdXJei*jAR}PM<%F&R(amn|ue*;lfq3C`q#@TwpX1>-tO<(8K^=lYc%JPy- z{Gb;nImb~}5(S1FDNKWsef@dns%cLIaka>ooC0~l1oY$9rg%Kn^QV-iIb0m%`u#nt zN$z}#G5k$y_Ovjqx0@t{pPQnLa%$*RbiDG2PeJ` zGhb-m8JtL~Dx#<*WL|PR;CgdjKl?}c20tHoHv7YucPQ{(TbSkAIL9NA)KiV67Z!A3 zYP>Eg6_?p*pNThKW+q6pkrS3&9FN~CbLx3CvPR^|ZWOTlOr!uY#|5$U81^;Se02C( z;xB|gKGggb;vG5RcwLRNym%K77WpIxqXGvw=s^diags{!6G*2F%Q<1w83U2+Uq4nZ z6KS8&m|C>)czD%=x{^&0Hv4ZfDA~hBYR4pW%LeW3-lMrjNxbM(mvmLhImqjip8oYV zp$t*UEUMgj1|7ZIPnEtYH~G3m$mj_oam;afCn&kVv2sFbE*8Z+d;iYGr0d>CQfb zH0x;5Bu5OyVZKv>2g~V6OSurYKY!~|4N6sEBd;agJb~Dp91M_1EH_T z*1tm_oGS+NvUtbvZR^zM*0#JK@hisO1iJf2h5SKtVJTGo<-Dw{c8`>taD6$=c46^K zoOx!nK4(7Rs#&a+N^-Q)x0AE~0D^wk-uz^+(o8YEr1M2QfkUDLgX(>&BjX2*?WNIe zTTfxWc~@%m!OuAztMmKd2mBN#SBFDB7x=KV-d;xI?D~*=w5mz!I8Z^yBNe;xKlc9s zvCt;Pq%!IkNfGj(X4BcM5jaqV7y0Q7~P94l>;(d#S-{Ac3*!ueC#kN+C zAxnhL&|h)qcX})HgIoUqf`R;8)h~_Rq*j)Q%Yh2TzZ{-PZb_n&#o_+|nCx63npAqzljCpU#Ra!+j6dsY&iW;HxZ? 
z#tQCfNC%#~V_J|A_t}6##UoLk0K$TQBSjS^i{6!()Vh)wh357IdD09XK{!FpEy-01 zu%IC%G4eT7MH%|#iK2>A_foLsIb4`XsW5jf$ZLngaq=PBxW`PIuRIudTtCYISdKUw zhz?FW^%PM<(6-#E>)97%h&l5gED!GybDSzJ6m##BP{k0xk$l6EcI&wxkP!)Dzlj2h zDPLA@z9r~3#U!|mx2Q$9ksAtnp?(Lq0MwF1*Jy)`%*FZV@5)L1fD}iilV0&+1aJ(HxF}Zu5%7)+a_+1GCw`LT?jd{nOyYu4JtDm&vKWB>yH08W%qQ)?qv?%v(k z_4tqmFBh0V4A8Ov0D%AoKAhE%fJnCSFEQ>ufQ5-b{CT2^Quk$KOl`=lM+{I~s;dGB zgN$>~V?6i&02-EONUi0anC%GV7z_|XfH)r5qKa!++;MHxap|Dd{o}l9;HjGo0`=P$ zPJQVjg-FigNfDUXamvO=BkFUSD4~1Je_auydJ6W7<{>PhU87aP^NbvQKl=3H3Zid$ z9By!Swhj*6=aK3CXrii0#8Qf>tcatF%t&T}<0X(VY!QG-0REKaFv6& zM>)<&9R44L6j1ed^Zx)N8k?bZ zJZ@D-%m61K9!Ecq1krA?JbTQ9je{8DjFNxPwG>dcxZ$%IH!2HYGEb zcp0Q(=NUQm-n~7rXri6B`3Zj$mF!I*NW!#k;42VI9^m7QeiYT%+p@wW=Ss+NQq#V5;Ct$kg5iKk2$5`qnCIfB7B<>-DGp35W3tPZ|6aw2xAfc_*|URMN~BB(Fxo@zb!PiuAInO7dMSet*Ywp**&n z+SN6$s%O;J3S_!wkc?wF$4^c@f0b+qi$)`p<_C}udMK|>XXQ~bH8}1T;}8V-m$@K) zFDo zjP>d`uZpK$ULd&I2yS~3&(?}7&&*PIh5dIwV&JJtStL{F<@t`fQRWMP$7h=v~3Sg#se6m)n320|Uq)a((EcqDz-!t9Nm# z6{J!@1b{d^9x?tD=D0;fd}k4>ZUl@RkVZW?qKbzmt^IZ!-y~$w8eF>Erp6$<6&&;^1AG9yLi*4C z^RNCh6y*OrDhdiR3K}XJ+W!o6Obj%1Oms9f3~UTctpDug3nDgRGpqf})c0CvA|9uAaVurIodftsU6j-NVz% z+sD`M%h#~*h{&kuiYN1?cM#KzyIMv0wDiySTFwnh>PF_*Q@`&0s4QqkY0Je z3}gZnRJyllgzq)c&0XKn^Mzm#$s`pv^kXvcYaxg&+~%-=i~>7Mm;XWgA7uY`z(W6j zA^U%T{lB>2032kbmyd@`0FVMahNptP{5-yI+5s)D>AjNK(lXl_`H;iOKE>Vu_HE-K z3a>LNzC}~fk6@OspSn#=(;DqJp*jG>%wGTVck@$^DTIRSpcLRzu-SA|@s5ei`_lk5_p%)x!9-KU9^PRw}^nR1wfIR*oO4}s2n}}ib+Pp*>vctR2kgaq$7o+ zT52>c*|aO>V-5|o4n2IQr;vdI(!N5bM4FMb!SafQ+F{h!3;g_DrfFdyOE_Jrc=@N6 z8^iOnJeI?VUdzsm>)7-KZUFy$fNSNfI5l(ph!R)YPWUeHt?Y2S773?xKg*<+FA9(4 z0_3mtNFHuaNcla-$7ug*=}X0yvmPZT<4Rg@T9I*hFYns8jR?x+B$@IFm+{SlbMNDf z?=Ee2jBYU3YtupaM%r*~eYm*noZP_-im*o5C-7um-FVz~Q$cr1D6iLN=t(~2C3ivb zJ?N2!EB}jcmAyA-g1fuR8Bs;Qm%uQoVn7+1MzOiknT_W}KFdfij&m1B#W9jc%xNS~ zjSS%@6)s(fGY5PU9Im+) zmH*1?#KU!iNpPN_5j{Ucz_rM0sJ9*g0@>~7Z>*fXtMlJ|1`O)=P(W^5Or#K-#duNW zi^Pwrs5eC|MX38JN;nIt?a3_Z8g&6BaP1oFtgJ8N>N|KN6pM3?T@=B!CdcW*9p8I9 zI_5H=BU(#mXREw>1Q$tDIbJQVdDnEUBzf*Lc7uUirL*x}vdfopm2%#$oDCyEpr>a* zMtJHOw{lYM5rzJDiynIRR693+*wW^PKi3SneIdK-8Bi}9A3O>Vi!ZPwZjB(^1HIwr zF7Om!M(d8F8B#$K(7s?H{Szz#~W$6RgKFfQf`fl`y81RXpByxXnCGRZ*l=MfqA5xZPsA5wJh zEvq0p9i_duQyJe^*XhDktP`EFv`4UhaTdDNL<{4z2s=47PvZ@5@-`y^54tvO(!gj5 zjVCH*m&I(>p2q!?;&v0lthfy!SbaQcJ`a5zn_+wJ0zRfE8>~ZIrrn*b1H>yeT~Iz3 z;|>vyvLuC}et$ojrp)V8(q2np45|I+vDSVxA4?VZ8#>)hu_sixql&DG82t*xaM*s@ z`l27_k##O!4c5VQ)W^TjTWtQs%jak_;HZN&RsOR#^cm3V94bkvcV8@bIXf6bB~L@K zvT{t#d1*Y^4J+!fS4U|DCjRLxiYr&N2#cw1WUq{^Q=&#fkPJNokUh3APbBf~4=6{5 z8{D15B}xW`zLD9)Z8+{{Mivp$6Lc6)e50zqZt)hED0}2L2ZmhA58g#S&`zgjw6mVde!`v*WI z{91S*DUT0$b16v^PaZfsAO?kTpF42hG&oio33e*^-d3-Pr5wr|tj7?mSo4KojUlI* z^`_Fi>3OI3DA#$(6dtpalUc(fuCbg?)|Gx)(*At`sB~)xG{XjlDyBujv6&Aw`YeBA z-!3dmSp;J~1Nz@>BUpa5RvMp3a$kxa>vmj(drEL=`?&pPkPGDMK275tB_dF9-dEOB zy&ZF6*kzvWr$GxqFM>aU5a)$^o!SL5>iC8VRg<m%{M z0}-Aj4qVn~{KI$Reazq9rXugHSR6+&`8ibi^I5J^$gU~=Gr$h}EO=+L5aFk4Fu$PC z`@mf6`E6^1$kvV^oucRjR3Lod4CS)@Z}(m<%>Fjp9kTdaW%;he_=VDR3)bI z_&4kk@{yeN=}Fa@_XNAa_=Y}W`Uus$gNYh6NDVM=;@4M_~^$S(4O zadY7nPuwQQr@KmZ;@H*qA+;Ccc-Gj1!4)-)wR`rk1XrA4m(ovU{(r$xpFUV%~NB5JK%H5uyZ1xF`tJ*4-b9aWVfcx`L+4X*9S@ISvi zjd51h7=*QISbSD=C*n-_eoYMpOeXg|LRS@b1N~(Xd9Wy84DrF&Z0Gd9kp)xyBm1&+b#G;t=194Qp6t4Q>XuD5ge)(s zbb9ED*n=wB6SC~$ABGv!5&7{?Z&!Q)nYRsP0Js7QL@7Fwjoiy!G z3;50}CuY8?D{1U-FHakacx`KCqGP$Gg9CM)`kuUAD$q38zf!f9=H`3)^d*-kXzuqz z*`mi!vrsx$?BRoEWrB!ouQQiAJ+yZ(MB;b(DK*7qc=sjW=KR*cg3-o@x4g$*->ZuI z$nd!}8~J{ba`0M%kpX>Sb%vS?H_w3ACwH1)DTmY(T}kcvz{2HO7BX8o2Cv;d(kme% ztyk&nfgJsttobkr^btQ$S0m)FKn8(H=Vp5rGN1AmgX+V^d{)W&w{*wCNy2_n`w^mV 
z)eA~Lp8@6d<4sC~t8mRF{=)C~nwRoi!A8pgY>1Y>LNRh31w*3hw-BQE2^mKotAX@r zZ0k0cq;t4_V!;LiCkW|MGRy|y{~~uDAlq&Zl&^x-f(E2E>2HK#ZdzQGV|s> zaI8*%7wBI~eN`){@%aHZXuU06+gdAeqGi8cD^{RQpcff(@eL*EPFmwRFU>x~mnF~=+53Rwk8J))N`R0b_y*bN#+8UEB7$!}x#dEB4K^FB zYJ4ZJo>UFBvAvgH9y~Ht!Nall8c^I6tNi^CW>?bE5zndoqj~N-2p79AO=9YC^fh z`K!5?=6>jFQz%Q~3H~$p!vw4;XEXf|+ax9cx zDT77-&+y$0A!o0NLa-X9%BmKlaKaU4VqN$*&L z1mio@dMWD2HZ5=iFCHY>NSl8+jT~9Olh%Hl6v?ek(3yk~EQ=@=52C+BOR>apAu+@y zoH_n6oCz0hYj>IY+U#`V)teqY1a$krV_q`0=V3td#oCzO%o<`S!bI{T!Uk(VmQGrV z4vx%Mw=_aznNM{20#a|QE8Em9#sAN#mz34yT1*7OSZJp z1l*2WO4HHe9gM#0{$vP?+vwhmkSx57iTSF;eV$GXk*5F|h0~7tz!h~YbN7>G)l2Tn z4ZFk1Zm1+@&UZyFi7tFJi>$T6kmPZ`T7Ab&%5_b4>1mgL&A;E|wIn81Uas=TWcN08 z_lc(M;o_DH@eCMv1`tF1T4MuL%w0Xs9iSXuojHLUC6}rK7Yv!#)J#?(Kmj^^9U*w( z5@b!Vm*1hEMFj&T%RefAfT>9(ujXH&#bJ_zDcEbUQ^;gFZc`-}H)}Y};8@(hI8I_`(jP4z$?kGs=qr7c664_}5rKGtLH4h#PF38w!lo`iljL8K7X;-=eH`J0DV z6O+nN(SwfZ8k2QQvKkl(sjjwSrWi0P-IHRe5q+W;xkvE6_rp>Tb-+8s*}*S^xcuUZ zHM#B1Q=33Tg^pa~0T2^F+H~xF2;1w}`KFQ*H1aK~9=1pR`>pz)x|@WH^#ZC|&cKZ| zd+f@R`Pu#Ienxk)&(7R}zQ*rgcUl^El3BptUAziFy<~}iQu*KJJ}v(6i0Wdfw|6w( zsT7xUXM-IsTsZH#g?Y7ygAZCiuw)K`vq}cqBOjXb44_7PPI_xPXJr?&EjhlP#7Y)# zMOsDuR!co0Ye5@gQf>d(5ajnosyt`;3~dxnUXu_oS7x-ofHnG9YBjN!(Vh#k$g~IX zl|o;_J?lkS*ZbXa#Kmy^8y}l_I+R$?I5Az1)~GMs^g`2Qo=h=kUJj`r+vI757dBsC zUVI6nJ4QA|@I-t3vlwevhaP7{iTn4VR9Z2spI`SmxBf%v0VJwPxJMc2|5U)Rlt^c|DDub8h$9{|+*dd=Q1 zzX%@t!jxQMtp`84NFR)etY;!qCiQQBF%(rF;9#2DUlO-;vF**;OHm46)>MQkjx$ZR z%O54`#!^z+zN4a!+Ohc&AbNEI=M1+yXABp#wgx5zV`V5{Ow)-Yls_Xe2=0J}<~8ie zo5rdmi&74j3cP~Gx0*%=CKJN^ya%5F@sh7z*yz8~Y6UlO$WFJae0{mWyZ!h34RIML zG+Whl*79^Aj_c2Wv$e_qdziR!vCrfvd0_KjU{)b_UeO5wIGtYqmyJF(3`{unhBLQa zA2DI0M4XvYmV^^YpMuU+L=B$X(Txkas(aVghkQ@l>ViB z%hhu^I@C*I{9#AJaOlkm$<9tZ45pIm6$i?*jPNL6m#sJl3c$b6^6o%}QeDOCh)wyr z_ZHN;CSYDaeU{i@ssD~V7lMa9TaVlqA0=v!P2W876zM;j&wF-mVcT^`{O*9*-MK1p zHBIw{tHves)61@akm9$8mD5Rs-$=e%ncenRC%qRlMVO?Ot*coy^FeZ)O2g=JHXR(M zg61E(Hj3?0BdgH3S7z?*TB%tYsWHF*u)?3NB=gLwsjcG%>Z`nsrex>}F;O6Jj8YLj zTd!@x{z(J;Ed}u5p4DFHB0$g>H7;!W94vYeUmA(trkG2t%3w=ELJB_ohWfGi}DS42$Jnrdy~3%b^1aj0^%=Eh5Sy(?sOtEcp8<+@f+~6q#cJ? 
z;iQ%>{Sz&2A`<9Pg1+3_E#DVL3j~|*<2~CoiB}l+ztDo-f0pJNqeZ&=q7$FCZA@aK z(>ZrdpaS?2s~oUEy1|g_qK;g2F`@Zbz0z-^lO-#H#(6u!y-RCmFD?7cDr2hW0hKV{p*e|Yoy#YImX`wiIUzRj( zPVuKuo&F$uNBEIDF&4+7ILqaQ;(QN{Jq;r1ksh%X6)_FQ_P;W{FC;LfRC4O zB!RM;Ey+rsG%2rE4`R(G%-WOg^&1My4}8c?1CNiK8ulJLza)z2CmJ7?fqb$TruuF> zvgT3OPO9lfB3Nm$q<8MZIxhV7Ygfq{Tjbb5Xh&6Ihz9Ry=KaLTm6%E(&k!|7+ZUs` zo%p?vNk_Kvhch1zv29k6S>K^hHl|N>?4)JDp8>Z1)yDpcOZDOsd)+*$i&72n3&n0& zjl+IJV+e?9acn?j&sGLxWr_cFEmnw3r>l6_gbq_8{RLADa-G>I06x8@Zkkh#UNhw{V(_Zbisx39v=W($5@TfUNl zR&w&Hh)!o>d|ZV68~c^8MH@D*!BM{})0Fpdxq?KAu9qX&mDv{B29or!AtS;2t|-)t z)mI&l2{ga2-jyQ76j%|LNwz0R;#uAACgc0gu8mCpcqN8zwC~9vA}ILNl0$V|4H3xI zI!ys8H>vj9l<1_|*cuAHSNC-MpzQPejM^I-PF6Tq%NQ;EmYp}779|9mufc(PewLdU z6q?6{KAP?a%yX?WT|r{Xe(zXu(rfookxEfd@7n>HnGM!^GMzptuP&VdxJtL5e>JYf z1Q~N&K5SdRhb2{bs>G_nu_PCWUiHAe9NtW&bqh4~&k5WEf08s=yDEpTXY>Sg|F342?Np0e*D{5(YAK08k2@kQGEdR4eIsGA1#+P z#!qaTHO&N!F}8;h?|E^Bh&sA_YyYF~`Wa(n9z9J}K2MI-ih1T0bW|M+KRmAh&*a?f>ovDyop;4KUKJL+mJUwE8~VaqtyHivVt<$pL(XG~6K z=lk}J_ANm1^*O4me62p{09trpdo_3Slto#eJ#7~rz(;Nx(&b(0W4Py4yBi}?wonv4 zuqWFTfBvrI+G1s@s!s)}?bsA8g zxCf+boA?SVk@Nm;Z#JSvP*v9y@jAl3!*I$3KKylV8jvodxkI6%4AM)-gQk9m&$ulS zv(I#8FbS5Xok7Zjc_et;g%-S2Oc~fU*vq20ubO!>M~ zeM#Wr&9Qe!+|u=I^}4Klm9;*-9evytS+3+V>wD|OxPG;bVswfv#AkK1PGG_AY0fGa z?l<5ap>aRsr*y_B>`HU7sq} zAi9!*&bWCBreVpap!pVzr+H8+@U18x+j~O7;a0xkaPx7ozZs*o6uZMUB-s%Wg1AD5 z6EGzvKJfz-)+v;2cB;{-*TP;RpF$`c3(EqevL4;5TjOt3EoS9H*$#Tz8XAoocLhPt z94^)q9moebP87a$clLHBIGJTJcNh zLT?OJ1xQ->}%S;epJdE4l7>t#l2 z$5|nTAK9Ay97a1=Ufz`oFTIRQPuTTZqU~NZKFIeuCzTVO;3=!77O^5f=mRB_!lC-ZyX; zm)q$8g{5o|)hcEUO-iiVQ8DABV6V~km~*rFfDP~OCxxHz;$nA#J#RXr=N#)NA+3S6 zjk&4IoT{_f_d0>5gQE&dj+Ws&7yE7Cppn*(R~+oq^y1D}45X<9-P(X(g=)V_X9pAT zkNu>Qn4wQ8M^SaPU&DIl2&X1-8e}JX58S_m>XPv^6Bb(z{#AX7XIXj!`osvk8f68^ zDKXis&j$>Dc??TsuiNRW*a;VYAp?*&TdiaDxpU$|j+{y!3#`)AY}lw4JH#f; z?6Om@KDSdlp2)QFElH$oNJed0-EG_n#EEg?N#B!PYVjc3-L(YKKttyP-T2MDsT-JT z%C!i-t(5z}wVs{Q7aA%ZJWH8?OFRr@NFpx^s)uf}UUH>}1PAoOg~&H%TEP&14iyNP z0v^RodZlcD%>J>zSetu_c!S_cVX=3Ix4j4Ec*8|*=#99kX-O$LiwZl=tdw;dw#@N? 
zV|oE%xj%+>oE88#d>}g1{PX9{-A*&lfRD**ihA4hhG3Q^e<7>uC`|T}uQjnXjpmef zmPUbIDmhDiox5)i3=D2tyIK1~~J;E$~*Wck-Jalme8N zcUxSEvK78MG)u^@j0XB}mn4}hCMw54zv;HyfNWhU_=;RjlTF|BZWjg<1%uV`r>g=~ zJD{y|bnWT>#^{Q=@iR*$N&DW4iS-v`8yh7yoKtSABFH>XSf>xVK1aKw!~Vfv;meUi z$3Zx;gXMNGOiG>GlquvH8Tmq$76zPsCo`fWv>x+&&ws5Ir!9YjVjx^g2SdliwUvw+ z7JaDL37IKtwLxDLa4F@a9JTJ!@^ZR+Ydek$Z~g@GR4ED6vs`aA+?Yo6h^-z&1VYkEhO--qm}s=;SdCxq)ZbCxTm(0$KN9ooI{!Mllvx<|BZ)J(dn2ErlhVW~ z3T@do!2DpDYsDcvO%c%l8C?oLK=nTO(sIW%GGow`@VEazNw;bNmybd%u<_Z#F!X%)maePVOZ7xKOClw* zp2dk3(*>C#zqCo(p8-HYo~SK^y7l}`%pazdK}mt$h1z7(UaIO*)J5rs-R=E5v(1~k zVb4*^(d6cAEH<(n(zX z&X}@B4b7^)U+af6H@)m>ZHN6F%L@G|ll51OwnixzieAl(#r9{iS2BK<2Bdt(4Jf$$ z@`Q!$k2geRyCd~dO0X_h2rRbe^U)nJlz>hYvN0wpk+T=6m6ohizo(x169OuA zCx;%->83~ywYIl*hBka15iKAKjQAzq7U|xO0xT8d8~nJ z)2}E3$yZa=CfS?$W+uCfZGCtF#+_Dw#;^-IdvG3|?p-f0`oVj%VNZ0%Dr(o8n@S+E zfc6E7-FE=OlI=Z)ABl3dSj|(VjhvN15) zq94lz*$4M)dg4Xj1w)OOTHQ}Gi`AT_8b;ZxfO?-P7ONFQ!j+K10W%!_PXrt> zc^2;&bH}OHm53(|r89zs(`y)jV{VN^bg9YT@cDeLDb*4x?fyDd{;RBBVJ?gQ(FI#h zo^<*l`mxNeD2Xxym-^-)H7qI9JWa)KbO6_1FJefvENsV+1A8TGJC!-b{qQ{mxkFUO zI%!3gC`_Jyo6M*uANhD{NR*WN0L7=lc1GMfN%3P$3*(wGIQF9ORfss4gR) zVv!C;r(a8EsHzY-G*@G5HJ_og^3ydDgA=63_iOx`g;J#`_sHY!DXn*GC$)haOUsY{ zll*dcGvNGjx(z6l{MC{$xLg%5i zL>tHW1}k5px5gkEzLTEjOla4M9v0fVW+ueU8@(AiIFlRpJ_hmTk zOozzAVJ!DX#V@iMMO`zp4`}cz7ZJfUNQFe$zv1+ZQiB+u?KGxrl@Ml_myFCS-AR^& zE=32R)@u&E3Q$SA)3Wz70R4${{%vfrf9uh|;6bX0$d`WZ{q0TtBUzOpIA1^Ffp2~` zIE$zOcla}oo3ZI3J6{_mRhy(z>4)7sEdLn1*C!)BPxG)S+#z(rp?q_1bwOQ=2$q!4 z(oqHo)^8r{6{SL-XyUowpf_tqA*kazNCO@0=`8GDIMLiloZQ|IY6YltH7x#KbJV#G zEUPW?En;rS(Rl*`IU-AXf(O5F?x#6W8vG*y`w) zsK}MJH7Z-_QukYF_Y=BEc{%dhh=1px8Af^r6aQ>2S8qyYtph?7*bkCkT2OstMwC{> z8l>FlRO(^2K$7DkJMveXoY`)JIITK8R)_0Cl(PCRHGFPEiNtRDnUudpq4d3i3oZX7 z4YEa>VwM8e$Z4=RAK#ZlfQkBNT??ES**=;AL-}H`7)Z}SB70=98*#DYnqJEiZHhrJ zfw;uBs^03)x%*T7)&)TiyQ3z)l2Rsxv~)(LSI{e492yip1Ca8Eqq0RBtCSnnYE#vWjUiVykCB6BrK z<~)0GW2lw_0C3uQ#F5Z}mt&v>YTscYagISbCi! z!66MxlGqT<-iXKGZCnMl)`ijzB?t4CC1x-NLGu;cW-r|emA(qEl#skI(!73IcR8d) z|0TDQdzD(=s#v#2v5?0w48AfS zl^Fk8@ui0z=uSoZIF6HoFRBR^r*8*)t8eyKBLAfmPVUy!1vU!dTcu zI205vuYXhzglUML*Trus_T0y8*9yyBZpD`@)_AB29sH)Kw$g6P4sLGbisGXm$8pdb zV=#|pv*yaHKH8*N6tMc<@wBo4-#x2MR(u?%

4olGb?ZQcfF>jXrWf!@FvJtfb6l zs^aInr8XF?B-s1Y&oN`j(7zk!-c7}DLrkMHjYPPheqJ#{=Uwi!FQqPjpMOeHL zBQYO-xg86{EO9c$#9EL2(?MBHTHRJq_Q?a~<|C-E^;7Y^_qJ4|33s@J)L=GZq19xR z2(H}0mKu0g$-{-)A2snOMDKw3Z>&y4!i!!kpSDL;LH9wT_8{$>F}or5SlVhNVaGI2 zx}{6q6o%QFchdNnlWm{Neb>+B{Fndu9IcApd*RHv*2b4griu4;F>g3JXWx!4b|Q%& z+~-v5z6lY3xVAGpT!22gGx@Jk!CR=hykwZ0!8UFv!Ko(LD=O^BpVx6MUU7D14}7g{ z$Wj((KY!K}Q&$vSNxD&;Tb6?`nH{)I7ae+H)H^#$gqtcbe=Cux29A0pT4!bWRmR5E zG=3gB>pSg2N5t$h=hgSunE1@Yt_OL=zaJO<{l!n}q$Ab>wvrKtDq*u%dw`PSVJU}S zWG^Lhz{}Z*u$tevx*yYSC?NiM?<|`;!&yGG6WpODEfUSFi{Y4uyz1zrGI0zZnD^nA zUN~b~_JpJX$(@A~Hp_lh^1tXgYx=tA6=TB5bWG3=DA6p`3s{D0={)|FA`bSGQS9PM&__}x{D=l|7_uLN_DgX;B;VT=B+(ma+n~}TZcAF`GqR9Rj`WKs% zDLK4lcN_jxEa^#Jty(+BWn6n56sVSMztDUuB#s&kY_|03D%Wcqc*GQ(_1yh zO2XDPn#d@}`uIYHtM>O?f8z>|Zqm#NV2DfY3>mVPrkPxsStj;XHFAc(r575|Tz9=n zAH?q>IDprM1@lDws&f{X3xHk8BY$vc;Ixq5^4&9&A-Ho5?PI`~$I8iQ5z41p*#1%6 z5q}GeAlyYXDwk#<0~vuc(o96T0qw{sd43+n`l`b9a)BfV|LotSiA>x*3Yjij&fT2m zaVRbgb)f!L$1<#lznWPbFge9dp8xnqzXe5xSYFaR&EXAXpwW(te8XP1aSfwXi%x&3 zMC%pdSWTaZ);lfq>2^1ChOda1GF%be2dJ3G}2=3K8>$OY|zgrEkwz}y6;QYPXBr~w|b}JaW%sx8K+vF0i5%pu|M)E zu!twV#}X+USthiPS7PCAZ+)NE7tC$mOgaG zyMSA~iFX^7@(ffFp!p2IfK^&MzAmlaJZ=R#H44_{@U}g8I489;m@!@_4sElCi)eNF z?PVgI2LeSs8ZIT$Q(WuE{rBH{@8Viqk(Q=eV{=+bf8ov2_>)_SWBi4WFRbp7WvQ67 zBe^RhP;|?`wC=AEc$P8M@}HMD!>DQFGayvTo`zg?7q)nAd+A_8+|lmHzEMnI&$m`B z@JiSQz)`jcGONF)8dG3_cRCUFUz$ zCAfr)&=5zwya+$cbg9ktO$R zu}ocpo&l?;ED(<1^vQ`0lhL2zblJl?9u2h|YiY+l*#WadM$&IM)WZ779!g;qCzEGW?XR%f}_x_31~?;iBPjQ4B9WbJ)N z6qNQ%VtQ#RQlWs9yWwAvv@#|sRhCwca;T)~zg%(T`Va2jm-LGJfs9wc)hMK}~Z#A}X%C)ik)rqQ$2AL)z<;wCnQcmXWzv(#VGrs{UAY z^tow5e~O?De;LZDgRF}oLFJ342xo=<>X$yBDMDS0V%lFJd0BJ$@l)lxQO$9b(AorN zn)>$#{zNe+`ksUMlTBMp%AZO`pGm1Df;CMOq?+#@iH}Bt&~udjo(L3bJf&{ zb794){TsLV75gofi_WFSt11jkxlJfqu*!dXciGyX5XEb6O4Dn%6_G8`Q+CPSy`x`O z=j_aNw%Vv?R7Y^GO*q`b*F=a-)nEO)%W0B{nrQmw4WFME-LR-_*t zE^P;DO;s121fBs{KZkeQ(-%m*bqUy=ba0z7sRBlj0YPV(i8V#tZ*&(ViGt=#U;n-! 
zv)kl=b}v97?_GBii0qcxoBEiIIMbWrwZETjrJ4ER#dl#N%15*7&zvjZKGy>YkP;tA zO!cIH_id~0cR3u_aSxWkk9Qd1iu;tHV6 zwZc1qtput`l>}227Hd&R*#FdQG}O@S07c(DE=a+D3*tO^FxE_H+vaP(`B%HspZ`18 zH+0@aMkW(MNk&u7-oU|AM#=WamLnnrj0@n*UgAZ1m|}tXAw3L>md%Y{(EKag6bE@V z^lr%*7!jhmSXZXeTP+y_TM?V@7J{hkvY#@lAtdqRf-#Wv+bE|sQ0k<>eH@aO@@Ewx zX^>dBkMoOKBr-^J?LJs;J_zf?i^DN6kg26o5LQATO&0p9VM-XFnVNod-WYBJq7goP z21G)g6P{QQ=?#br^&b~E(@dU39UTes^Ri8u`CxAPG!%ljdInZX(oDq0sh_JBbCMzI zr`O`^I(#1Glf>7OY5zJU`RqGb&eY2T3}U1g9#xC=!CGtmc6Zh8ClI$l^1ivL%1_G=haapHWaKUexNdU^?@TX64Cp~eT9r1qbu||UeKS7RgcRC^QVo-K(oYg$%!o$kU;Y!j zkLk8;zccBF!jYDa`e@GHMgd5qCzH1lU6M^Y#~Mq(AE)GSmAG2+1`loLtPDyf@>GsW zke_~*hFiG_Uw7}=i=@)YnYw<225vfJ!i_lkaA)LMwi|=ox^w zvAt(E%H&Hj$P*(;)i7UgI>ffK%Ht#mOh35kW;d4ia;G#)VcmSku0#>9*F>#1XY2$8 zKg{{@v#*Aoj9;--{`urueH{#<6rIn2TLEQCUSZkvN12nM?-|WA3p)F*CY59$LR`3EadXLs zn{?&I%F1IW+oY3D1BGLNeKazZkmb_*FMEQ$u|Dk=rQcyCy~&VCh#^SRA$n}A>eY8Z z+q!~9%?iqy6i;Ny+E)gDznkmxUWTd~rg!q}w@pRXMjcRh=&lLbNw5^;_kZ*ZN&s(Kde-A&ysf-9G zu=3p%w>{x84Zb6NgxV!voWUk~s6Qkpo_dMKzBBn78IU(5G?C15y_~_((kD=Ij*-yb zFLgPFFOq9s_?1!y&q0a*_bE5`gM{C<1YZ3PHdPdXQ&cIYB(}Q{6hTQG?`m% zu=+i^0=?FO!L3+*gcN~irJx}Vwu#Sh^Y7+H)=DC$Gz&pvjz+DVzAiIQ$ezw@p+DTwO>6-snxKR6R0%50juND7D6UQM2`;H7R)z_f^ zly=@boG*xgjfu=KHAr+=XlZps21C)2sSdlV@5P1+2!A`0U#NF?Ll*+xx)>~v3rktz zaW5rv5_8AkD6VCm|5?e`MGhGt^*i@i`Ug93KJ9fE^+;o{gR~Tji2?o8c6g8EI=*x3Pt2QX% z*~;3$&R!oSez~hPI(CRqv48!>sN=RO&mix6!F}A@>WBUVI^cx9H4fExVgtpAU<}aY zTZ`fZL{Oa1tygwQ%U>0P6Aj-66@OPo6xz6nVv(`v57#}UX%--*e!xLQo^lM_okz~r z|EV);z&+@W?@{oD>Uo+VBhJoaTeCC5p%mz!$|uq@bZ=#c-w;0o(zQf?k)SGJbO&k3 zB&63pN-RS0NFW&o3PasqGrsj;gC1{(_V2%A3(`ti5xPkZ*$ zE<&072OTy6*3jpnPE-K?RLLG~)^7DYkiQ_5r+lawU)|FumZ*A=xfpU_sUQ9y78^cPe=hfBNaiw{whW# zTc$?v-_kqy1-HjxShBCR+h>Nxt{y8hM-393PDKZI$DPhV8?qZNzJUQF*;^7=qf8Vs zd&g%>kS^=Fuyxy7O8VN2;7U7;M|`qciMd^9Qfw1{p}H}gdxY)1#`|r6ya>DYrOM8< z*1(pU>|z(M+xa+R^3mt7lG1dsv@0|^jU_ufATFn0<_JuUnNtX8z!a1&T6T3mf;K;F6hwz`KiM=q$-q ztM!$O3B>y_bA`vQIM5s_$38W0i!MnK1t=rDV!+j__beJ+1*&bHZQXMRtxQfIoVYT} zalBBxKMqY`Y7~3DcKe#o)sX}<054yh7B#K8!a>HcpHugNdpQNMy34>PwoAdK>Mgi* zFSw&ozl;P^KphaZZ@rS#!G1&s()7b2I1Zwm_Z0d88-HZ|I~`vbY%Q?dlJRydG15|A z(~)(3oLy8_rm?yttRbx5>9w)Sj#tt)uJEUB#3RggyhH_DuBYGB$JmwC>&-{3FYI{L z=QtcN!anMpIVI}+)kO4CA>z^3w%T472&)wzfH|MBJ$Bj&*9GZ1Rxgf_CK?Bfn5F_* zIIY=dv&*P?7HXFbZ(v(q;r!*oQm#^qiI#=cLOm3)E{U3TLdjJ~RqfD)l`0c z@oWJBonMCkHe6cavCVlxM0h;Qg$U)Cb4TRyDDVwr;h*1fyBMgJwtX^^FRe84vGFl~ z0R0vY7dOSI1I3=`ecvJ&qi~DXOqgtC+$Q(n6@-}o*|`*4e%X)rs+C&|il)BQ_=2;} zczwJFv(tV@^iBK0lGSX#FC??kwe}>i1e_8^;l$^tIBHe7Cn`+pXQJEc@sgI>%G))8P*~K~3vn861Tm-TWNRnscX&rExyh@g2VFS2xMVE>jFs_TjbUPWBI~i@ zF#~PTOUm?)6GOP(sJpSDq(~c19fg>7_M9f)+qsDB;@__1 zp$&ryUXJG<8jcUjB)%`F5VOT^@cbaTOPpFW|JK$)$j70(Qfkw!t$O6lsq1&kNOOh! 
zUopRDK-Q_~MO^bs=6R8L%)8w77eTu?(~pUq3a#R3>^yPw`^KEG9{X><7Z)GHkKh+Y zi2nu4Ks3Lnj9ufmOF<@^HPmD6Hmek&Q0`?3jDoIJlx$@PcC%L#9+9hAYAvK|5Xl9s z*cR7qq}(J|@j5$gjtO17xYZag3rNefABKRIx!8A7on0^Iz*I(VuWi9(}O`CWN5@A)&9T-54XTAbyV!^kyDi(MlB07!AWztZn)bUPhuOa|`v`#yG-;9k!hFtW0rIL_s;hBSEDnNV7J}mczkvTpcS*nh_1Ht@E(+E>-~Stus31 zLjbXn@{YOsDo+z8nc-wfb(rood-Q2-_%ZyGwrwW&-Hyl9fj*J503kO<TgBHdZFGxb(zI}*1$LHp3m5>9 zfg>zIEXBbXKR18DGd>l3A@~dNPf-23el1yCYTgw1d*Llw<#dA#etw~OExK9RK>=lC z7ZTpxLmF-cR}Rhcp{_snt^KLLXiwV{;$ENP`}?WRyW-r(rD%3Lme)(MS;Q8)eW=`V z3~wC&0KIH9%<@j^rS`ACgW>EpUx`zz3(0%QeqTHE>7n)&_?I7y!@6|jlC_)bZolyR zo|rx&_?uq%n?J)ZfHoF0c*9nQZ;E_e!KGa~$hZv8E;6x$g;*1r8w<}YKAioXz7c-T zx-W*VJ|Agcv-gMoAk93?{xb1Lj%;o3B-AcR9$+%KVv3_2M#`}O6?wsL2KWu|-%I#8 z@XdS);R~a8sOmSHHJxrOnKc{M`DK_AbLFa$981O&hQ>>0)o=A{Ukvza-@-~Iw(!=R z!LJRpjE7jdUObd!A->LplJY49+?7&WET5oK_ME)+H~hSp`R;sN(&n@_{Lj_@0NCqK z_*bm_C-C>duLyWJ_bZ=^Rn3DYu#fI5LS9m5%ms#E^bq+iSsX{C%$Y z1O5sP;;#YdHdFZ4+r(O&QzoHv9MjxErYti3vIRu`6qVy&J(qzVQ!K`~Z`fnvcE6#?LHJc41_Qr1>d;;+g+x3f& z3E3saxdpn;_VdMX@=Y);xD*T*Y5>E`l2uhF48NCgT@`@FMZ-lVnp$78O8#%;pP}14 zM5l&}Z{CYutskUX7(dxI#L67~ojzco2Kh%I008;`2e7M^!2C1#i>P1xD$_hq;&`rZ zWpqeQ+QO*UpE-~1A|gSJTLt#F%&UxyB z0a@NAaUO$rDZgt_md@tsQ*nz5PV62V7|st~o$E%pPmZ4yJ|Fx$@MY=vgFT_Lp46na z*IJFexVR4M8pD~SKzEiSuOV1#-3i_3mzN6J|)op zIe3Zx0BMao;?IZli%$voe$ZNLPvU7WE;N}hk&M=EXEDJVmJkJwWp-y^(vo6@6L|6| zEAPM`?Z8zeMkKj+*BU}BvJV|@uy*F0Xw2fy)kHWhA^T{mN z==QOQH^T{z(G@L`Rhtarc{h#~pUb`gcn`%NwGZtPI zZA$KRXxb}CrD8>@+LPRB>Ys?{3Y=B z;(x?jjU&Qd9q`5F?vJI<7ue0EyIQry+{?9CTnSz)%XKRvv$Ce$p+X##lc&izYySWT zbDd4PZ8+=wTho8Q_J7%X{t55m?~C3H)#ii7z9E9^!+s>V&^0@c3(aG3Z+UTQktvzJ z)SKgStcp;4pdyii!~&>k{{R@jX#Gb@webgqzhsE@`SnPR*1fEzwPh6ddW$;=-r*%# z7)g}?97Y;38+mF;$w^``u~tj)Ejm{CSKt2t0Q@=9uY5n@9bP-F21L5Od(yEq_Do8K zh^Bt>fIf0nvygslj?y=VuKZ^O&a-b7)X_zC( zUO{LfwT4ZKkE_|+F%IN{iO5i-kC!G&lK7uj{eD`LQmdsTl6QSiIJNk74c?suzwnY<&ougM za?d5hA@XGm(1;{O+*Ps|?qR?&F#t1TZ^2&;8{&P_!E<9b+v0ekg5=w<3R+E|1nSDG zk>3SD0ILQXC*Z!Dqq1nruG`6XYi44)x_Od&B8>`?u3rl1Nh|?549&E+47Rn85!&d! 
z9Ft1A@dR+sJdJNNXggz+5Kiz>2v=+sD$VkS3}g&5RN-K~>i9=Zhoo<)rqWC)1+Vw7aKQ zxi>K@$i;&!V+ogcTyIx9l~{lj?jZLTx^{(U@Q&+P@NK2^?WIL-+AQ--urf-LmS$u} zJNFcIW4n5bnM#(<7UD=l!C)?uRF<>8SIWXmSTO{AZlO77x`LUcW(rtKL;*?rn|`Hm5(P0h*Z z)6n$YV_uq4zB$*d)kVrHMR997l?qTc;;dDZC4nPrFD-(_a&>$|XQRX6h<+w`vq`mt z$9oL7BUW~v;xOv5tdDMrMio&4fsz0RoaAV0n*GL~;d_r8=z8tus8-h^8%+@>jSkWy z`$fnmB`p5{D=M%mt&OU~72-Y|_?Kt#Fi+vH6kT51#&p9JcA900c;NCxf=GAC`^>Hx zkCdPo*^09iV0_3zwk9vxN0mkEZmY6O^VH>pWm4@cTeoi`mDV-g3fozaQ}F18?Pjxi zmODMsCA5z0g4J3?@Iu`kZoPxnz6;&XG zB>bQ^84amuQ{7D3Wr8)M+pJdeh?d>%Em2j0AObQ8J4+JTBRnV-t#UNK5ZlMAJ^J5j zY9meFdaUzA6Xp4~u`+zPW;=+=;{k9!gOV$knLpR?JDg&iTPD){Z{m$%t@O<<%1br6 zjiS4`TgC!dgnn3u0ESXB%eXK*Sm%n89dhTwclQ1meI^@wy(~ys<+fQ%%78LEEXN== z(!OGmxM1Vt+SymnJ%)s2x!*aGBXe#bC5%s!tgLv>)XC3MKs;cI-nstR@idUZ;~gve zLruTaqk_un)#Eo)5b?@Y3aJpj(&PpVpe1P*pInE_dqSC&#eKf`vKH-hvlzl<&8 zdt03b>s2yK4~A#mcV#jxu8_>K5F%!U<%%iPFnMD_!Ag+cZ+RrY4tyHvp)RfcoimG# zLe&Iv`4;T1rb0t*5ar4!P*BK>ODQU~FM#5-jM{4!lB8NZ*H^2n=_daG%aNHBN`apr zDUkqWVxbacP{808@~cx-F7Eku*Ju9#2h)DK9&Kp*TfLW0k?+407JnQ3Jl8xA4~sON zL&O?X>8p5m%ygPvZpX=K-gt|mv&iv2?=K};l15Oh6j#r7dX3+T^!2rSt7CU0ktLMk zH{8L%VH>dAj2P`I%zxRz1lL_-t80E6XncP^hO~HmJ)$<77L{viO38ZyE^Y55e3>MF znHoa5*z!OhXk|5v>DZKAJKZMwdViU;TlRd{)%5iL05h<+@&5pX^qnRz_(q}C?6cPP1jIFpY7(#peo2TBnN0F`01Ag7 zuoxAJ(?h6VUEWQh-RY7%JK}F9Xu%7~bQoc^m1QmzC>)-nCfph?h%~!)(qUWMX<1n; z%(o$CkhEpI#71MfMcAQ~oSpb84_+C#w!JHh3l=;XM#9L`##1#w}^%nYe ztE9f4b){><3GFVTy|R`UR#=?P32;(C!Q?8wSk6@ZqZ|WVaf~A+%PU^?)QW!fMB?oG z^fB#jz9(v48IQyM9QaM7T3cI(eGJcc8Y)DR`BKPLQG*qg*ea-Fv^x`kz~Xqj;ornf zYQybc5ihK+n%?bgs(PB%TGDzB2j9f1)ByX7{Q1axK*i%C)04MSwv(XWYTBLk{-v*9%Wromy1bgsWrAHo<=L1%b}__i znN6%2bI2oT97$WV0}c|Lk3I$+}nOuR3{nW;%T~egQ@u5eRfR--d#Ep)9RK|P9lmchs3Qe!TkJhvju3GX1CB=aL=h03xKP@%_g5A-n#P@P1=GtEZ4JB&8BD}sa3gnQ z4Dv8`i~t8Dfp5bX`Zj~%jV|BAels^wX%4BXwYBp*Dyu6rQaXVghC)>UU;?X`Rlc&uA~+N<|tqEU+8`dz=e*XnrJinWCCzr~q#yXiJwX;WRKX&As3!YN}&z~~WG zBPuuw6aijEIa2^5=G$GT z_N~wSEzx-#jFU^EU&E@wx8{oBAq=5`=on?eKkakHV*b%K_dXbk4-y#FW3$mieO3^+Hc(cO)0I}bNye;E+ zQFRZAder_bz89)Q=F-E?wz6V`jgiTDcYi(r`M$>vDlY~6CDHyF=rVY(%xBcJ{VlZ3 zF`Qt=-!jVT4>?mSE=K^n4x+se$G;mi{{Rwbnpck=#^U41(Cl=t3F&e|%*g~%BC8Y} z;iWQ3xA>XZlhoHBnhvGoUx_+b!;K>NPj?ocVZObUil7_(%d3Vz!yZxHkNa30kxqmm z8%ZsHt9}EkE9o5<>(}u;AK+(#{8Qp@3EBA5!`>g$%r^Gs4LaXg)K&=C<%7=PV7il# zs=s>&Ewx7OfOrLMwdL_&#ZMSp_=`}8{{W)t+H`wttyyptK;U6O;9voc1yCL;)pfIH z@bkl3+a`~s+bj^gQ3%6@To*sx9AE*Rpatg{GwK?Jj+Bk3=>aywm6B#%*>2d$IVATy zpTfJGp$ciqYI3N_v%b*0bk_bK)-K|>@|+78K1bR_jeTx-9fGR153PMEe`}?9!(H%a z#t#hqN6|bpX=|iu+MkHD%Y{f5{_*uY*rsTlMC|iyf>$Yq3?*Vz0;=BZl>i?uH+Ed-Cm4?drdU_3{2Ta*;@=TX2ZXJz)M)y28eo8%VDAjK=H+K_ zb}((bAt;UV`HsXKwhzYfofrLyJW7u=pwshdw3<(K-_F)$IXZD^E1#r30@XAvE62&9 z_lgmE@UYJ zg(q)8SA`r}ogfkGNbiiYb*Q8DRL&+b+`;7xhwYSuCL#27K>JZMWi-novWkOCI z1F>0nWnkP9jK?}NhBB0u-kK0WaiRu-1;8qVyc%6WyhlBx;D z5zcYZ81}E1{{Y~k-x2f=jb06p_Hyudf^9rmKD%KCs|DKXD-zj+odV&V3adPB;1)Rf zz`)(Oc&F__@n80r@dw3Scj3;z#m&Ki9;jW`^terCZ!q&Fe8ZN6l zNk8^;%Q1$|Im)vvGAyKThiR5M9YZK)arN))llw$zf3t^$wMY1ybAKi6sI<1aw!PJS z9+z=Gu4xJE|# z5k#!b^Kbb{VowXi{YgquyGvE&biEHIHRpx*qW0MRfAAOWQSb}mV%oQZJ|}6nUur+h50)-RdknWep!9m{IrHzSCZ!IB-Nw=%G0P#F3c zY;5I)b*o0!w@v*uZ1-qp()y&UVk2u>)=yKY{jGmx{{V}+SA?{!7r+hTPZ#SD%cyI5 ztKMm;KAWhYG)j=&68SPm9$-X?L3pB%8(9>OA^0)jeN)396ZMF7Ul#ac8=YrTy8i%# zV_osqvu!SweW-aV*LG9=k<4v(w&fyKKa6cgFzBsBQ7=S=vR*J3RRL5elCG@FtJ(2jicIwLKeD_^TG3;SD=h zivH@t=6EBtvXW_H!M3(USj&Vdv^I8~oP|~GtzYG1idgI&Nh*~7=cTNA zXT}c@{?O^-&0E1OZLIjx*TMF8aa&)Pw$(J*^lQd2EOFZy+T6xU2%UkMmQ=zpD$G0< z^TVGP{0ZS*H}-+N&}Fmm#<_QKABJ`0xdfajGr!r7k0u8+q2=<_n45K@71Zc!ovWFUz-Tzmv86wQA*9x^<%JyS?pab-&(V z%$n7OoGGi_#bKw#tC{Zn*bBVq;ZEfYiG`65%-J!5 
zRhfNj!~Qz>kMYODaQJ6m*ZezdYTNz%|RC8_A3bt(6p!#Ri9kzg@OF=5jBb2s^MrhPVNKp$jBC`Cs zR$BBIvaLco>i5@IrOMJ=seWh85AknGwvp{1@cxBvk;vN?*Tb@0BL-sm-tJqC+i=H? zykT$wUs-$@@CWRj;q4Pt(|l3!hUY<)Yu7CWwwtC)ExgekxSA&}rIC){GO9-05C}9^ zfhtMMTNY7 zjn~Xsi_a6GWl1&%3)dd`$wRdKIpk zns%Qo=j{^Bc`SGih&DUM3W2yCxK$wFpO@vYH@a2TEa#H%PlDzPhIm%y-sfw8g8{>+ z1%Pe8D}1fEU~ob9PZMc+?}O}bA=F|^wO2C7VS6z9ERdirEyCI$^N-EJ`Fz|kv zXQCqbf5exXbkV4V+Al4(Dh3-4;8nJ+3uR6LJBd8kw_51F8n^I9k#D2g8%s!;P5qKW zrGlVx&Rmk-X7iGzHmC)GC5f*Ivr8Q>#HP{WcS)g&CC@UJh1^SP_G`9C=M5UjP~ib^&w{zZ>bx?H)*_Z86ILldzSixw zn(9?*bt=hO@BaV{bNbcQ?v@Pm!l*5E2b#ns>?R=^yKGR5m>_O-*hytmxB4sq;qd1hMRooT*F0kmt38C;2%61;+|O@4vc|F@V8U3D&ekB4z{6uddnAtf z(UfC%6x!>1eaA&a(%C^}~EDQn*l{3$<&%2`XN=sJ8!EN%mRp5NpTXOM|cn`zN;u!pAsQf$CwFqx6B~P?ptfDF8c^HO}a@ zzHRO#Az76`iF$mBNm58TV#|_i=WA1z5~uD~leP7=yXw5pK39a2g3J+99-(80`c!YzA1BEK72~FvUB~KSR>|LjziP_Re{(ZJv8;_0x=A zRLv1~R##EVlp9F^79p{QDtEptE`?*`XH73clgra>)fP<_-I^O```o+dbi0d&+*z29 zn>jVY_^;w7pRQQhXxH}FFvn=cY~r|7UnMfk%oPB`84lt=Bj#oYwS68M^;Iax(N%=8^Q#yTbRb6xOso@XqsTG>q-^At!j&bqfS^=26I%?8 z7A-vba8 zn`=9($5VbhfQl_=Ksuzs*-qAGNyZtA5In3~oGK?xd0W$7{{Ss}o_%S1I*XNmUCr-> zTHl0b_$wEI{vds)`!Y>3`g?h=W+2;R<4Uzl36wS9{q$-zq94l%Nz)%_7ej za7UKPNF_6da4-V%$TV=5ZqJa;s0zL^7!tz=fzA%rBBht&H;Cl3j^|f4zHFOrZdtanlF+Di zm5K&s*aqYwm3~scF{_q7DAm5+En>Kw+5L*{BWI@ZY|?o}w@8Yld1VP3FsxhnfyM_4 zj$E!OTkg~3E?0h3we>oG0O|U^m*YEmF1%B3b*9)x(_U@7b(I2iZZFf|%)9$T2ojzL}#TwbYndUajNOB+M z%9NMp-Iof(Xx&`i$x5d`Rp^X;l5*AD>7n?EVS2DWtEXJvmWkuHnY6yPL zh%~PYcv-Km^$SyGElrZh_FiH|ET?ibyLz_t0hxDvi~@nU*F!#^HkTUft>QPd(n)D9 z3@|Kq=@iQU05lA$UB>cRf{dvOK4E}-`No_rPpi7s{sT~iJ=+#FYfl_9{{Y0ZPPw%Y ztFl2Qt-Ca)R8U<@ZY7kyGF$I$$s{WQ&T2Z2x8YqX^3%l{J-nKhsbH+yl0>55VM#1P zwn-8c01%@p{{VSdf!_EpL-A#{pMP;Oy~Vw}M7@@We>xkO*^4R`krXSk%4AhgSgzpT zF*|sVi+YS2W}{_&r@hpA-IT`B^xZ=GE6Cut$Rm1urC;SGzH$ogEJiRkz{YW|sio(= z_4U}g8q{36zw#om{>{<669XolZKwyu#ncW9AsaB8cBw1@P=`L*IUpZL_zUrR@8OQW zCby&8#i!g@yIN`%5KOYh_U;>D1pV;2k#NcbB&A3pNM9_2!xL)WBe0J{c_l#x+3qbs zXwhN}?5fPger7CAR~u7sEJrtdFR5u7L=s7&+FnOtaVn;x3lS)_2#5=LgsG4+K0=J` z2))5t>&@XK7b zifIZ@Z#}2k?o$0rNX|+;fhF1qZGl!r`PH()tIKq+6zkpy@i_41QNaH6R0Ak7yOk%v+ zlhbAbHu!-b_N|?W zn@+N{ou&IVz>qJV#jsGbsa=d)Ibg&w$m(prXYGDCBh`FA;>|MSeav%eFzO2|Pc)l0 zqq-;V$vJbLq&5Ktxczg(egx6{M{nYf4*0wKV_LenTTN6)8AgKHQQ091%*7EOBmv20 zW+j=69`nH85iPtvvJ2M}M+}f!IKq%;Ea0IX2odcJr#!aY9AsgLRO-U?-PiT=JqlhM zaLa;)YE8fKKhc>t9uoMM;7hi(@q7xf+es82Xv)U+vaghcxlqW8D`&Ej*egjbpjRl$T!rP8RDi^?jEtO;eTvt_C}-264Zg5S-y zkPC4ls8Qc!hjB$D;1%Ua>A*GdcgES@;m(=;ud8XdR+rZ=ByDXw7MMb6YAmCPq!(W5`C-_V7Uh01XX!2@)GP!k^#!%_(%{(puWYY;>nN}oX z7>s`PiyYx`$=`m-p9=Nsxjb3%!%vG&)EeB}#o_%^#@Di_?qizjG9!fG5<;w-dNU~; ze@*yzmD9^|cZyAZ{>@*sZnyQj`;Up^TlC5mf%siJ5dm1lxfQZ|((8z;JgRk-yph#v{-bs>R zGMiOSL=4Nru^(-S#$hmuUfQCHYpb_U>->+J!(cJE$)!S^r0>yO@~;E?H1WRXZRuHy2RYz2-q!!z_8n$fuM`uG=8z3|tBE|XE#wQ#AbYMTn8!iOso z2Kkl{#Iqm0BVe2bRb6iq&*2Y(R`U3x!k!?u*Y&RxS}JJ9#yJ)k?HyH&$g--k8Awt= zA1a|-{KT9801Z42@oA@C@kXkG;_DJ>x6GxOVDSoPqKx1Hp5*ggbSDYZP8zExe;OI*BfI zE2VxQjYj4<^(rtil1T%M;LPf&)VD+>4p{lKXU6*JPZ?V2>mUP4vL%3jmN_Nb$DdG% z5I@V;>Tmca@5N0&z`qe6#CnDKytCHs^sP5YwTa__W0o6bS*CeqaD`%$RyjM5%*>eE zy1zU1&xm@TiTqgJ5z-aTv8zgMbs(r%Sw$fQQOF|=vucIs0e5<5>yL$g9-qU14Rjkt zX~n&^ogK~8X4#~UFlUvWpDFitDyoopVb2Yb@?3SAeMg69DsNXPrR#hCNan36Q1{m7 z?PGq@J_@({R-L12Hk!rOm2)gQrkkbT!f&jWGFQxdS7ahQ;N%#>!@3e&Xeg4ciDXeJ zRoWSpFge;;a&SP%$Tj(?`$POgxxD!A{{R;9{NG+%+&r*c*iST{ZoXl%Qy4?$fJ&gi zDi>oEB_AS}iT*Hn@#kG)KN@N>&21hbxR+CwMj>{-M(1IYz#YHsXP#@%{9)sLcg1(# zY}U2AnUGBzO4p17Wt9=~*(Z- zt^pu~RR|eXl}e4M0N?cv6L*9RBzu9xgU$n=?JHHvn zsSO<=vQ0W2H&L;9R$Ek4AaWR#SCG4}L5+iP`4o?wJOkkU2Tj(mJQwjpLW=r1eA{gT z%I98zE&Ti>7mCxw;y4y3nkj^>xnE>zHnwD4%fH(9#`?dA?j@VVDPuZ7;LC4hkg7=G 
[GIT binary patch data (base85-encoded binary file contents) omitted]
z@?6cER)EOd{m{5z0;u2&0DAi7iYio_RyU@Tk(s6ZO4eks&xv&5s6jhR^R&toCkKow zwpTd=*bJKKbae1+8mzB#r%ffsn&vx*npRb3NBQF=Nyt4n4tY4Dis((oD#kKN$icq6 zShZGzOlf6`L;HZzV3EcNBn*8kD^u1cFo6x3lXG#pP&nO!!;YTEnkcT6{{VM$DQ;A{ Z)vRsZVpWh~c>-4JlkY_oR$R >(tee -a "$LOG") +echo Logging output to "$LOG" + +time ./tools/train_net.py --gpu ${GPU_ID} \ + --solver models/${PT_DIR}/${NET}/fast_rcnn/solver.prototxt \ + --weights data/imagenet_models/${NET}.v2.caffemodel \ + --imdb ${TRAIN_IMDB} \ + --iters ${ITERS} \ + ${EXTRA_ARGS} + +set +x +NET_FINAL=`grep -B 1 "done solving" ${LOG} | grep "Wrote snapshot" | awk '{print $4}'` +set -x + +time ./tools/test_net.py --gpu ${GPU_ID} \ + --def models/${PT_DIR}/${NET}/fast_rcnn/test.prototxt \ + --net ${NET_FINAL} \ + --imdb ${TEST_IMDB} \ + ${EXTRA_ARGS} diff --git a/experiments/scripts/faster_rcnn_alt_opt.sh b/experiments/scripts/faster_rcnn_alt_opt.sh new file mode 100755 index 0000000..d2a1ebe --- /dev/null +++ b/experiments/scripts/faster_rcnn_alt_opt.sh @@ -0,0 +1,62 @@ +#!/bin/bash +# Usage: +# ./experiments/scripts/faster_rcnn_alt_opt.sh GPU NET DATASET [options args to {train,test}_net.py] +# DATASET is only pascal_voc for now +# +# Example: +# ./experiments/scripts/faster_rcnn_alt_opt.sh 0 VGG_CNN_M_1024 pascal_voc \ +# --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" + +set -x +set -e + +export PYTHONUNBUFFERED="True" + +GPU_ID=$1 +NET=$2 +NET_lc=${NET,,} +DATASET=$3 + +array=( $@ ) +len=${#array[@]} +EXTRA_ARGS=${array[@]:3:$len} +EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_} + +case $DATASET in + pascal_voc) + TRAIN_IMDB="voc_2007_trainval" + TEST_IMDB="voc_2007_test" + PT_DIR="pascal_voc" + ITERS=40000 + ;; + coco) + echo "Not implemented: use experiments/scripts/faster_rcnn_end2end.sh for coco" + exit + ;; + *) + echo "No dataset given" + exit + ;; +esac + +LOG="experiments/logs/faster_rcnn_alt_opt_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`" +exec &> >(tee -a "$LOG") +echo Logging output to "$LOG" + +time ./tools/train_faster_rcnn_alt_opt.py --gpu ${GPU_ID} \ + --net_name ${NET} \ + --weights data/imagenet_models/${NET}.v2.caffemodel \ + --imdb ${TRAIN_IMDB} \ + --cfg experiments/cfgs/faster_rcnn_alt_opt.yml \ + ${EXTRA_ARGS} + +set +x +NET_FINAL=`grep "Final model:" ${LOG} | awk '{print $3}'` +set -x + +time ./tools/test_net.py --gpu ${GPU_ID} \ + --def models/${PT_DIR}/${NET}/faster_rcnn_alt_opt/faster_rcnn_test.pt \ + --net ${NET_FINAL} \ + --imdb ${TEST_IMDB} \ + --cfg experiments/cfgs/faster_rcnn_alt_opt.yml \ + ${EXTRA_ARGS} diff --git a/experiments/scripts/faster_rcnn_end2end.sh b/experiments/scripts/faster_rcnn_end2end.sh new file mode 100755 index 0000000..79770aa --- /dev/null +++ b/experiments/scripts/faster_rcnn_end2end.sh @@ -0,0 +1,68 @@ +#!/bin/bash +# Usage: +# ./experiments/scripts/faster_rcnn_end2end.sh GPU NET DATASET [options args to {train,test}_net.py] +# DATASET is either pascal_voc or coco. 
+# +# Example: +# ./experiments/scripts/faster_rcnn_end2end.sh 0 VGG_CNN_M_1024 pascal_voc \ +# --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" + +set -x +set -e + +export PYTHONUNBUFFERED="True" + +GPU_ID=$1 +NET=$2 +NET_lc=${NET,,} +DATASET=$3 + +array=( $@ ) +len=${#array[@]} +EXTRA_ARGS=${array[@]:3:$len} +EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_} + +case $DATASET in + pascal_voc) + TRAIN_IMDB="voc_2007_trainval" + TEST_IMDB="voc_2007_test" + PT_DIR="pascal_voc" + ITERS=70000 + ;; + coco) + # This is a very long and slow training schedule + # You can probably use fewer iterations and reduce the + # time to the LR drop (set in the solver to 350,000 iterations). + TRAIN_IMDB="coco_2014_train" + TEST_IMDB="coco_2014_minival" + PT_DIR="coco" + ITERS=490000 + ;; + *) + echo "No dataset given" + exit + ;; +esac + +LOG="experiments/logs/faster_rcnn_end2end_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`" +exec &> >(tee -a "$LOG") +echo Logging output to "$LOG" + +time ./tools/train_net.py --gpu ${GPU_ID} \ + --solver models/${PT_DIR}/${NET}/faster_rcnn_end2end/solver.prototxt \ + --weights data/imagenet_models/${NET}.v2.caffemodel \ + --imdb ${TRAIN_IMDB} \ + --iters ${ITERS} \ + --cfg experiments/cfgs/faster_rcnn_end2end.yml \ + ${EXTRA_ARGS} + +set +x +NET_FINAL=`grep -B 1 "done solving" ${LOG} | grep "Wrote snapshot" | awk '{print $4}'` +set -x + +time ./tools/test_net.py --gpu ${GPU_ID} \ + --def models/${PT_DIR}/${NET}/faster_rcnn_end2end/test.prototxt \ + --net ${NET_FINAL} \ + --imdb ${TEST_IMDB} \ + --cfg experiments/cfgs/faster_rcnn_end2end.yml \ + ${EXTRA_ARGS} diff --git a/experiments/scripts/rfcn_end2end.sh b/experiments/scripts/rfcn_end2end.sh new file mode 100755 index 0000000..c54f9e1 --- /dev/null +++ b/experiments/scripts/rfcn_end2end.sh @@ -0,0 +1,68 @@ +#!/bin/bash +# Usage: +# ./experiments/scripts/rfcn_end2end.sh GPU NET DATASET [options args to {train,test}_net.py] +# DATASET is either pascal_voc or coco. +# +# Example: +# ./experiments/scripts/rfcn_end2end.sh 0 ResNet50 pascal_voc \ +# --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" + +set -x +set -e + +export PYTHONUNBUFFERED="True" + +GPU_ID=$1 +NET=$2 +NET_lc=${NET,,} +DATASET=$3 + +array=( $@ ) +len=${#array[@]} +EXTRA_ARGS=${array[@]:3:$len} +EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_} + +case $DATASET in + pascal_voc) + TRAIN_IMDB="voc_0712_trainval" + TEST_IMDB="voc_0712_test" + PT_DIR="pascal_voc" + ITERS=120000 + ;; + coco) + # This is a very long and slow training schedule + # You can probably use fewer iterations and reduce the + # time to the LR drop (set in the solver to 350,000 iterations). 
+ TRAIN_IMDB="coco_2014_train" + TEST_IMDB="coco_2014_minival" + PT_DIR="coco" + ITERS=490000 + ;; + *) + echo "No dataset given" + exit + ;; +esac + +LOG="experiments/logs/rfcn_end2end_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`" +exec &> >(tee -a "$LOG") +echo Logging output to "$LOG" + +time ./tools/train_net.py --gpu ${GPU_ID} \ + --solver models/${PT_DIR}/${NET}/rfcn_end2end/solver.prototxt \ + --weights data/imagenet_models/${NET}-model.caffemodel \ + --imdb ${TRAIN_IMDB} \ + --iters ${ITERS} \ + --cfg experiments/cfgs/rfcn_end2end.yml \ + ${EXTRA_ARGS} + +set +x +NET_FINAL=`grep -B 1 "done solving" ${LOG} | grep "Wrote snapshot" | awk '{print $4}'` +set -x + +time ./tools/test_net.py --gpu ${GPU_ID} \ + --def models/${PT_DIR}/${NET}/rfcn_end2end/test_agonistic.prototxt \ + --net ${NET_FINAL} \ + --imdb ${TEST_IMDB} \ + --cfg experiments/cfgs/rfcn_end2end.yml \ + ${EXTRA_ARGS} diff --git a/experiments/scripts/rfcn_end2end_ohem.sh b/experiments/scripts/rfcn_end2end_ohem.sh new file mode 100755 index 0000000..ed42477 --- /dev/null +++ b/experiments/scripts/rfcn_end2end_ohem.sh @@ -0,0 +1,70 @@ +#!/bin/bash +# Usage: +# ./experiments/scripts/rfcn_end2end_ohem.sh GPU NET DATASET [options args to {train,test}_net.py] +# DATASET is either pascal_voc or coco. +# +# Example: +# ./experiments/scripts/rfcn_end2end_ohem.sh 0 ResNet50 pascal_voc \ +# --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" + +set -x +set -e + +export PYTHONUNBUFFERED="True" + +GPU_ID=$1 +NET=$2 +NET_lc=${NET,,} +DATASET=$3 + +array=( $@ ) +len=${#array[@]} +EXTRA_ARGS=${array[@]:3:$len} +EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_} + +case $DATASET in + pascal_voc) + TRAIN_IMDB="voc_0712_trainval" + TEST_IMDB="voc_0712_test" + PT_DIR="pascal_voc" + ITERS=120000 + ;; + coco) + # This is a very long and slow training schedule + # You can probably use fewer iterations and reduce the + # time to the LR drop (set in the solver to 350,000 iterations). + TRAIN_IMDB="coco_2014_train" + TEST_IMDB="coco_2014_minival" + PT_DIR="coco" + ITERS=490000 + ;; + *) + echo "No dataset given" + exit + ;; +esac + +LOG="experiments/logs/rfcn_end2end_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`" +exec &> >(tee -a "$LOG") +echo Logging output to "$LOG" + + +time ./tools/train_net.py --gpu ${GPU_ID} \ + --solver models/${PT_DIR}/${NET}/rfcn_end2end/solver_ohem.prototxt \ + --weights data/imagenet_models/${NET}-model.caffemodel \ + --imdb ${TRAIN_IMDB} \ + --iters ${ITERS} \ + --cfg experiments/cfgs/rfcn_end2end_ohem.yml \ + ${EXTRA_ARGS} + + +set +x +NET_FINAL=`tail -n 100 ${LOG} | grep -B 1 "done solving" | grep "Wrote snapshot" | awk '{print $4}'` +set -x + +time ./tools/test_net.py --gpu ${GPU_ID} \ + --def models/${PT_DIR}/${NET}/rfcn_end2end/test_agonistic.prototxt \ + --net ${NET_FINAL} \ + --imdb ${TEST_IMDB} \ + --cfg experiments/cfgs/rfcn_end2end_ohem.yml \ + ${EXTRA_ARGS} diff --git a/experiments/scripts/rfcn_end2end_ohem_warmup.sh b/experiments/scripts/rfcn_end2end_ohem_warmup.sh new file mode 100755 index 0000000..4efbac7 --- /dev/null +++ b/experiments/scripts/rfcn_end2end_ohem_warmup.sh @@ -0,0 +1,84 @@ +#!/bin/bash +# Usage: +# ./experiments/scripts/rfcn_end2end_ohem.sh GPU NET DATASET [options args to {train,test}_net.py] +# DATASET is either pascal_voc or coco. 
+# +# Example: +# ./experiments/scripts/rfcn_end2end_ohem.sh 0 ResNet50 pascal_voc \ +# --set EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" + +set -x +set -e + +export PYTHONUNBUFFERED="True" + +GPU_ID=$1 +NET=$2 +NET_lc=${NET,,} +DATASET=$3 + +array=( $@ ) +len=${#array[@]} +EXTRA_ARGS=${array[@]:3:$len} +EXTRA_ARGS_SLUG=${EXTRA_ARGS// /_} + +case $DATASET in + pascal_voc) + TRAIN_IMDB="voc_0712_trainval" + TEST_IMDB="voc_0712_test" + PT_DIR="pascal_voc" + WARMUP_ITERS=10000 + ITERS=150000 + ;; + coco) + # This is a very long and slow training schedule + # You can probably use fewer iterations and reduce the + # time to the LR drop (set in the solver to 350,000 iterations). + TRAIN_IMDB="coco_2014_train" + TEST_IMDB="coco_2014_minival" + PT_DIR="coco" + WARMUP_ITERS=10000 + ITERS=480000 + ;; + *) + echo "No dataset given" + exit + ;; +esac + +LOG="experiments/logs/rfcn_end2end_${NET}_${EXTRA_ARGS_SLUG}.txt.`date +'%Y-%m-%d_%H-%M-%S'`" +exec &> >(tee -a "$LOG") +echo Logging output to "$LOG" + +time ./tools/train_net.py --gpu ${GPU_ID} \ + --solver models/${PT_DIR}/${NET}/rfcn_end2end/solver_warmup.prototxt \ + --weights data/imagenet_models/${NET}-model.caffemodel \ + --imdb ${TRAIN_IMDB} \ + --iters ${WARMUP_ITERS} \ + --cfg experiments/cfgs/rfcn_end2end_ohem.yml \ + ${EXTRA_ARGS} + +set +x +NET_CONTINUE=`tail -n 100 ${LOG} | grep -B 1 "done solving" | grep "Wrote snapshot" | awk '{print $4}'` +set -x + + +time ./tools/train_net.py --gpu ${GPU_ID} \ + --solver models/${PT_DIR}/${NET}/rfcn_end2end/solver_continue.prototxt \ + --weights ${NET_CONTINUE} \ + --imdb ${TRAIN_IMDB} \ + --iters ${ITERS} \ + --cfg experiments/cfgs/rfcn_end2end_ohem.yml \ + ${EXTRA_ARGS} + + +set +x +NET_FINAL=`tail -n 100 ${LOG} | grep -B 1 "done solving" | grep "Wrote snapshot" | awk '{print $4}'` +set -x + +time ./tools/test_net.py --gpu ${GPU_ID} \ + --def models/${PT_DIR}/${NET}/rfcn_end2end/test_agonistic.prototxt \ + --net ${NET_FINAL} \ + --imdb ${TEST_IMDB} \ + --cfg experiments/cfgs/rfcn_end2end_ohem.yml \ + ${EXTRA_ARGS} diff --git a/lib/Makefile b/lib/Makefile new file mode 100644 index 0000000..a482398 --- /dev/null +++ b/lib/Makefile @@ -0,0 +1,3 @@ +all: + python setup.py build_ext --inplace + rm -rf build diff --git a/lib/datasets/VOCdevkit-matlab-wrapper/get_voc_opts.m b/lib/datasets/VOCdevkit-matlab-wrapper/get_voc_opts.m new file mode 100644 index 0000000..629597a --- /dev/null +++ b/lib/datasets/VOCdevkit-matlab-wrapper/get_voc_opts.m @@ -0,0 +1,14 @@ +function VOCopts = get_voc_opts(path) + +tmp = pwd; +cd(path); +try + addpath('VOCcode'); + VOCinit; +catch + rmpath('VOCcode'); + cd(tmp); + error(sprintf('VOCcode directory not found under %s', path)); +end +rmpath('VOCcode'); +cd(tmp); diff --git a/lib/datasets/VOCdevkit-matlab-wrapper/voc_eval.m b/lib/datasets/VOCdevkit-matlab-wrapper/voc_eval.m new file mode 100644 index 0000000..1911a0e --- /dev/null +++ b/lib/datasets/VOCdevkit-matlab-wrapper/voc_eval.m @@ -0,0 +1,56 @@ +function res = voc_eval(path, comp_id, test_set, output_dir) + +VOCopts = get_voc_opts(path); +VOCopts.testset = test_set; + +for i = 1:length(VOCopts.classes) + cls = VOCopts.classes{i}; + res(i) = voc_eval_cls(cls, VOCopts, comp_id, output_dir); +end + +fprintf('\n~~~~~~~~~~~~~~~~~~~~\n'); +fprintf('Results:\n'); +aps = [res(:).ap]'; +fprintf('%.1f\n', aps * 100); +fprintf('%.1f\n', mean(aps) * 100); +fprintf('~~~~~~~~~~~~~~~~~~~~\n'); + +function res = voc_eval_cls(cls, VOCopts, comp_id, output_dir) + +test_set = VOCopts.testset; +year = 
VOCopts.dataset(4:end); + +addpath(fullfile(VOCopts.datadir, 'VOCcode')); + +res_fn = sprintf(VOCopts.detrespath, comp_id, cls); + +recall = []; +prec = []; +ap = 0; +ap_auc = 0; + +do_eval = (str2num(year) <= 2007) | ~strcmp(test_set, 'test'); +if do_eval + % Bug in VOCevaldet requires that tic has been called first + tic; + [recall, prec, ap] = VOCevaldet(VOCopts, comp_id, cls, true); + ap_auc = xVOCap(recall, prec); + + % force plot limits + ylim([0 1]); + xlim([0 1]); + + print(gcf, '-djpeg', '-r0', ... + [output_dir '/' cls '_pr.jpg']); +end +fprintf('!!! %s : %.4f %.4f\n', cls, ap, ap_auc); + +res.recall = recall; +res.prec = prec; +res.ap = ap; +res.ap_auc = ap_auc; + +save([output_dir '/' cls '_pr.mat'], ... + 'res', 'recall', 'prec', 'ap', 'ap_auc'); + +rmpath(fullfile(VOCopts.datadir, 'VOCcode')); diff --git a/lib/datasets/VOCdevkit-matlab-wrapper/xVOCap.m b/lib/datasets/VOCdevkit-matlab-wrapper/xVOCap.m new file mode 100644 index 0000000..de6c628 --- /dev/null +++ b/lib/datasets/VOCdevkit-matlab-wrapper/xVOCap.m @@ -0,0 +1,10 @@ +function ap = xVOCap(rec,prec) +% From the PASCAL VOC 2011 devkit + +mrec=[0 ; rec ; 1]; +mpre=[0 ; prec ; 0]; +for i=numel(mpre)-1:-1:1 + mpre(i)=max(mpre(i),mpre(i+1)); +end +i=find(mrec(2:end)~=mrec(1:end-1))+1; +ap=sum((mrec(i)-mrec(i-1)).*mpre(i)); diff --git a/lib/datasets/__init__.py b/lib/datasets/__init__.py new file mode 100644 index 0000000..7ba6a65 --- /dev/null +++ b/lib/datasets/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- diff --git a/lib/datasets/coco.py b/lib/datasets/coco.py new file mode 100644 index 0000000..bfe8ff3 --- /dev/null +++ b/lib/datasets/coco.py @@ -0,0 +1,394 @@ +# -------------------------------------------------------- +# Fast/er R-CNN +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +from datasets.imdb import imdb +import datasets.ds_utils as ds_utils +from fast_rcnn.config import cfg +import os.path as osp +import sys +import os +import numpy as np +import scipy.sparse +import scipy.io as sio +import cPickle +import json +import uuid +# COCO API +from pycocotools.coco import COCO +from pycocotools.cocoeval import COCOeval +from pycocotools import mask as COCOmask + +def _filter_crowd_proposals(roidb, crowd_thresh): + """ + Finds proposals that are inside crowd regions and marks them with + overlap = -1 (for all gt rois), which means they will be excluded from + training. 
+ """ + for ix, entry in enumerate(roidb): + overlaps = entry['gt_overlaps'].toarray() + crowd_inds = np.where(overlaps.max(axis=1) == -1)[0] + non_gt_inds = np.where(entry['gt_classes'] == 0)[0] + if len(crowd_inds) == 0 or len(non_gt_inds) == 0: + continue + iscrowd = [int(True) for _ in xrange(len(crowd_inds))] + crowd_boxes = ds_utils.xyxy_to_xywh(entry['boxes'][crowd_inds, :]) + non_gt_boxes = ds_utils.xyxy_to_xywh(entry['boxes'][non_gt_inds, :]) + ious = COCOmask.iou(non_gt_boxes, crowd_boxes, iscrowd) + bad_inds = np.where(ious.max(axis=1) > crowd_thresh)[0] + overlaps[non_gt_inds[bad_inds], :] = -1 + roidb[ix]['gt_overlaps'] = scipy.sparse.csr_matrix(overlaps) + return roidb + +class coco(imdb): + def __init__(self, image_set, year): + imdb.__init__(self, 'coco_' + year + '_' + image_set) + # COCO specific config options + self.config = {'top_k' : 2000, + 'use_salt' : True, + 'cleanup' : True, + 'crowd_thresh' : 0.7, + 'min_size' : 2} + # name, paths + self._year = year + self._image_set = image_set + self._data_path = osp.join(cfg.DATA_DIR, 'coco') + # load COCO API, classes, class <-> id mappings + self._COCO = COCO(self._get_ann_file()) + cats = self._COCO.loadCats(self._COCO.getCatIds()) + self._classes = tuple(['__background__'] + [c['name'] for c in cats]) + self._class_to_ind = dict(zip(self.classes, xrange(self.num_classes))) + self._class_to_coco_cat_id = dict(zip([c['name'] for c in cats], + self._COCO.getCatIds())) + self._image_index = self._load_image_set_index() + # Default to roidb handler + self.set_proposal_method('selective_search') + self.competition_mode(False) + + # Some image sets are "views" (i.e. subsets) into others. + # For example, minival2014 is a random 5000 image subset of val2014. + # This mapping tells us where the view's images and proposals come from. + self._view_map = { + 'minival2014' : 'val2014', # 5k val2014 subset + 'valminusminival2014' : 'val2014', # val2014 \setminus minival2014 + } + coco_name = image_set + year # e.g., "val2014" + self._data_name = (self._view_map[coco_name] + if self._view_map.has_key(coco_name) + else coco_name) + # Dataset splits that have ground-truth annotations (test splits + # do not have gt annotations) + self._gt_splits = ('train', 'val', 'minival') + + def _get_ann_file(self): + prefix = 'instances' if self._image_set.find('test') == -1 \ + else 'image_info' + return osp.join(self._data_path, 'annotations', + prefix + '_' + self._image_set + self._year + '.json') + + def _load_image_set_index(self): + """ + Load image ids. + """ + image_ids = self._COCO.getImgIds() + return image_ids + + def _get_widths(self): + anns = self._COCO.loadImgs(self._image_index) + widths = [ann['width'] for ann in anns] + return widths + + def image_path_at(self, i): + """ + Return the absolute path to image i in the image sequence. + """ + return self.image_path_from_index(self._image_index[i]) + + def image_path_from_index(self, index): + """ + Construct an image path from the image's "index" identifier. 
+ """ + # Example image path for index=119993: + # images/train2014/COCO_train2014_000000119993.jpg + file_name = ('COCO_' + self._data_name + '_' + + str(index).zfill(12) + '.jpg') + image_path = osp.join(self._data_path, 'images', + self._data_name, file_name) + assert osp.exists(image_path), \ + 'Path does not exist: {}'.format(image_path) + return image_path + + def selective_search_roidb(self): + return self._roidb_from_proposals('selective_search') + + def edge_boxes_roidb(self): + return self._roidb_from_proposals('edge_boxes_AR') + + def mcg_roidb(self): + return self._roidb_from_proposals('MCG') + + def _roidb_from_proposals(self, method): + """ + Creates a roidb from pre-computed proposals of a particular methods. + """ + top_k = self.config['top_k'] + cache_file = osp.join(self.cache_path, self.name + + '_{:s}_top{:d}'.format(method, top_k) + + '_roidb.pkl') + + if osp.exists(cache_file): + with open(cache_file, 'rb') as fid: + roidb = cPickle.load(fid) + print '{:s} {:s} roidb loaded from {:s}'.format(self.name, method, + cache_file) + return roidb + + if self._image_set in self._gt_splits: + gt_roidb = self.gt_roidb() + method_roidb = self._load_proposals(method, gt_roidb) + roidb = imdb.merge_roidbs(gt_roidb, method_roidb) + # Make sure we don't use proposals that are contained in crowds + roidb = _filter_crowd_proposals(roidb, self.config['crowd_thresh']) + else: + roidb = self._load_proposals(method, None) + with open(cache_file, 'wb') as fid: + cPickle.dump(roidb, fid, cPickle.HIGHEST_PROTOCOL) + print 'wrote {:s} roidb to {:s}'.format(method, cache_file) + return roidb + + def _load_proposals(self, method, gt_roidb): + """ + Load pre-computed proposals in the format provided by Jan Hosang: + http://www.mpi-inf.mpg.de/departments/computer-vision-and-multimodal- + computing/research/object-recognition-and-scene-understanding/how- + good-are-detection-proposals-really/ + For MCG, use boxes from http://www.eecs.berkeley.edu/Research/Projects/ + CS/vision/grouping/mcg/ and convert the file layout using + lib/datasets/tools/mcg_munge.py. + """ + box_list = [] + top_k = self.config['top_k'] + valid_methods = [ + 'MCG', + 'selective_search', + 'edge_boxes_AR', + 'edge_boxes_70'] + assert method in valid_methods + + print 'Loading {} boxes'.format(method) + for i, index in enumerate(self._image_index): + if i % 1000 == 0: + print '{:d} / {:d}'.format(i + 1, len(self._image_index)) + + box_file = osp.join( + cfg.DATA_DIR, 'coco_proposals', method, 'mat', + self._get_box_file(index)) + + raw_data = sio.loadmat(box_file)['boxes'] + boxes = np.maximum(raw_data - 1, 0).astype(np.uint16) + if method == 'MCG': + # Boxes from the MCG website are in (y1, x1, y2, x2) order + boxes = boxes[:, (1, 0, 3, 2)] + # Remove duplicate boxes and very small boxes and then take top k + keep = ds_utils.unique_boxes(boxes) + boxes = boxes[keep, :] + keep = ds_utils.filter_small_boxes(boxes, self.config['min_size']) + boxes = boxes[keep, :] + boxes = boxes[:top_k, :] + box_list.append(boxes) + # Sanity check + im_ann = self._COCO.loadImgs(index)[0] + width = im_ann['width'] + height = im_ann['height'] + ds_utils.validate_boxes(boxes, width=width, height=height) + return self.create_roidb_from_box_list(box_list, gt_roidb) + + def gt_roidb(self): + """ + Return the database of ground-truth regions of interest. + This function loads/saves from/to a cache file to speed up future calls. 
+ """ + cache_file = osp.join(self.cache_path, self.name + '_gt_roidb.pkl') + if osp.exists(cache_file): + with open(cache_file, 'rb') as fid: + roidb = cPickle.load(fid) + print '{} gt roidb loaded from {}'.format(self.name, cache_file) + return roidb + + gt_roidb = [self._load_coco_annotation(index) + for index in self._image_index] + + with open(cache_file, 'wb') as fid: + cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL) + print 'wrote gt roidb to {}'.format(cache_file) + return gt_roidb + + def _load_coco_annotation(self, index): + """ + Loads COCO bounding-box instance annotations. Crowd instances are + handled by marking their overlaps (with all categories) to -1. This + overlap value means that crowd "instances" are excluded from training. + """ + im_ann = self._COCO.loadImgs(index)[0] + width = im_ann['width'] + height = im_ann['height'] + + annIds = self._COCO.getAnnIds(imgIds=index, iscrowd=None) + objs = self._COCO.loadAnns(annIds) + # Sanitize bboxes -- some are invalid + valid_objs = [] + for obj in objs: + x1 = np.max((0, obj['bbox'][0])) + y1 = np.max((0, obj['bbox'][1])) + x2 = np.min((width - 1, x1 + np.max((0, obj['bbox'][2] - 1)))) + y2 = np.min((height - 1, y1 + np.max((0, obj['bbox'][3] - 1)))) + if obj['area'] > 0 and x2 >= x1 and y2 >= y1: + obj['clean_bbox'] = [x1, y1, x2, y2] + valid_objs.append(obj) + objs = valid_objs + num_objs = len(objs) + + boxes = np.zeros((num_objs, 4), dtype=np.uint16) + gt_classes = np.zeros((num_objs), dtype=np.int32) + overlaps = np.zeros((num_objs, self.num_classes), dtype=np.float32) + seg_areas = np.zeros((num_objs), dtype=np.float32) + + # Lookup table to map from COCO category ids to our internal class + # indices + coco_cat_id_to_class_ind = dict([(self._class_to_coco_cat_id[cls], + self._class_to_ind[cls]) + for cls in self._classes[1:]]) + + for ix, obj in enumerate(objs): + cls = coco_cat_id_to_class_ind[obj['category_id']] + boxes[ix, :] = obj['clean_bbox'] + gt_classes[ix] = cls + seg_areas[ix] = obj['area'] + if obj['iscrowd']: + # Set overlap to -1 for all classes for crowd objects + # so they will be excluded during training + overlaps[ix, :] = -1.0 + else: + overlaps[ix, cls] = 1.0 + + ds_utils.validate_boxes(boxes, width=width, height=height) + overlaps = scipy.sparse.csr_matrix(overlaps) + return {'boxes' : boxes, + 'gt_classes': gt_classes, + 'gt_overlaps' : overlaps, + 'flipped' : False, + 'seg_areas' : seg_areas} + + def _get_box_file(self, index): + # first 14 chars / first 22 chars / all chars + .mat + # COCO_val2014_0/COCO_val2014_000000447/COCO_val2014_000000447991.mat + file_name = ('COCO_' + self._data_name + + '_' + str(index).zfill(12) + '.mat') + return osp.join(file_name[:14], file_name[:22], file_name) + + def _print_detection_eval_metrics(self, coco_eval): + IoU_lo_thresh = 0.5 + IoU_hi_thresh = 0.95 + def _get_thr_ind(coco_eval, thr): + ind = np.where((coco_eval.params.iouThrs > thr - 1e-5) & + (coco_eval.params.iouThrs < thr + 1e-5))[0][0] + iou_thr = coco_eval.params.iouThrs[ind] + assert np.isclose(iou_thr, thr) + return ind + + ind_lo = _get_thr_ind(coco_eval, IoU_lo_thresh) + ind_hi = _get_thr_ind(coco_eval, IoU_hi_thresh) + # precision has dims (iou, recall, cls, area range, max dets) + # area range index 0: all area ranges + # max dets index 2: 100 per image + precision = \ + coco_eval.eval['precision'][ind_lo:(ind_hi + 1), :, :, 0, 2] + ap_default = np.mean(precision[precision > -1]) + print ('~~~~ Mean and per-category AP @ IoU=[{:.2f},{:.2f}] ' + '~~~~').format(IoU_lo_thresh, 
IoU_hi_thresh) + print '{:.1f}'.format(100 * ap_default) + for cls_ind, cls in enumerate(self.classes): + if cls == '__background__': + continue + # minus 1 because of __background__ + precision = coco_eval.eval['precision'][ind_lo:(ind_hi + 1), :, cls_ind - 1, 0, 2] + ap = np.mean(precision[precision > -1]) + print '{:.1f}'.format(100 * ap) + + print '~~~~ Summary metrics ~~~~' + coco_eval.summarize() + + def _do_detection_eval(self, res_file, output_dir): + ann_type = 'bbox' + coco_dt = self._COCO.loadRes(res_file) + coco_eval = COCOeval(self._COCO, coco_dt) + coco_eval.params.useSegm = (ann_type == 'segm') + coco_eval.evaluate() + coco_eval.accumulate() + self._print_detection_eval_metrics(coco_eval) + eval_file = osp.join(output_dir, 'detection_results.pkl') + with open(eval_file, 'wb') as fid: + cPickle.dump(coco_eval, fid, cPickle.HIGHEST_PROTOCOL) + print 'Wrote COCO eval results to: {}'.format(eval_file) + + def _coco_results_one_category(self, boxes, cat_id): + results = [] + for im_ind, index in enumerate(self.image_index): + dets = boxes[im_ind].astype(np.float) + if dets == []: + continue + scores = dets[:, -1] + xs = dets[:, 0] + ys = dets[:, 1] + ws = dets[:, 2] - xs + 1 + hs = dets[:, 3] - ys + 1 + results.extend( + [{'image_id' : index, + 'category_id' : cat_id, + 'bbox' : [xs[k], ys[k], ws[k], hs[k]], + 'score' : scores[k]} for k in xrange(dets.shape[0])]) + return results + + def _write_coco_results_file(self, all_boxes, res_file): + # [{"image_id": 42, + # "category_id": 18, + # "bbox": [258.15,41.29,348.26,243.78], + # "score": 0.236}, ...] + results = [] + for cls_ind, cls in enumerate(self.classes): + if cls == '__background__': + continue + print 'Collecting {} results ({:d}/{:d})'.format(cls, cls_ind, + self.num_classes - 1) + coco_cat_id = self._class_to_coco_cat_id[cls] + results.extend(self._coco_results_one_category(all_boxes[cls_ind], + coco_cat_id)) + print 'Writing results json to {}'.format(res_file) + with open(res_file, 'w') as fid: + json.dump(results, fid) + + def evaluate_detections(self, all_boxes, output_dir): + res_file = osp.join(output_dir, ('detections_' + + self._image_set + + self._year + + '_results')) + if self.config['use_salt']: + res_file += '_{}'.format(str(uuid.uuid4())) + res_file += '.json' + self._write_coco_results_file(all_boxes, res_file) + # Only do evaluation on non-test sets + if self._image_set.find('test') == -1: + self._do_detection_eval(res_file, output_dir) + # Optionally cleanup results json file + if self.config['cleanup']: + os.remove(res_file) + + def competition_mode(self, on): + if on: + self.config['use_salt'] = False + self.config['cleanup'] = False + else: + self.config['use_salt'] = True + self.config['cleanup'] = True diff --git a/lib/datasets/ds_utils.py b/lib/datasets/ds_utils.py new file mode 100644 index 0000000..f66a7f6 --- /dev/null +++ b/lib/datasets/ds_utils.py @@ -0,0 +1,41 @@ +# -------------------------------------------------------- +# Fast/er R-CNN +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import numpy as np + +def unique_boxes(boxes, scale=1.0): + """Return indices of unique boxes.""" + v = np.array([1, 1e3, 1e6, 1e9]) + hashes = np.round(boxes * scale).dot(v) + _, index = np.unique(hashes, return_index=True) + return np.sort(index) + +def xywh_to_xyxy(boxes): + """Convert [x y w h] box format to [x1 y1 x2 y2] format.""" + return np.hstack((boxes[:, 0:2], boxes[:, 0:2] + boxes[:, 2:4] - 1)) + 
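+# Worked example (illustrative values only): these helpers treat boxes as
+# inclusive pixel coordinates, so an [x y w h] box np.array([[10, 20, 5, 5]])
+# becomes [[10, 20, 14, 24]] under xywh_to_xyxy (hence the -1), and
+# xyxy_to_xywh below maps it back to [[10, 20, 5, 5]] via the matching +1.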
+def xyxy_to_xywh(boxes): + """Convert [x1 y1 x2 y2] box format to [x y w h] format.""" + return np.hstack((boxes[:, 0:2], boxes[:, 2:4] - boxes[:, 0:2] + 1)) + +def validate_boxes(boxes, width=0, height=0): + """Check that a set of boxes are valid.""" + x1 = boxes[:, 0] + y1 = boxes[:, 1] + x2 = boxes[:, 2] + y2 = boxes[:, 3] + assert (x1 >= 0).all() + assert (y1 >= 0).all() + assert (x2 >= x1).all() + assert (y2 >= y1).all() + assert (x2 < width).all() + assert (y2 < height).all() + +def filter_small_boxes(boxes, min_size): + w = boxes[:, 2] - boxes[:, 0] + h = boxes[:, 3] - boxes[:, 1] + keep = np.where((w >= min_size) & (h > min_size))[0] + return keep diff --git a/lib/datasets/factory.py b/lib/datasets/factory.py new file mode 100644 index 0000000..339ea13 --- /dev/null +++ b/lib/datasets/factory.py @@ -0,0 +1,43 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Factory method for easily getting imdbs by name.""" + +__sets = {} + +from datasets.pascal_voc import pascal_voc +from datasets.coco import coco +import numpy as np + +# Set up voc__ using selective search "fast" mode +for year in ['2007', '2012', '0712']: + for split in ['train', 'val', 'trainval', 'test']: + name = 'voc_{}_{}'.format(year, split) + __sets[name] = (lambda split=split, year=year: pascal_voc(split, year)) + + +# Set up coco_2014_ +for year in ['2014']: + for split in ['train', 'val', 'minival', 'valminusminival']: + name = 'coco_{}_{}'.format(year, split) + __sets[name] = (lambda split=split, year=year: coco(split, year)) + +# Set up coco_2015_ +for year in ['2015']: + for split in ['test', 'test-dev']: + name = 'coco_{}_{}'.format(year, split) + __sets[name] = (lambda split=split, year=year: coco(split, year)) + +def get_imdb(name): + """Get an imdb (image database) by name.""" + if not __sets.has_key(name): + raise KeyError('Unknown dataset: {}'.format(name)) + return __sets[name]() + +def list_imdbs(): + """List all registered imdbs.""" + return __sets.keys() diff --git a/lib/datasets/imdb.py b/lib/datasets/imdb.py new file mode 100644 index 0000000..b56bf0a --- /dev/null +++ b/lib/datasets/imdb.py @@ -0,0 +1,253 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import os +import os.path as osp +import PIL +from utils.cython_bbox import bbox_overlaps +import numpy as np +import scipy.sparse +from fast_rcnn.config import cfg + +class imdb(object): + """Image database.""" + + def __init__(self, name): + self._name = name + self._num_classes = 0 + self._classes = [] + self._image_index = [] + self._obj_proposer = 'selective_search' + self._roidb = None + self._roidb_handler = self.default_roidb + # Use this dict for storing dataset specific config options + self.config = {} + + @property + def name(self): + return self._name + + @property + def num_classes(self): + return len(self._classes) + + @property + def classes(self): + return self._classes + + @property + def image_index(self): + return self._image_index + + @property + def roidb_handler(self): + return self._roidb_handler + + @roidb_handler.setter + def roidb_handler(self, val): + self._roidb_handler = val + + def 
set_proposal_method(self, method): + method = eval('self.' + method + '_roidb') + self.roidb_handler = method + + @property + def roidb(self): + # A roidb is a list of dictionaries, each with the following keys: + # boxes + # gt_overlaps + # gt_classes + # flipped + if self._roidb is not None: + return self._roidb + self._roidb = self.roidb_handler() + return self._roidb + + @property + def cache_path(self): + cache_path = osp.abspath(osp.join(cfg.DATA_DIR, 'cache')) + if not os.path.exists(cache_path): + os.makedirs(cache_path) + return cache_path + + @property + def num_images(self): + return len(self.image_index) + + def image_path_at(self, i): + raise NotImplementedError + + def default_roidb(self): + raise NotImplementedError + + def evaluate_detections(self, all_boxes, output_dir=None): + """ + all_boxes is a list of length number-of-classes. + Each list element is a list of length number-of-images. + Each of those list elements is either an empty list [] + or a numpy array of detection. + + all_boxes[class][image] = [] or np.array of shape #dets x 5 + """ + raise NotImplementedError + + def _get_widths(self): + return [PIL.Image.open(self.image_path_at(i)).size[0] + for i in xrange(self.num_images)] + + def append_flipped_images(self): + num_images = self.num_images + widths = self._get_widths() + for i in xrange(num_images): + boxes = self.roidb[i]['boxes'].copy() + oldx1 = boxes[:, 0].copy() + oldx2 = boxes[:, 2].copy() + boxes[:, 0] = widths[i] - oldx2 - 1 + boxes[:, 2] = widths[i] - oldx1 - 1 + assert (boxes[:, 2] >= boxes[:, 0]).all() + entry = {'boxes' : boxes, + 'gt_overlaps' : self.roidb[i]['gt_overlaps'], + 'gt_classes' : self.roidb[i]['gt_classes'], + 'flipped' : True} + self.roidb.append(entry) + self._image_index = self._image_index * 2 + + def evaluate_recall(self, candidate_boxes=None, thresholds=None, + area='all', limit=None): + """Evaluate detection proposal recall metrics. 
+ + Returns: + results: dictionary of results with keys + 'ar': average recall + 'recalls': vector recalls at each IoU overlap threshold + 'thresholds': vector of IoU overlap thresholds + 'gt_overlaps': vector of all ground-truth overlaps + """ + # Record max overlap value for each gt box + # Return vector of overlap values + areas = { 'all': 0, 'small': 1, 'medium': 2, 'large': 3, + '96-128': 4, '128-256': 5, '256-512': 6, '512-inf': 7} + area_ranges = [ [0**2, 1e5**2], # all + [0**2, 32**2], # small + [32**2, 96**2], # medium + [96**2, 1e5**2], # large + [96**2, 128**2], # 96-128 + [128**2, 256**2], # 128-256 + [256**2, 512**2], # 256-512 + [512**2, 1e5**2], # 512-inf + ] + assert areas.has_key(area), 'unknown area range: {}'.format(area) + area_range = area_ranges[areas[area]] + gt_overlaps = np.zeros(0) + num_pos = 0 + for i in xrange(self.num_images): + # Checking for max_overlaps == 1 avoids including crowd annotations + # (...pretty hacking :/) + max_gt_overlaps = self.roidb[i]['gt_overlaps'].toarray().max(axis=1) + gt_inds = np.where((self.roidb[i]['gt_classes'] > 0) & + (max_gt_overlaps == 1))[0] + gt_boxes = self.roidb[i]['boxes'][gt_inds, :] + gt_areas = self.roidb[i]['seg_areas'][gt_inds] + valid_gt_inds = np.where((gt_areas >= area_range[0]) & + (gt_areas <= area_range[1]))[0] + gt_boxes = gt_boxes[valid_gt_inds, :] + num_pos += len(valid_gt_inds) + + if candidate_boxes is None: + # If candidate_boxes is not supplied, the default is to use the + # non-ground-truth boxes from this roidb + non_gt_inds = np.where(self.roidb[i]['gt_classes'] == 0)[0] + boxes = self.roidb[i]['boxes'][non_gt_inds, :] + else: + boxes = candidate_boxes[i] + if boxes.shape[0] == 0: + continue + if limit is not None and boxes.shape[0] > limit: + boxes = boxes[:limit, :] + + overlaps = bbox_overlaps(boxes.astype(np.float), + gt_boxes.astype(np.float)) + + _gt_overlaps = np.zeros((gt_boxes.shape[0])) + for j in xrange(gt_boxes.shape[0]): + # find which proposal box maximally covers each gt box + argmax_overlaps = overlaps.argmax(axis=0) + # and get the iou amount of coverage for each gt box + max_overlaps = overlaps.max(axis=0) + # find which gt box is 'best' covered (i.e. 
'best' = most iou) + gt_ind = max_overlaps.argmax() + gt_ovr = max_overlaps.max() + assert(gt_ovr >= 0) + # find the proposal box that covers the best covered gt box + box_ind = argmax_overlaps[gt_ind] + # record the iou coverage of this gt box + _gt_overlaps[j] = overlaps[box_ind, gt_ind] + assert(_gt_overlaps[j] == gt_ovr) + # mark the proposal box and the gt box as used + overlaps[box_ind, :] = -1 + overlaps[:, gt_ind] = -1 + # append recorded iou coverage level + gt_overlaps = np.hstack((gt_overlaps, _gt_overlaps)) + + gt_overlaps = np.sort(gt_overlaps) + if thresholds is None: + step = 0.05 + thresholds = np.arange(0.5, 0.95 + 1e-5, step) + recalls = np.zeros_like(thresholds) + # compute recall for each iou threshold + for i, t in enumerate(thresholds): + recalls[i] = (gt_overlaps >= t).sum() / float(num_pos) + # ar = 2 * np.trapz(recalls, thresholds) + ar = recalls.mean() + return {'ar': ar, 'recalls': recalls, 'thresholds': thresholds, + 'gt_overlaps': gt_overlaps} + + def create_roidb_from_box_list(self, box_list, gt_roidb): + assert len(box_list) == self.num_images, \ + 'Number of boxes must match number of ground-truth images' + roidb = [] + for i in xrange(self.num_images): + boxes = box_list[i] + num_boxes = boxes.shape[0] + overlaps = np.zeros((num_boxes, self.num_classes), dtype=np.float32) + + if gt_roidb is not None and gt_roidb[i]['boxes'].size > 0: + gt_boxes = gt_roidb[i]['boxes'] + gt_classes = gt_roidb[i]['gt_classes'] + gt_overlaps = bbox_overlaps(boxes.astype(np.float), + gt_boxes.astype(np.float)) + argmaxes = gt_overlaps.argmax(axis=1) + maxes = gt_overlaps.max(axis=1) + I = np.where(maxes > 0)[0] + overlaps[I, gt_classes[argmaxes[I]]] = maxes[I] + + overlaps = scipy.sparse.csr_matrix(overlaps) + roidb.append({ + 'boxes' : boxes, + 'gt_classes' : np.zeros((num_boxes,), dtype=np.int32), + 'gt_overlaps' : overlaps, + 'flipped' : False, + 'seg_areas' : np.zeros((num_boxes,), dtype=np.float32), + }) + return roidb + + @staticmethod + def merge_roidbs(a, b): + assert len(a) == len(b) + for i in xrange(len(a)): + a[i]['boxes'] = np.vstack((a[i]['boxes'], b[i]['boxes'])) + a[i]['gt_classes'] = np.hstack((a[i]['gt_classes'], + b[i]['gt_classes'])) + a[i]['gt_overlaps'] = scipy.sparse.vstack([a[i]['gt_overlaps'], + b[i]['gt_overlaps']]) + a[i]['seg_areas'] = np.hstack((a[i]['seg_areas'], + b[i]['seg_areas'])) + return a + + def competition_mode(self, on): + """Turn competition mode on or off.""" + pass diff --git a/lib/datasets/pascal_voc.py b/lib/datasets/pascal_voc.py new file mode 100644 index 0000000..b55f2f6 --- /dev/null +++ b/lib/datasets/pascal_voc.py @@ -0,0 +1,344 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import os +from datasets.imdb import imdb +import datasets.ds_utils as ds_utils +import xml.etree.ElementTree as ET +import numpy as np +import scipy.sparse +import scipy.io as sio +import utils.cython_bbox +import cPickle +import subprocess +import uuid +from voc_eval import voc_eval +from fast_rcnn.config import cfg + +class pascal_voc(imdb): + def __init__(self, image_set, year, devkit_path=None): + imdb.__init__(self, 'voc_' + year + '_' + image_set) + self._year = year + self._image_set = image_set + self._devkit_path = self._get_default_path() if devkit_path is None \ + else devkit_path + self._data_path = os.path.join(self._devkit_path, 'VOC' 
+ self._year) + self._classes = ('__background__', # always index 0 + 'aeroplane', 'bicycle', 'bird', 'boat', + 'bottle', 'bus', 'car', 'cat', 'chair', + 'cow', 'diningtable', 'dog', 'horse', + 'motorbike', 'person', 'pottedplant', + 'sheep', 'sofa', 'train', 'tvmonitor') + self._class_to_ind = dict(zip(self.classes, xrange(self.num_classes))) + self._image_ext = '.jpg' + self._image_index = self._load_image_set_index() + # Default to roidb handler + self._roidb_handler = self.selective_search_roidb + self._salt = str(uuid.uuid4()) + self._comp_id = 'comp4' + + # PASCAL specific config options + self.config = {'cleanup' : True, + 'use_salt' : True, + 'use_diff' : False, + 'matlab_eval' : False, + 'rpn_file' : None, + 'min_size' : 2} + + assert os.path.exists(self._devkit_path), \ + 'VOCdevkit path does not exist: {}'.format(self._devkit_path) + assert os.path.exists(self._data_path), \ + 'Path does not exist: {}'.format(self._data_path) + + def image_path_at(self, i): + """ + Return the absolute path to image i in the image sequence. + """ + return self.image_path_from_index(self._image_index[i]) + + def image_path_from_index(self, index): + """ + Construct an image path from the image's "index" identifier. + """ + image_path = os.path.join(self._data_path, 'JPEGImages', + index + self._image_ext) + assert os.path.exists(image_path), \ + 'Path does not exist: {}'.format(image_path) + return image_path + + def _load_image_set_index(self): + """ + Load the indexes listed in this dataset's image set file. + """ + # Example path to image set file: + # self._devkit_path + /VOCdevkit2007/VOC2007/ImageSets/Main/val.txt + image_set_file = os.path.join(self._data_path, 'ImageSets', 'Main', + self._image_set + '.txt') + assert os.path.exists(image_set_file), \ + 'Path does not exist: {}'.format(image_set_file) + with open(image_set_file) as f: + image_index = [x.strip() for x in f.readlines()] + return image_index + + def _get_default_path(self): + """ + Return the default path where PASCAL VOC is expected to be installed. + """ + return os.path.join(cfg.DATA_DIR, 'VOCdevkit' + self._year) + + def gt_roidb(self): + """ + Return the database of ground-truth regions of interest. + + This function loads/saves from/to a cache file to speed up future calls. + """ + cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl') + if os.path.exists(cache_file): + with open(cache_file, 'rb') as fid: + roidb = cPickle.load(fid) + print '{} gt roidb loaded from {}'.format(self.name, cache_file) + return roidb + + gt_roidb = [self._load_pascal_annotation(index) + for index in self.image_index] + with open(cache_file, 'wb') as fid: + cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL) + print 'wrote gt roidb to {}'.format(cache_file) + + return gt_roidb + + def selective_search_roidb(self): + """ + Return the database of selective search regions of interest. + Ground-truth ROIs are also included. + + This function loads/saves from/to a cache file to speed up future calls. 
+ """ + cache_file = os.path.join(self.cache_path, + self.name + '_selective_search_roidb.pkl') + + if os.path.exists(cache_file): + with open(cache_file, 'rb') as fid: + roidb = cPickle.load(fid) + print '{} ss roidb loaded from {}'.format(self.name, cache_file) + return roidb + + if int(self._year) == 2007 or self._image_set != 'test': + gt_roidb = self.gt_roidb() + ss_roidb = self._load_selective_search_roidb(gt_roidb) + roidb = imdb.merge_roidbs(gt_roidb, ss_roidb) + else: + roidb = self._load_selective_search_roidb(None) + with open(cache_file, 'wb') as fid: + cPickle.dump(roidb, fid, cPickle.HIGHEST_PROTOCOL) + print 'wrote ss roidb to {}'.format(cache_file) + + return roidb + + def rpn_roidb(self): + if int(self._year) == 2007 or self._image_set != 'test': + gt_roidb = self.gt_roidb() + rpn_roidb = self._load_rpn_roidb(gt_roidb) + roidb = imdb.merge_roidbs(gt_roidb, rpn_roidb) + else: + roidb = self._load_rpn_roidb(None) + + return roidb + + def _load_rpn_roidb(self, gt_roidb): + filename = self.config['rpn_file'] + print 'loading {}'.format(filename) + assert os.path.exists(filename), \ + 'rpn data not found at: {}'.format(filename) + with open(filename, 'rb') as f: + box_list = cPickle.load(f) + return self.create_roidb_from_box_list(box_list, gt_roidb) + + def _load_selective_search_roidb(self, gt_roidb): + filename = os.path.abspath(os.path.join(cfg.DATA_DIR, + 'selective_search_data', + self.name + '.mat')) + assert os.path.exists(filename), \ + 'Selective search data not found at: {}'.format(filename) + raw_data = sio.loadmat(filename)['boxes'].ravel() + + box_list = [] + for i in xrange(raw_data.shape[0]): + boxes = raw_data[i][:, (1, 0, 3, 2)] - 1 + keep = ds_utils.unique_boxes(boxes) + boxes = boxes[keep, :] + keep = ds_utils.filter_small_boxes(boxes, self.config['min_size']) + boxes = boxes[keep, :] + box_list.append(boxes) + + return self.create_roidb_from_box_list(box_list, gt_roidb) + + def _load_pascal_annotation(self, index): + """ + Load image and bounding boxes info from XML file in the PASCAL VOC + format. + """ + filename = os.path.join(self._data_path, 'Annotations', index + '.xml') + tree = ET.parse(filename) + objs = tree.findall('object') + if not self.config['use_diff']: + # Exclude the samples labeled as difficult + non_diff_objs = [ + obj for obj in objs if int(obj.find('difficult').text) == 0] + # if len(non_diff_objs) != len(objs): + # print 'Removed {} difficult objects'.format( + # len(objs) - len(non_diff_objs)) + objs = non_diff_objs + num_objs = len(objs) + + boxes = np.zeros((num_objs, 4), dtype=np.uint16) + gt_classes = np.zeros((num_objs), dtype=np.int32) + overlaps = np.zeros((num_objs, self.num_classes), dtype=np.float32) + # "Seg" area for pascal is just the box area + seg_areas = np.zeros((num_objs), dtype=np.float32) + + # Load object bounding boxes into a data frame. 
+ for ix, obj in enumerate(objs): + bbox = obj.find('bndbox') + # Make pixel indexes 0-based + x1 = float(bbox.find('xmin').text) - 1 + y1 = float(bbox.find('ymin').text) - 1 + x2 = float(bbox.find('xmax').text) - 1 + y2 = float(bbox.find('ymax').text) - 1 + cls = self._class_to_ind[obj.find('name').text.lower().strip()] + boxes[ix, :] = [x1, y1, x2, y2] + gt_classes[ix] = cls + overlaps[ix, cls] = 1.0 + seg_areas[ix] = (x2 - x1 + 1) * (y2 - y1 + 1) + + overlaps = scipy.sparse.csr_matrix(overlaps) + + return {'boxes' : boxes, + 'gt_classes': gt_classes, + 'gt_overlaps' : overlaps, + 'flipped' : False, + 'seg_areas' : seg_areas} + + def _get_comp_id(self): + comp_id = (self._comp_id + '_' + self._salt if self.config['use_salt'] + else self._comp_id) + return comp_id + + def _get_voc_results_file_template(self): + # VOCdevkit/results/VOC2007/Main/_det_test_aeroplane.txt + filename = self._get_comp_id() + '_det_' + self._image_set + '_{:s}.txt' + path = os.path.join( + self._devkit_path, + 'results', + 'VOC' + self._year, + 'Main', + filename) + return path + + def _write_voc_results_file(self, all_boxes): + for cls_ind, cls in enumerate(self.classes): + if cls == '__background__': + continue + print 'Writing {} VOC results file'.format(cls) + filename = self._get_voc_results_file_template().format(cls) + with open(filename, 'wt') as f: + for im_ind, index in enumerate(self.image_index): + dets = all_boxes[cls_ind][im_ind] + if dets == []: + continue + # the VOCdevkit expects 1-based indices + for k in xrange(dets.shape[0]): + f.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'. + format(index, dets[k, -1], + dets[k, 0] + 1, dets[k, 1] + 1, + dets[k, 2] + 1, dets[k, 3] + 1)) + + def _do_python_eval(self, output_dir = 'output'): + annopath = os.path.join( + self._devkit_path, + 'VOC' + self._year, + 'Annotations', + '{:s}.xml') + imagesetfile = os.path.join( + self._devkit_path, + 'VOC' + self._year, + 'ImageSets', + 'Main', + self._image_set + '.txt') + cachedir = os.path.join(self._devkit_path, 'annotations_cache') + aps = [] + # The PASCAL VOC metric changed in 2010 + use_07_metric = True if int(self._year) < 2010 else False + print 'VOC07 metric? ' + ('Yes' if use_07_metric else 'No') + if not os.path.isdir(output_dir): + os.mkdir(output_dir) + for i, cls in enumerate(self._classes): + if cls == '__background__': + continue + filename = self._get_voc_results_file_template().format(cls) + rec, prec, ap = voc_eval( + filename, annopath, imagesetfile, cls, cachedir, ovthresh=0.5, + use_07_metric=use_07_metric) + aps += [ap] + print('AP for {} = {:.4f}'.format(cls, ap)) + with open(os.path.join(output_dir, cls + '_pr.pkl'), 'w') as f: + cPickle.dump({'rec': rec, 'prec': prec, 'ap': ap}, f) + print('Mean AP = {:.4f}'.format(np.mean(aps))) + print('~~~~~~~~') + print('Results:') + for ap in aps: + print('{:.3f}'.format(ap)) + print('{:.3f}'.format(np.mean(aps))) + print('~~~~~~~~') + print('') + print('--------------------------------------------------------------') + print('Results computed with the **unofficial** Python eval code.') + print('Results should be very close to the official MATLAB eval code.') + print('Recompute with `./tools/reval.py --matlab ...` for your paper.') + print('-- Thanks, The Management') + print('--------------------------------------------------------------') + + def _do_matlab_eval(self, output_dir='output'): + print '-----------------------------------------------------' + print 'Computing results with the official MATLAB eval code.' 
+ print '-----------------------------------------------------' + path = os.path.join(cfg.ROOT_DIR, 'lib', 'datasets', + 'VOCdevkit-matlab-wrapper') + cmd = 'cd {} && '.format(path) + cmd += '{:s} -nodisplay -nodesktop '.format(cfg.MATLAB) + cmd += '-r "dbstop if error; ' + cmd += 'voc_eval(\'{:s}\',\'{:s}\',\'{:s}\',\'{:s}\'); quit;"' \ + .format(self._devkit_path, self._get_comp_id(), + self._image_set, output_dir) + print('Running:\n{}'.format(cmd)) + status = subprocess.call(cmd, shell=True) + + def evaluate_detections(self, all_boxes, output_dir): + self._write_voc_results_file(all_boxes) + self._do_python_eval(output_dir) + if self.config['matlab_eval']: + self._do_matlab_eval(output_dir) + if self.config['cleanup']: + for cls in self._classes: + if cls == '__background__': + continue + filename = self._get_voc_results_file_template().format(cls) + os.remove(filename) + + def competition_mode(self, on): + if on: + self.config['use_salt'] = False + self.config['cleanup'] = False + else: + self.config['use_salt'] = True + self.config['cleanup'] = True + +if __name__ == '__main__': + from datasets.pascal_voc import pascal_voc + d = pascal_voc('trainval', '2007') + res = d.roidb + from IPython import embed; embed() diff --git a/lib/datasets/tools/mcg_munge.py b/lib/datasets/tools/mcg_munge.py new file mode 100644 index 0000000..1392aa3 --- /dev/null +++ b/lib/datasets/tools/mcg_munge.py @@ -0,0 +1,38 @@ +import os +import sys + +"""Hacky tool to convert file system layout of MCG boxes downloaded from +http://www.eecs.berkeley.edu/Research/Projects/CS/vision/grouping/mcg/ +so that it's consistent with those computed by Jan Hosang (see: +http://www.mpi-inf.mpg.de/departments/computer-vision-and-multimodal- + computing/research/object-recognition-and-scene-understanding/how- + good-are-detection-proposals-really/) + +NB: Boxes from the MCG website are in (y1, x1, y2, x2) order. +Boxes from Hosang et al. are in (x1, y1, x2, y2) order. 
+""" + +def munge(src_dir): + # stored as: ./MCG-COCO-val2014-boxes/COCO_val2014_000000193401.mat + # want: ./MCG/mat/COCO_val2014_0/COCO_val2014_000000141/COCO_val2014_000000141334.mat + + files = os.listdir(src_dir) + for fn in files: + base, ext = os.path.splitext(fn) + # first 14 chars / first 22 chars / all chars + .mat + # COCO_val2014_0/COCO_val2014_000000447/COCO_val2014_000000447991.mat + first = base[:14] + second = base[:22] + dst_dir = os.path.join('MCG', 'mat', first, second) + if not os.path.exists(dst_dir): + os.makedirs(dst_dir) + src = os.path.join(src_dir, fn) + dst = os.path.join(dst_dir, fn) + print 'MV: {} -> {}'.format(src, dst) + os.rename(src, dst) + +if __name__ == '__main__': + # src_dir should look something like: + # src_dir = 'MCG-COCO-val2014-boxes' + src_dir = sys.argv[1] + munge(src_dir) diff --git a/lib/datasets/voc_eval.py b/lib/datasets/voc_eval.py new file mode 100644 index 0000000..8d0a830 --- /dev/null +++ b/lib/datasets/voc_eval.py @@ -0,0 +1,200 @@ +# -------------------------------------------------------- +# Fast/er R-CNN +# Licensed under The MIT License [see LICENSE for details] +# Written by Bharath Hariharan +# -------------------------------------------------------- + +import xml.etree.ElementTree as ET +import os +import cPickle +import numpy as np + +def parse_rec(filename): + """ Parse a PASCAL VOC xml file """ + tree = ET.parse(filename) + objects = [] + for obj in tree.findall('object'): + obj_struct = {} + obj_struct['name'] = obj.find('name').text + obj_struct['pose'] = obj.find('pose').text + obj_struct['truncated'] = int(obj.find('truncated').text) + obj_struct['difficult'] = int(obj.find('difficult').text) + bbox = obj.find('bndbox') + obj_struct['bbox'] = [int(bbox.find('xmin').text), + int(bbox.find('ymin').text), + int(bbox.find('xmax').text), + int(bbox.find('ymax').text)] + objects.append(obj_struct) + + return objects + +def voc_ap(rec, prec, use_07_metric=False): + """ ap = voc_ap(rec, prec, [use_07_metric]) + Compute VOC AP given precision and recall. + If use_07_metric is true, uses the + VOC 07 11 point method (default:False). + """ + if use_07_metric: + # 11 point metric + ap = 0. + for t in np.arange(0., 1.1, 0.1): + if np.sum(rec >= t) == 0: + p = 0 + else: + p = np.max(prec[rec >= t]) + ap = ap + p / 11. + else: + # correct AP calculation + # first append sentinel values at the end + mrec = np.concatenate(([0.], rec, [1.])) + mpre = np.concatenate(([0.], prec, [0.])) + + # compute the precision envelope + for i in range(mpre.size - 1, 0, -1): + mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i]) + + # to calculate area under PR curve, look for points + # where X axis (recall) changes value + i = np.where(mrec[1:] != mrec[:-1])[0] + + # and sum (\Delta recall) * prec + ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1]) + return ap + +def voc_eval(detpath, + annopath, + imagesetfile, + classname, + cachedir, + ovthresh=0.5, + use_07_metric=False): + """rec, prec, ap = voc_eval(detpath, + annopath, + imagesetfile, + classname, + [ovthresh], + [use_07_metric]) + + Top level function that does the PASCAL VOC evaluation. + + detpath: Path to detections + detpath.format(classname) should produce the detection results file. + annopath: Path to annotations + annopath.format(imagename) should be the xml annotations file. + imagesetfile: Text file containing the list of images, one image per line. 
+ classname: Category name (duh) + cachedir: Directory for caching the annotations + [ovthresh]: Overlap threshold (default = 0.5) + [use_07_metric]: Whether to use VOC07's 11 point AP computation + (default False) + """ + # assumes detections are in detpath.format(classname) + # assumes annotations are in annopath.format(imagename) + # assumes imagesetfile is a text file with each line an image name + # cachedir caches the annotations in a pickle file + + # first load gt + if not os.path.isdir(cachedir): + os.mkdir(cachedir) + cachefile = os.path.join(cachedir, 'annots.pkl') + # read list of images + with open(imagesetfile, 'r') as f: + lines = f.readlines() + imagenames = [x.strip() for x in lines] + + if not os.path.isfile(cachefile): + # load annots + recs = {} + for i, imagename in enumerate(imagenames): + recs[imagename] = parse_rec(annopath.format(imagename)) + if i % 100 == 0: + print 'Reading annotation for {:d}/{:d}'.format( + i + 1, len(imagenames)) + # save + print 'Saving cached annotations to {:s}'.format(cachefile) + with open(cachefile, 'w') as f: + cPickle.dump(recs, f) + else: + # load + with open(cachefile, 'r') as f: + recs = cPickle.load(f) + + # extract gt objects for this class + class_recs = {} + npos = 0 + for imagename in imagenames: + R = [obj for obj in recs[imagename] if obj['name'] == classname] + bbox = np.array([x['bbox'] for x in R]) + difficult = np.array([x['difficult'] for x in R]).astype(np.bool) + det = [False] * len(R) + npos = npos + sum(~difficult) + class_recs[imagename] = {'bbox': bbox, + 'difficult': difficult, + 'det': det} + + # read dets + detfile = detpath.format(classname) + with open(detfile, 'r') as f: + lines = f.readlines() + + splitlines = [x.strip().split(' ') for x in lines] + image_ids = [x[0] for x in splitlines] + confidence = np.array([float(x[1]) for x in splitlines]) + BB = np.array([[float(z) for z in x[2:]] for x in splitlines]) + + # sort by confidence + sorted_ind = np.argsort(-confidence) + sorted_scores = np.sort(-confidence) + BB = BB[sorted_ind, :] + image_ids = [image_ids[x] for x in sorted_ind] + + # go down dets and mark TPs and FPs + nd = len(image_ids) + tp = np.zeros(nd) + fp = np.zeros(nd) + for d in range(nd): + R = class_recs[image_ids[d]] + bb = BB[d, :].astype(float) + ovmax = -np.inf + BBGT = R['bbox'].astype(float) + + if BBGT.size > 0: + # compute overlaps + # intersection + ixmin = np.maximum(BBGT[:, 0], bb[0]) + iymin = np.maximum(BBGT[:, 1], bb[1]) + ixmax = np.minimum(BBGT[:, 2], bb[2]) + iymax = np.minimum(BBGT[:, 3], bb[3]) + iw = np.maximum(ixmax - ixmin + 1., 0.) + ih = np.maximum(iymax - iymin + 1., 0.) + inters = iw * ih + + # union + uni = ((bb[2] - bb[0] + 1.) * (bb[3] - bb[1] + 1.) + + (BBGT[:, 2] - BBGT[:, 0] + 1.) * + (BBGT[:, 3] - BBGT[:, 1] + 1.) - inters) + + overlaps = inters / uni + ovmax = np.max(overlaps) + jmax = np.argmax(overlaps) + + if ovmax > ovthresh: + if not R['difficult'][jmax]: + if not R['det'][jmax]: + tp[d] = 1. + R['det'][jmax] = 1 + else: + fp[d] = 1. + else: + fp[d] = 1. 
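+    # Note on the loop above: detections matched to a 'difficult' ground-truth
+    # box are ignored (neither TP nor FP), and a repeat detection of an
+    # already-matched box is counted as a false positive.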
+ + # compute precision recall + fp = np.cumsum(fp) + tp = np.cumsum(tp) + rec = tp / float(npos) + # avoid divide by zero in case the first detection matches a difficult + # ground truth + prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps) + ap = voc_ap(rec, prec, use_07_metric) + + return rec, prec, ap diff --git a/lib/fast_rcnn/__init__.py b/lib/fast_rcnn/__init__.py new file mode 100644 index 0000000..7ba6a65 --- /dev/null +++ b/lib/fast_rcnn/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- diff --git a/lib/fast_rcnn/bbox_transform.py b/lib/fast_rcnn/bbox_transform.py new file mode 100644 index 0000000..c83109c --- /dev/null +++ b/lib/fast_rcnn/bbox_transform.py @@ -0,0 +1,75 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import numpy as np + +def bbox_transform(ex_rois, gt_rois): + ex_widths = ex_rois[:, 2] - ex_rois[:, 0] + 1.0 + ex_heights = ex_rois[:, 3] - ex_rois[:, 1] + 1.0 + ex_ctr_x = ex_rois[:, 0] + 0.5 * ex_widths + ex_ctr_y = ex_rois[:, 1] + 0.5 * ex_heights + + gt_widths = gt_rois[:, 2] - gt_rois[:, 0] + 1.0 + gt_heights = gt_rois[:, 3] - gt_rois[:, 1] + 1.0 + gt_ctr_x = gt_rois[:, 0] + 0.5 * gt_widths + gt_ctr_y = gt_rois[:, 1] + 0.5 * gt_heights + + targets_dx = (gt_ctr_x - ex_ctr_x) / ex_widths + targets_dy = (gt_ctr_y - ex_ctr_y) / ex_heights + targets_dw = np.log(gt_widths / ex_widths) + targets_dh = np.log(gt_heights / ex_heights) + + targets = np.vstack( + (targets_dx, targets_dy, targets_dw, targets_dh)).transpose() + return targets + +def bbox_transform_inv(boxes, deltas): + if boxes.shape[0] == 0: + return np.zeros((0, deltas.shape[1]), dtype=deltas.dtype) + boxes = boxes.astype(deltas.dtype, copy=False) + + widths = boxes[:, 2] - boxes[:, 0] + 1.0 + heights = boxes[:, 3] - boxes[:, 1] + 1.0 + ctr_x = boxes[:, 0] + 0.5 * widths + ctr_y = boxes[:, 1] + 0.5 * heights + + dx = deltas[:, 0::4] + dy = deltas[:, 1::4] + dw = deltas[:, 2::4] + dh = deltas[:, 3::4] + + pred_ctr_x = dx * widths[:, np.newaxis] + ctr_x[:, np.newaxis] + pred_ctr_y = dy * heights[:, np.newaxis] + ctr_y[:, np.newaxis] + pred_w = np.exp(dw) * widths[:, np.newaxis] + pred_h = np.exp(dh) * heights[:, np.newaxis] + + pred_boxes = np.zeros(deltas.shape, dtype=deltas.dtype) + # x1 + pred_boxes[:, 0::4] = pred_ctr_x - 0.5 * pred_w + # y1 + pred_boxes[:, 1::4] = pred_ctr_y - 0.5 * pred_h + # x2 + pred_boxes[:, 2::4] = pred_ctr_x + 0.5 * pred_w + # y2 + pred_boxes[:, 3::4] = pred_ctr_y + 0.5 * pred_h + + return pred_boxes + +def clip_boxes(boxes, im_shape): + """ + Clip boxes to image boundaries. 
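+
+    Coordinates are clipped in place to [0, im_shape[1] - 1] horizontally and
+    [0, im_shape[0] - 1] vertically; the (modified) boxes array is returned.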
+ """ + + # x1 >= 0 + boxes[:, 0::4] = np.maximum(np.minimum(boxes[:, 0::4], im_shape[1] - 1), 0) + # y1 >= 0 + boxes[:, 1::4] = np.maximum(np.minimum(boxes[:, 1::4], im_shape[0] - 1), 0) + # x2 < im_shape[1] + boxes[:, 2::4] = np.maximum(np.minimum(boxes[:, 2::4], im_shape[1] - 1), 0) + # y2 < im_shape[0] + boxes[:, 3::4] = np.maximum(np.minimum(boxes[:, 3::4], im_shape[0] - 1), 0) + return boxes diff --git a/lib/fast_rcnn/config.py b/lib/fast_rcnn/config.py new file mode 100644 index 0000000..1751856 --- /dev/null +++ b/lib/fast_rcnn/config.py @@ -0,0 +1,290 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Fast R-CNN config system. + +This file specifies default config options for Fast R-CNN. You should not +change values in this file. Instead, you should write a config file (in yaml) +and use cfg_from_file(yaml_file) to load it and override the default options. + +Most tools in $ROOT/tools take a --cfg option to specify an override file. + - See tools/{train,test}_net.py for example code that uses cfg_from_file() + - See experiments/cfgs/*.yml for example YAML config override files +""" + +import os +import os.path as osp +import numpy as np +# `pip install easydict` if you don't have it +from easydict import EasyDict as edict + +__C = edict() +# Consumers can get config by: +# from fast_rcnn_config import cfg +cfg = __C + +# +# Training options +# + +__C.TRAIN = edict() + +# Scales to use during training (can list multiple scales) +# Each scale is the pixel size of an image's shortest side +__C.TRAIN.SCALES = (600,) + +# Max pixel size of the longest side of a scaled input image +__C.TRAIN.MAX_SIZE = 1000 + +# Images to use per minibatch +__C.TRAIN.IMS_PER_BATCH = 2 + +# Minibatch size (number of regions of interest [ROIs]) +__C.TRAIN.BATCH_SIZE = 128 + +# Fraction of minibatch that is labeled foreground (i.e. class > 0) +__C.TRAIN.FG_FRACTION = 0.25 + +# Overlap threshold for a ROI to be considered foreground (if >= FG_THRESH) +__C.TRAIN.FG_THRESH = 0.5 + +# Overlap threshold for a ROI to be considered background (class = 0 if +# overlap in [LO, HI)) +__C.TRAIN.BG_THRESH_HI = 0.5 +__C.TRAIN.BG_THRESH_LO = 0.1 + +# Use horizontally-flipped images during training? 
+__C.TRAIN.USE_FLIPPED = True + +# Train bounding-box regressors +__C.TRAIN.BBOX_REG = True + +# Overlap required between a ROI and ground-truth box in order for that ROI to +# be used as a bounding-box regression training example +__C.TRAIN.BBOX_THRESH = 0.5 + +# Iterations between snapshots +__C.TRAIN.SNAPSHOT_ITERS = 10000 + +# solver.prototxt specifies the snapshot path prefix, this adds an optional +# infix to yield the path: [_]_iters_XYZ.caffemodel +__C.TRAIN.SNAPSHOT_INFIX = '' + +# Use a prefetch thread in roi_data_layer.layer +# So far I haven't found this useful; likely more engineering work is required +__C.TRAIN.USE_PREFETCH = False + +# Normalize the targets (subtract empirical mean, divide by empirical stddev) +__C.TRAIN.BBOX_NORMALIZE_TARGETS = True +# Deprecated (inside weights) +__C.TRAIN.BBOX_INSIDE_WEIGHTS = (1.0, 1.0, 1.0, 1.0) +# Normalize the targets using "precomputed" (or made up) means and stdevs +# (BBOX_NORMALIZE_TARGETS must also be True) +__C.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED = False +__C.TRAIN.BBOX_NORMALIZE_MEANS = (0.0, 0.0, 0.0, 0.0) +__C.TRAIN.BBOX_NORMALIZE_STDS = (0.1, 0.1, 0.2, 0.2) + +# Train using these proposals +__C.TRAIN.PROPOSAL_METHOD = 'selective_search' + +# Make minibatches from images that have similar aspect ratios (i.e. both +# tall and thin or both short and wide) in order to avoid wasting computation +# on zero-padding. +__C.TRAIN.ASPECT_GROUPING = True + +# Use RPN to detect objects +__C.TRAIN.HAS_RPN = False +# IOU >= thresh: positive example +__C.TRAIN.RPN_POSITIVE_OVERLAP = 0.7 +# IOU < thresh: negative example +__C.TRAIN.RPN_NEGATIVE_OVERLAP = 0.3 +# If an anchor statisfied by positive and negative conditions set to negative +__C.TRAIN.RPN_CLOBBER_POSITIVES = False +# Max number of foreground examples +__C.TRAIN.RPN_FG_FRACTION = 0.5 +# Total number of examples +__C.TRAIN.RPN_BATCHSIZE = 256 +# NMS threshold used on RPN proposals +__C.TRAIN.RPN_NMS_THRESH = 0.7 +# Number of top scoring boxes to keep before apply NMS to RPN proposals +__C.TRAIN.RPN_PRE_NMS_TOP_N = 12000 +# Number of top scoring boxes to keep after applying NMS to RPN proposals +__C.TRAIN.RPN_POST_NMS_TOP_N = 2000 +# Proposal height and width both need to be greater than RPN_MIN_SIZE (at orig image scale) +__C.TRAIN.RPN_MIN_SIZE = 16 +# Deprecated (outside weights) +__C.TRAIN.RPN_BBOX_INSIDE_WEIGHTS = (1.0, 1.0, 1.0, 1.0) +# Give the positive RPN examples weight of p * 1 / {num positives} +# and give negatives a weight of (1 - p) +# Set to -1.0 to use uniform example weighting +__C.TRAIN.RPN_POSITIVE_WEIGHT = -1.0 + +# whether use class aware box or not +__C.TRAIN.AGONISTIC = False + +# +# Testing options +# + +__C.TEST = edict() + +# Scales to use during testing (can list multiple scales) +# Each scale is the pixel size of an image's shortest side +__C.TEST.SCALES = (600,) + +# Max pixel size of the longest side of a scaled input image +__C.TEST.MAX_SIZE = 1000 + +# Overlap threshold used for non-maximum suppression (suppress boxes with +# IoU >= this threshold) +__C.TEST.NMS = 0.3 + +# Experimental: treat the (K+1) units in the cls_score layer as linear +# predictors (trained, eg, with one-vs-rest SVMs). 
+__C.TEST.SVM = False + +# Test using bounding-box regressors +__C.TEST.BBOX_REG = True + +# Propose boxes +__C.TEST.HAS_RPN = False + +# Test using these proposals +__C.TEST.PROPOSAL_METHOD = 'selective_search' + +## NMS threshold used on RPN proposals +__C.TEST.RPN_NMS_THRESH = 0.7 +## Number of top scoring boxes to keep before apply NMS to RPN proposals +__C.TEST.RPN_PRE_NMS_TOP_N = 6000 +## Number of top scoring boxes to keep after applying NMS to RPN proposals +__C.TEST.RPN_POST_NMS_TOP_N = 300 +# Proposal height and width both need to be greater than RPN_MIN_SIZE (at orig image scale) +__C.TEST.RPN_MIN_SIZE = 16 + +# whether use class aware box or not +__C.TEST.AGONISTIC = False + + +# +# MISC +# + +# The mapping from image coordinates to feature map coordinates might cause +# some boxes that are distinct in image space to become identical in feature +# coordinates. If DEDUP_BOXES > 0, then DEDUP_BOXES is used as the scale factor +# for identifying duplicate boxes. +# 1/16 is correct for {Alex,Caffe}Net, VGG_CNN_M_1024, and VGG16 +__C.DEDUP_BOXES = 1./16. + +# Pixel mean values (BGR order) as a (1, 1, 3) array +# We use the same pixel mean for all networks even though it's not exactly what +# they were trained with +__C.PIXEL_MEANS = np.array([[[102.9801, 115.9465, 122.7717]]]) + +# For reproducibility +__C.RNG_SEED = 3 + +# A small number that's used many times +__C.EPS = 1e-14 + +# Root directory of project +__C.ROOT_DIR = osp.abspath(osp.join(osp.dirname(__file__), '..', '..')) + +# Data directory +__C.DATA_DIR = osp.abspath(osp.join(__C.ROOT_DIR, 'data')) + +# Model directory +__C.MODELS_DIR = osp.abspath(osp.join(__C.ROOT_DIR, 'models', 'pascal_voc')) + +# Name (or path to) the matlab executable +__C.MATLAB = 'matlab' + +# Place outputs under an experiments directory +__C.EXP_DIR = 'default' + +# Use GPU implementation of non-maximum suppression +__C.USE_GPU_NMS = True + +# Default GPU device id +__C.GPU_ID = 0 + + +def get_output_dir(imdb, net=None): + """Return the directory where experimental artifacts are placed. + If the directory does not exist, it is created. + + A canonical path is built using the name from an imdb and a network + (if not None). + """ + outdir = osp.abspath(osp.join(__C.ROOT_DIR, 'output', __C.EXP_DIR, imdb.name)) + if net is not None: + outdir = osp.join(outdir, net.name) + if not os.path.exists(outdir): + os.makedirs(outdir) + return outdir + +def _merge_a_into_b(a, b): + """Merge config dictionary a into config dictionary b, clobbering the + options in b whenever they are also specified in a. + """ + if type(a) is not edict: + return + + for k, v in a.iteritems(): + # a must specify keys that are in b + if not b.has_key(k): + raise KeyError('{} is not a valid config key'.format(k)) + + # the types must match, too + old_type = type(b[k]) + if old_type is not type(v): + if isinstance(b[k], np.ndarray): + v = np.array(v, dtype=b[k].dtype) + else: + raise ValueError(('Type mismatch ({} vs. 
{}) ' + 'for config key: {}').format(type(b[k]), + type(v), k)) + + # recursively merge dicts + if type(v) is edict: + try: + _merge_a_into_b(a[k], b[k]) + except: + print('Error under config key: {}'.format(k)) + raise + else: + b[k] = v + +def cfg_from_file(filename): + """Load a config file and merge it into the default options.""" + import yaml + with open(filename, 'r') as f: + yaml_cfg = edict(yaml.load(f)) + + _merge_a_into_b(yaml_cfg, __C) + +def cfg_from_list(cfg_list): + """Set config keys via list (e.g., from command line).""" + from ast import literal_eval + assert len(cfg_list) % 2 == 0 + for k, v in zip(cfg_list[0::2], cfg_list[1::2]): + key_list = k.split('.') + d = __C + for subkey in key_list[:-1]: + assert d.has_key(subkey) + d = d[subkey] + subkey = key_list[-1] + assert d.has_key(subkey) + try: + value = literal_eval(v) + except: + # handle the case when v is a string literal + value = v + assert type(value) == type(d[subkey]), \ + 'type {} does not match original type {}'.format( + type(value), type(d[subkey])) + d[subkey] = value diff --git a/lib/fast_rcnn/nms_wrapper.py b/lib/fast_rcnn/nms_wrapper.py new file mode 100644 index 0000000..d1a11db --- /dev/null +++ b/lib/fast_rcnn/nms_wrapper.py @@ -0,0 +1,20 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +from fast_rcnn.config import cfg +from nms.gpu_nms import gpu_nms +from nms.cpu_nms import cpu_nms + +def nms(dets, thresh, force_cpu=False): + """Dispatch to either CPU or GPU NMS implementations.""" + + if dets.shape[0] == 0: + return [] + if cfg.USE_GPU_NMS and not force_cpu: + return gpu_nms(dets, thresh, device_id=cfg.GPU_ID) + else: + return cpu_nms(dets, thresh) diff --git a/lib/fast_rcnn/test.py b/lib/fast_rcnn/test.py new file mode 100644 index 0000000..78c24ae --- /dev/null +++ b/lib/fast_rcnn/test.py @@ -0,0 +1,298 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Test a Fast R-CNN network on an imdb (image database).""" + +from fast_rcnn.config import cfg, get_output_dir +from fast_rcnn.bbox_transform import clip_boxes, bbox_transform_inv +import argparse +from utils.timer import Timer +import numpy as np +import cv2 +import caffe +from fast_rcnn.nms_wrapper import nms +import cPickle +from utils.blob import im_list_to_blob +import os + +def _get_image_blob(im): + """Converts an image into a network input. 
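+    The image is mean-subtracted using cfg.PIXEL_MEANS and resized so its
+    shorter side matches each scale in cfg.TEST.SCALES, with the longer side
+    capped at cfg.TEST.MAX_SIZE.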
+ + Arguments: + im (ndarray): a color image in BGR order + + Returns: + blob (ndarray): a data blob holding an image pyramid + im_scale_factors (list): list of image scales (relative to im) used + in the image pyramid + """ + im_orig = im.astype(np.float32, copy=True) + im_orig -= cfg.PIXEL_MEANS + + im_shape = im_orig.shape + im_size_min = np.min(im_shape[0:2]) + im_size_max = np.max(im_shape[0:2]) + + processed_ims = [] + im_scale_factors = [] + + for target_size in cfg.TEST.SCALES: + im_scale = float(target_size) / float(im_size_min) + # Prevent the biggest axis from being more than MAX_SIZE + if np.round(im_scale * im_size_max) > cfg.TEST.MAX_SIZE: + im_scale = float(cfg.TEST.MAX_SIZE) / float(im_size_max) + im = cv2.resize(im_orig, None, None, fx=im_scale, fy=im_scale, + interpolation=cv2.INTER_LINEAR) + im_scale_factors.append(im_scale) + processed_ims.append(im) + + # Create a blob to hold the input images + blob = im_list_to_blob(processed_ims) + + return blob, np.array(im_scale_factors) + +def _get_rois_blob(im_rois, im_scale_factors): + """Converts RoIs into network inputs. + + Arguments: + im_rois (ndarray): R x 4 matrix of RoIs in original image coordinates + im_scale_factors (list): scale factors as returned by _get_image_blob + + Returns: + blob (ndarray): R x 5 matrix of RoIs in the image pyramid + """ + rois, levels = _project_im_rois(im_rois, im_scale_factors) + rois_blob = np.hstack((levels, rois)) + return rois_blob.astype(np.float32, copy=False) + +def _project_im_rois(im_rois, scales): + """Project image RoIs into the image pyramid built by _get_image_blob. + + Arguments: + im_rois (ndarray): R x 4 matrix of RoIs in original image coordinates + scales (list): scale factors as returned by _get_image_blob + + Returns: + rois (ndarray): R x 4 matrix of projected RoI coordinates + levels (list): image pyramid levels used by each projected RoI + """ + im_rois = im_rois.astype(np.float, copy=False) + + if len(scales) > 1: + widths = im_rois[:, 2] - im_rois[:, 0] + 1 + heights = im_rois[:, 3] - im_rois[:, 1] + 1 + + areas = widths * heights + scaled_areas = areas[:, np.newaxis] * (scales[np.newaxis, :] ** 2) + diff_areas = np.abs(scaled_areas - 224 * 224) + levels = diff_areas.argmin(axis=1)[:, np.newaxis] + else: + levels = np.zeros((im_rois.shape[0], 1), dtype=np.int) + + rois = im_rois * scales[levels] + + return rois, levels + +def _get_blobs(im, rois): + """Convert an image and RoIs within that image into network inputs.""" + blobs = {'data' : None, 'rois' : None} + blobs['data'], im_scale_factors = _get_image_blob(im) + if not cfg.TEST.HAS_RPN: + blobs['rois'] = _get_rois_blob(rois, im_scale_factors) + return blobs, im_scale_factors + +def im_detect(net, im, boxes=None): + """Detect object classes in an image given object proposals. + + Arguments: + net (caffe.Net): Fast R-CNN network to use + im (ndarray): color image to test (in BGR order) + boxes (ndarray): R x 4 array of object proposals or None (for RPN) + + Returns: + scores (ndarray): R x K array of object class scores (K includes + background as object category 0) + boxes (ndarray): R x (4*K) array of predicted bounding boxes + """ + blobs, im_scales = _get_blobs(im, boxes) + + # When mapping from image ROIs to feature map ROIs, there's some aliasing + # (some distinct image ROIs get mapped to the same feature ROI). + # Here, we identify duplicate feature ROIs, so we only compute features + # on the unique subset. 
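+    # The dedup below hashes each (level, x1, y1, x2, y2) row by scaling with
+    # cfg.DEDUP_BOXES, rounding, and taking a dot product with
+    # [1, 1e3, 1e6, 1e9, 1e12]; np.unique keeps one representative per hash,
+    # and inv_index is used further down to map results back to all input boxes.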
+ if cfg.DEDUP_BOXES > 0 and not cfg.TEST.HAS_RPN: + v = np.array([1, 1e3, 1e6, 1e9, 1e12]) + hashes = np.round(blobs['rois'] * cfg.DEDUP_BOXES).dot(v) + _, index, inv_index = np.unique(hashes, return_index=True, + return_inverse=True) + blobs['rois'] = blobs['rois'][index, :] + boxes = boxes[index, :] + + if cfg.TEST.HAS_RPN: + im_blob = blobs['data'] + blobs['im_info'] = np.array( + [[im_blob.shape[2], im_blob.shape[3], im_scales[0]]], + dtype=np.float32) + + # reshape network inputs + net.blobs['data'].reshape(*(blobs['data'].shape)) + if cfg.TEST.HAS_RPN: + net.blobs['im_info'].reshape(*(blobs['im_info'].shape)) + else: + net.blobs['rois'].reshape(*(blobs['rois'].shape)) + + # do forward + forward_kwargs = {'data': blobs['data'].astype(np.float32, copy=False)} + if cfg.TEST.HAS_RPN: + forward_kwargs['im_info'] = blobs['im_info'].astype(np.float32, copy=False) + else: + forward_kwargs['rois'] = blobs['rois'].astype(np.float32, copy=False) + blobs_out = net.forward(**forward_kwargs) + + if cfg.TEST.HAS_RPN: + assert len(im_scales) == 1, "Only single-image batch implemented" + rois = net.blobs['rois'].data.copy() + # unscale back to raw image space + boxes = rois[:, 1:5] / im_scales[0] + + if cfg.TEST.SVM: + # use the raw scores before softmax under the assumption they + # were trained as linear SVMs + scores = net.blobs['cls_score'].data + else: + # use softmax estimated probabilities + scores = blobs_out['cls_prob'] + + if cfg.TEST.BBOX_REG: + # Apply bounding-box regression deltas + box_deltas = blobs_out['bbox_pred'] + pred_boxes = bbox_transform_inv(boxes, box_deltas) + pred_boxes = clip_boxes(pred_boxes, im.shape) + else: + # Simply repeat the boxes, once for each class + pred_boxes = np.tile(boxes, (1, scores.shape[1])) + + if cfg.DEDUP_BOXES > 0 and not cfg.TEST.HAS_RPN: + # Map scores and predictions back to the original set of boxes + scores = scores[inv_index, :] + pred_boxes = pred_boxes[inv_index, :] + + return scores, pred_boxes + +def vis_detections(im, class_name, dets, thresh=0.3): + """Visual debugging of detections.""" + import matplotlib.pyplot as plt + im = im[:, :, (2, 1, 0)] + for i in xrange(np.minimum(10, dets.shape[0])): + bbox = dets[i, :4] + score = dets[i, -1] + if score > thresh: + plt.cla() + plt.imshow(im) + plt.gca().add_patch( + plt.Rectangle((bbox[0], bbox[1]), + bbox[2] - bbox[0], + bbox[3] - bbox[1], fill=False, + edgecolor='g', linewidth=3) + ) + plt.title('{} {:.3f}'.format(class_name, score)) + plt.show() + +def apply_nms(all_boxes, thresh): + """Apply non-maximum suppression to all predicted boxes output by the + test_net method. 
+ """ + num_classes = len(all_boxes) + num_images = len(all_boxes[0]) + nms_boxes = [[[] for _ in xrange(num_images)] + for _ in xrange(num_classes)] + for cls_ind in xrange(num_classes): + for im_ind in xrange(num_images): + dets = all_boxes[cls_ind][im_ind] + if dets == []: + continue + # CPU NMS is much faster than GPU NMS when the number of boxes + # is relative small (e.g., < 10k) + # TODO(rbg): autotune NMS dispatch + keep = nms(dets, thresh, force_cpu=True) + if len(keep) == 0: + continue + nms_boxes[cls_ind][im_ind] = dets[keep, :].copy() + return nms_boxes + +def test_net(net, imdb, max_per_image=100, thresh=0.05, vis=False): + """Test a Fast R-CNN network on an image database.""" + num_images = len(imdb.image_index) + # all detections are collected into: + # all_boxes[cls][image] = N x 5 array of detections in + # (x1, y1, x2, y2, score) + all_boxes = [[[] for _ in xrange(num_images)] + for _ in xrange(imdb.num_classes)] + + output_dir = get_output_dir(imdb, net) + + # timers + _t = {'im_detect' : Timer(), 'misc' : Timer()} + + if not cfg.TEST.HAS_RPN: + roidb = imdb.roidb + + for i in xrange(num_images): + # filter out any ground truth boxes + if cfg.TEST.HAS_RPN: + box_proposals = None + else: + # The roidb may contain ground-truth rois (for example, if the roidb + # comes from the training or val split). We only want to evaluate + # detection on the *non*-ground-truth rois. We select those the rois + # that have the gt_classes field set to 0, which means there's no + # ground truth. + box_proposals = roidb[i]['boxes'][roidb[i]['gt_classes'] == 0] + + im = cv2.imread(imdb.image_path_at(i)) + _t['im_detect'].tic() + scores, boxes = im_detect(net, im, box_proposals) + _t['im_detect'].toc() + + _t['misc'].tic() + # skip j = 0, because it's the background class + for j in xrange(1, imdb.num_classes): + inds = np.where(scores[:, j] > thresh)[0] + cls_scores = scores[inds, j] + if cfg.TEST.AGONISTIC: + cls_boxes = boxes[inds, 4:8] + else: + cls_boxes = boxes[inds, j*4:(j+1)*4] + cls_dets = np.hstack((cls_boxes, cls_scores[:, np.newaxis])) \ + .astype(np.float32, copy=False) + keep = nms(cls_dets, cfg.TEST.NMS) + cls_dets = cls_dets[keep, :] + if vis: + vis_detections(im, imdb.classes[j], cls_dets) + all_boxes[j][i] = cls_dets + + # Limit to max_per_image detections *over all classes* + if max_per_image > 0: + image_scores = np.hstack([all_boxes[j][i][:, -1] + for j in xrange(1, imdb.num_classes)]) + if len(image_scores) > max_per_image: + image_thresh = np.sort(image_scores)[-max_per_image] + for j in xrange(1, imdb.num_classes): + keep = np.where(all_boxes[j][i][:, -1] >= image_thresh)[0] + all_boxes[j][i] = all_boxes[j][i][keep, :] + _t['misc'].toc() + + print 'im_detect: {:d}/{:d} {:.3f}s {:.3f}s' \ + .format(i + 1, num_images, _t['im_detect'].average_time, + _t['misc'].average_time) + + det_file = os.path.join(output_dir, 'detections.pkl') + with open(det_file, 'wb') as f: + cPickle.dump(all_boxes, f, cPickle.HIGHEST_PROTOCOL) + + print 'Evaluating detections' + imdb.evaluate_detections(all_boxes, output_dir) diff --git a/lib/fast_rcnn/train.py b/lib/fast_rcnn/train.py new file mode 100644 index 0000000..6ca5437 --- /dev/null +++ b/lib/fast_rcnn/train.py @@ -0,0 +1,186 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Train a Fast R-CNN network.""" + +import caffe +from 
fast_rcnn.config import cfg +import roi_data_layer.roidb as rdl_roidb +from utils.timer import Timer +import numpy as np +import os + +from caffe.proto import caffe_pb2 +import google.protobuf as pb2 + +class SolverWrapper(object): + """A simple wrapper around Caffe's solver. + This wrapper gives us control over he snapshotting process, which we + use to unnormalize the learned bounding-box regression weights. + """ + + def __init__(self, solver_prototxt, roidb, output_dir, + pretrained_model=None): + """Initialize the SolverWrapper.""" + self.output_dir = output_dir + + if (cfg.TRAIN.HAS_RPN and cfg.TRAIN.BBOX_REG and + cfg.TRAIN.BBOX_NORMALIZE_TARGETS): + # RPN can only use precomputed normalization because there are no + # fixed statistics to compute a priori + assert cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED + + if cfg.TRAIN.BBOX_REG: + print 'Computing bounding-box regression targets...' + self.bbox_means, self.bbox_stds = \ + rdl_roidb.add_bbox_regression_targets(roidb) + print 'done' + + self.solver = caffe.SGDSolver(solver_prototxt) + if pretrained_model is not None: + print ('Loading pretrained model ' + 'weights from {:s}').format(pretrained_model) + self.solver.net.copy_from(pretrained_model) + + self.solver_param = caffe_pb2.SolverParameter() + with open(solver_prototxt, 'rt') as f: + pb2.text_format.Merge(f.read(), self.solver_param) + + self.solver.net.layers[0].set_roidb(roidb) + + def snapshot(self): + """Take a snapshot of the network after unnormalizing the learned + bounding-box regression weights. This enables easy use at test-time. + """ + net = self.solver.net + + scale_bbox_params_faster_rcnn = (cfg.TRAIN.BBOX_REG and + cfg.TRAIN.BBOX_NORMALIZE_TARGETS and + net.params.has_key('bbox_pred')) + + scale_bbox_params_rfcn = (cfg.TRAIN.BBOX_REG and + cfg.TRAIN.BBOX_NORMALIZE_TARGETS and + net.params.has_key('rfcn_bbox')) + + if scale_bbox_params_faster_rcnn: + # save original values + orig_0 = net.params['bbox_pred'][0].data.copy() + orig_1 = net.params['bbox_pred'][1].data.copy() + + # scale and shift with bbox reg unnormalization; then save snapshot + net.params['bbox_pred'][0].data[...] = \ + (net.params['bbox_pred'][0].data * + self.bbox_stds[:, np.newaxis]) + net.params['bbox_pred'][1].data[...] = \ + (net.params['bbox_pred'][1].data * + self.bbox_stds + self.bbox_means) + + + if scale_bbox_params_rfcn: + # save original values + orig_0 = net.params['rfcn_bbox'][0].data.copy() + orig_1 = net.params['rfcn_bbox'][1].data.copy() + repeat = orig_1.shape[0] / self.bbox_means.shape[0] + + + # scale and shift with bbox reg unnormalization; then save snapshot + net.params['rfcn_bbox'][0].data[...] = \ + (net.params['rfcn_bbox'][0].data * + np.repeat(self.bbox_stds, repeat).reshape((orig_1.shape[0], 1, 1, 1))) + net.params['rfcn_bbox'][1].data[...] = \ + (net.params['rfcn_bbox'][1].data * + np.repeat(self.bbox_stds, repeat) + np.repeat(self.bbox_means, repeat)) + + infix = ('_' + cfg.TRAIN.SNAPSHOT_INFIX + if cfg.TRAIN.SNAPSHOT_INFIX != '' else '') + filename = (self.solver_param.snapshot_prefix + infix + + '_iter_{:d}'.format(self.solver.iter) + '.caffemodel') + filename = os.path.join(self.output_dir, filename) + net.save(str(filename)) + print 'Wrote snapshot to: {:s}'.format(filename) + + if scale_bbox_params_faster_rcnn: + # restore net to original state + net.params['bbox_pred'][0].data[...] = orig_0 + net.params['bbox_pred'][1].data[...] = orig_1 + if scale_bbox_params_rfcn: + # restore net to original state + net.params['rfcn_bbox'][0].data[...] 
= orig_0 + net.params['rfcn_bbox'][1].data[...] = orig_1 + + return filename + + def train_model(self, max_iters): + """Network training loop.""" + last_snapshot_iter = -1 + timer = Timer() + model_paths = [] + while self.solver.iter < max_iters: + # Make one SGD update + timer.tic() + self.solver.step(1) + timer.toc() + if self.solver.iter % (10 * self.solver_param.display) == 0: + print 'speed: {:.3f}s / iter'.format(timer.average_time) + + if self.solver.iter % cfg.TRAIN.SNAPSHOT_ITERS == 0: + last_snapshot_iter = self.solver.iter + model_paths.append(self.snapshot()) + + if last_snapshot_iter != self.solver.iter: + model_paths.append(self.snapshot()) + return model_paths + +def get_training_roidb(imdb): + """Returns a roidb (Region of Interest database) for use in training.""" + if cfg.TRAIN.USE_FLIPPED: + print 'Appending horizontally-flipped training examples...' + imdb.append_flipped_images() + print 'done' + + print 'Preparing training data...' + rdl_roidb.prepare_roidb(imdb) + print 'done' + + return imdb.roidb + +def filter_roidb(roidb): + """Remove roidb entries that have no usable RoIs.""" + + def is_valid(entry): + # Valid images have: + # (1) At least one foreground RoI OR + # (2) At least one background RoI + overlaps = entry['max_overlaps'] + # find boxes with sufficient overlap + fg_inds = np.where(overlaps >= cfg.TRAIN.FG_THRESH)[0] + # Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI) + bg_inds = np.where((overlaps < cfg.TRAIN.BG_THRESH_HI) & + (overlaps >= cfg.TRAIN.BG_THRESH_LO))[0] + # image is only valid if such boxes exist + valid = len(fg_inds) > 0 or len(bg_inds) > 0 + return valid + + num = len(roidb) + filtered_roidb = [entry for entry in roidb if is_valid(entry)] + num_after = len(filtered_roidb) + print 'Filtered {} roidb entries: {} -> {}'.format(num - num_after, + num, num_after) + return filtered_roidb + +def train_net(solver_prototxt, roidb, output_dir, + pretrained_model=None, max_iters=40000): + """Train a Fast R-CNN network.""" + + roidb = filter_roidb(roidb) + sw = SolverWrapper(solver_prototxt, roidb, output_dir, + pretrained_model=pretrained_model) + + print 'Solving...' 
+ model_paths = sw.train_model(max_iters) + print 'done solving' + return model_paths diff --git a/lib/nms/.gitignore b/lib/nms/.gitignore new file mode 100644 index 0000000..15a165d --- /dev/null +++ b/lib/nms/.gitignore @@ -0,0 +1,3 @@ +*.c +*.cpp +*.so diff --git a/lib/nms/__init__.py b/lib/nms/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lib/nms/cpu_nms.pyx b/lib/nms/cpu_nms.pyx new file mode 100644 index 0000000..1d0bef3 --- /dev/null +++ b/lib/nms/cpu_nms.pyx @@ -0,0 +1,68 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import numpy as np +cimport numpy as np + +cdef inline np.float32_t max(np.float32_t a, np.float32_t b): + return a if a >= b else b + +cdef inline np.float32_t min(np.float32_t a, np.float32_t b): + return a if a <= b else b + +def cpu_nms(np.ndarray[np.float32_t, ndim=2] dets, np.float thresh): + cdef np.ndarray[np.float32_t, ndim=1] x1 = dets[:, 0] + cdef np.ndarray[np.float32_t, ndim=1] y1 = dets[:, 1] + cdef np.ndarray[np.float32_t, ndim=1] x2 = dets[:, 2] + cdef np.ndarray[np.float32_t, ndim=1] y2 = dets[:, 3] + cdef np.ndarray[np.float32_t, ndim=1] scores = dets[:, 4] + + cdef np.ndarray[np.float32_t, ndim=1] areas = (x2 - x1 + 1) * (y2 - y1 + 1) + cdef np.ndarray[np.int_t, ndim=1] order = scores.argsort()[::-1] + + cdef int ndets = dets.shape[0] + cdef np.ndarray[np.int_t, ndim=1] suppressed = \ + np.zeros((ndets), dtype=np.int) + + # nominal indices + cdef int _i, _j + # sorted indices + cdef int i, j + # temp variables for box i's (the box currently under consideration) + cdef np.float32_t ix1, iy1, ix2, iy2, iarea + # variables for computing overlap with box j (lower scoring box) + cdef np.float32_t xx1, yy1, xx2, yy2 + cdef np.float32_t w, h + cdef np.float32_t inter, ovr + + keep = [] + for _i in range(ndets): + i = order[_i] + if suppressed[i] == 1: + continue + keep.append(i) + ix1 = x1[i] + iy1 = y1[i] + ix2 = x2[i] + iy2 = y2[i] + iarea = areas[i] + for _j in range(_i + 1, ndets): + j = order[_j] + if suppressed[j] == 1: + continue + xx1 = max(ix1, x1[j]) + yy1 = max(iy1, y1[j]) + xx2 = min(ix2, x2[j]) + yy2 = min(iy2, y2[j]) + w = max(0.0, xx2 - xx1 + 1) + h = max(0.0, yy2 - yy1 + 1) + inter = w * h + ovr = inter / (iarea + areas[j] - inter) + if ovr >= thresh: + suppressed[j] = 1 + + return keep diff --git a/lib/nms/gpu_nms.hpp b/lib/nms/gpu_nms.hpp new file mode 100644 index 0000000..68b6d42 --- /dev/null +++ b/lib/nms/gpu_nms.hpp @@ -0,0 +1,2 @@ +void _nms(int* keep_out, int* num_out, const float* boxes_host, int boxes_num, + int boxes_dim, float nms_overlap_thresh, int device_id); diff --git a/lib/nms/gpu_nms.pyx b/lib/nms/gpu_nms.pyx new file mode 100644 index 0000000..59d84af --- /dev/null +++ b/lib/nms/gpu_nms.pyx @@ -0,0 +1,31 @@ +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import numpy as np +cimport numpy as np + +assert sizeof(int) == sizeof(np.int32_t) + +cdef extern from "gpu_nms.hpp": + void _nms(np.int32_t*, int*, np.float32_t*, int, int, float, int) + +def gpu_nms(np.ndarray[np.float32_t, ndim=2] dets, np.float thresh, + np.int32_t device_id=0): + cdef int boxes_num = dets.shape[0] + cdef int 
boxes_dim = dets.shape[1] + cdef int num_out + cdef np.ndarray[np.int32_t, ndim=1] \ + keep = np.zeros(boxes_num, dtype=np.int32) + cdef np.ndarray[np.float32_t, ndim=1] \ + scores = dets[:, 4] + cdef np.ndarray[np.int_t, ndim=1] \ + order = scores.argsort()[::-1] + cdef np.ndarray[np.float32_t, ndim=2] \ + sorted_dets = dets[order, :] + _nms(&keep[0], &num_out, &sorted_dets[0, 0], boxes_num, boxes_dim, thresh, device_id) + keep = keep[:num_out] + return list(order[keep]) diff --git a/lib/nms/nms_kernel.cu b/lib/nms/nms_kernel.cu new file mode 100644 index 0000000..038a590 --- /dev/null +++ b/lib/nms/nms_kernel.cu @@ -0,0 +1,144 @@ +// ------------------------------------------------------------------ +// Faster R-CNN +// Copyright (c) 2015 Microsoft +// Licensed under The MIT License [see fast-rcnn/LICENSE for details] +// Written by Shaoqing Ren +// ------------------------------------------------------------------ + +#include "gpu_nms.hpp" +#include +#include + +#define CUDA_CHECK(condition) \ + /* Code block avoids redefinition of cudaError_t error */ \ + do { \ + cudaError_t error = condition; \ + if (error != cudaSuccess) { \ + std::cout << cudaGetErrorString(error) << std::endl; \ + } \ + } while (0) + +#define DIVUP(m,n) ((m) / (n) + ((m) % (n) > 0)) +int const threadsPerBlock = sizeof(unsigned long long) * 8; + +__device__ inline float devIoU(float const * const a, float const * const b) { + float left = max(a[0], b[0]), right = min(a[2], b[2]); + float top = max(a[1], b[1]), bottom = min(a[3], b[3]); + float width = max(right - left + 1, 0.f), height = max(bottom - top + 1, 0.f); + float interS = width * height; + float Sa = (a[2] - a[0] + 1) * (a[3] - a[1] + 1); + float Sb = (b[2] - b[0] + 1) * (b[3] - b[1] + 1); + return interS / (Sa + Sb - interS); +} + +__global__ void nms_kernel(const int n_boxes, const float nms_overlap_thresh, + const float *dev_boxes, unsigned long long *dev_mask) { + const int row_start = blockIdx.y; + const int col_start = blockIdx.x; + + // if (row_start > col_start) return; + + const int row_size = + min(n_boxes - row_start * threadsPerBlock, threadsPerBlock); + const int col_size = + min(n_boxes - col_start * threadsPerBlock, threadsPerBlock); + + __shared__ float block_boxes[threadsPerBlock * 5]; + if (threadIdx.x < col_size) { + block_boxes[threadIdx.x * 5 + 0] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 0]; + block_boxes[threadIdx.x * 5 + 1] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 1]; + block_boxes[threadIdx.x * 5 + 2] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 2]; + block_boxes[threadIdx.x * 5 + 3] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 3]; + block_boxes[threadIdx.x * 5 + 4] = + dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 4]; + } + __syncthreads(); + + if (threadIdx.x < row_size) { + const int cur_box_idx = threadsPerBlock * row_start + threadIdx.x; + const float *cur_box = dev_boxes + cur_box_idx * 5; + int i = 0; + unsigned long long t = 0; + int start = 0; + if (row_start == col_start) { + start = threadIdx.x + 1; + } + for (i = start; i < col_size; i++) { + if (devIoU(cur_box, block_boxes + i * 5) > nms_overlap_thresh) { + t |= 1ULL << i; + } + } + const int col_blocks = DIVUP(n_boxes, threadsPerBlock); + dev_mask[cur_box_idx * col_blocks + col_start] = t; + } +} + +void _set_device(int device_id) { + int current_device; + CUDA_CHECK(cudaGetDevice(¤t_device)); + if (current_device == device_id) { + return; + } + // The call to 
cudaSetDevice must come before any calls to Get, which + // may perform initialization using the GPU. + CUDA_CHECK(cudaSetDevice(device_id)); +} + +void _nms(int* keep_out, int* num_out, const float* boxes_host, int boxes_num, + int boxes_dim, float nms_overlap_thresh, int device_id) { + _set_device(device_id); + + float* boxes_dev = NULL; + unsigned long long* mask_dev = NULL; + + const int col_blocks = DIVUP(boxes_num, threadsPerBlock); + + CUDA_CHECK(cudaMalloc(&boxes_dev, + boxes_num * boxes_dim * sizeof(float))); + CUDA_CHECK(cudaMemcpy(boxes_dev, + boxes_host, + boxes_num * boxes_dim * sizeof(float), + cudaMemcpyHostToDevice)); + + CUDA_CHECK(cudaMalloc(&mask_dev, + boxes_num * col_blocks * sizeof(unsigned long long))); + + dim3 blocks(DIVUP(boxes_num, threadsPerBlock), + DIVUP(boxes_num, threadsPerBlock)); + dim3 threads(threadsPerBlock); + nms_kernel<<>>(boxes_num, + nms_overlap_thresh, + boxes_dev, + mask_dev); + + std::vector mask_host(boxes_num * col_blocks); + CUDA_CHECK(cudaMemcpy(&mask_host[0], + mask_dev, + sizeof(unsigned long long) * boxes_num * col_blocks, + cudaMemcpyDeviceToHost)); + + std::vector remv(col_blocks); + memset(&remv[0], 0, sizeof(unsigned long long) * col_blocks); + + int num_to_keep = 0; + for (int i = 0; i < boxes_num; i++) { + int nblock = i / threadsPerBlock; + int inblock = i % threadsPerBlock; + + if (!(remv[nblock] & (1ULL << inblock))) { + keep_out[num_to_keep++] = i; + unsigned long long *p = &mask_host[0] + i * col_blocks; + for (int j = nblock; j < col_blocks; j++) { + remv[j] |= p[j]; + } + } + } + *num_out = num_to_keep; + + CUDA_CHECK(cudaFree(boxes_dev)); + CUDA_CHECK(cudaFree(mask_dev)); +} diff --git a/lib/nms/py_cpu_nms.py b/lib/nms/py_cpu_nms.py new file mode 100644 index 0000000..54e7b25 --- /dev/null +++ b/lib/nms/py_cpu_nms.py @@ -0,0 +1,38 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import numpy as np + +def py_cpu_nms(dets, thresh): + """Pure Python NMS baseline.""" + x1 = dets[:, 0] + y1 = dets[:, 1] + x2 = dets[:, 2] + y2 = dets[:, 3] + scores = dets[:, 4] + + areas = (x2 - x1 + 1) * (y2 - y1 + 1) + order = scores.argsort()[::-1] + + keep = [] + while order.size > 0: + i = order[0] + keep.append(i) + xx1 = np.maximum(x1[i], x1[order[1:]]) + yy1 = np.maximum(y1[i], y1[order[1:]]) + xx2 = np.minimum(x2[i], x2[order[1:]]) + yy2 = np.minimum(y2[i], y2[order[1:]]) + + w = np.maximum(0.0, xx2 - xx1 + 1) + h = np.maximum(0.0, yy2 - yy1 + 1) + inter = w * h + ovr = inter / (areas[i] + areas[order[1:]] - inter) + + inds = np.where(ovr <= thresh)[0] + order = order[inds + 1] + + return keep diff --git a/lib/pycocotools/UPSTREAM_REV b/lib/pycocotools/UPSTREAM_REV new file mode 100644 index 0000000..706219b --- /dev/null +++ b/lib/pycocotools/UPSTREAM_REV @@ -0,0 +1 @@ +https://github.com/pdollar/coco/commit/3ac47c77ebd5a1ed4254a98b7fbf2ef4765a3574 diff --git a/lib/pycocotools/__init__.py b/lib/pycocotools/__init__.py new file mode 100644 index 0000000..3f7d85b --- /dev/null +++ b/lib/pycocotools/__init__.py @@ -0,0 +1 @@ +__author__ = 'tylin' diff --git a/lib/pycocotools/_mask.pyx b/lib/pycocotools/_mask.pyx new file mode 100644 index 0000000..e08f1f4 --- /dev/null +++ b/lib/pycocotools/_mask.pyx @@ -0,0 +1,291 @@ +# distutils: language = c +# distutils: sources = ../MatlabAPI/private/maskApi.c + 
+#************************************************************************** +# Microsoft COCO Toolbox. version 2.0 +# Data, paper, and tutorials available at: http://mscoco.org/ +# Code written by Piotr Dollar and Tsung-Yi Lin, 2015. +# Licensed under the Simplified BSD License [see coco/license.txt] +#************************************************************************** + +__author__ = 'tsungyi' + +# import both Python-level and C-level symbols of Numpy +# the API uses Numpy to interface C and Python +import numpy as np +cimport numpy as np +from libc.stdlib cimport malloc, free + +# intialized Numpy. must do. +np.import_array() + +# import numpy C function +# we use PyArray_ENABLEFLAGS to make Numpy ndarray responsible to memoery management +cdef extern from "numpy/arrayobject.h": + void PyArray_ENABLEFLAGS(np.ndarray arr, int flags) + +# Declare the prototype of the C functions in MaskApi.h +cdef extern from "maskApi.h": + ctypedef unsigned int uint + ctypedef unsigned long siz + ctypedef unsigned char byte + ctypedef double* BB + ctypedef struct RLE: + siz h, + siz w, + siz m, + uint* cnts, + void rlesInit( RLE **R, siz n ) + void rleEncode( RLE *R, const byte *M, siz h, siz w, siz n ) + void rleDecode( const RLE *R, byte *mask, siz n ) + void rleMerge( const RLE *R, RLE *M, siz n, bint intersect ) + void rleArea( const RLE *R, siz n, uint *a ) + void rleIou( RLE *dt, RLE *gt, siz m, siz n, byte *iscrowd, double *o ) + void bbIou( BB dt, BB gt, siz m, siz n, byte *iscrowd, double *o ) + void rleToBbox( const RLE *R, BB bb, siz n ) + void rleFrBbox( RLE *R, const BB bb, siz h, siz w, siz n ) + void rleFrPoly( RLE *R, const double *xy, siz k, siz h, siz w ) + char* rleToString( const RLE *R ) + void rleFrString( RLE *R, char *s, siz h, siz w ) + +# python class to wrap RLE array in C +# the class handles the memory allocation and deallocation +cdef class RLEs: + cdef RLE *_R + cdef siz _n + + def __cinit__(self, siz n =0): + rlesInit(&self._R, n) + self._n = n + + # free the RLE array here + def __dealloc__(self): + if self._R is not NULL: + for i in range(self._n): + free(self._R[i].cnts) + free(self._R) + def __getattr__(self, key): + if key == 'n': + return self._n + raise AttributeError(key) + +# python class to wrap Mask array in C +# the class handles the memory allocation and deallocation +cdef class Masks: + cdef byte *_mask + cdef siz _h + cdef siz _w + cdef siz _n + + def __cinit__(self, h, w, n): + self._mask = malloc(h*w*n* sizeof(byte)) + self._h = h + self._w = w + self._n = n + # def __dealloc__(self): + # the memory management of _mask has been passed to np.ndarray + # it doesn't need to be freed here + + # called when passing into np.array() and return an np.ndarray in column-major order + def __array__(self): + cdef np.npy_intp shape[1] + shape[0] = self._h*self._w*self._n + # Create a 1D array, and reshape it to fortran/Matlab column-major array + ndarray = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT8, self._mask).reshape((self._h, self._w, self._n), order='F') + # The _mask allocated by Masks is now handled by ndarray + PyArray_ENABLEFLAGS(ndarray, np.NPY_OWNDATA) + return ndarray + +# internal conversion from Python RLEs object to compressed RLE format +def _toString(RLEs Rs): + cdef siz n = Rs.n + cdef bytes py_string + cdef char* c_string + objs = [] + for i in range(n): + c_string = rleToString( &Rs._R[i] ) + py_string = c_string + objs.append({ + 'size': [Rs._R[i].h, Rs._R[i].w], + 'counts': py_string + }) + free(c_string) + return objs + +# internal 
conversion from compressed RLE format to Python RLEs object +def _frString(rleObjs): + cdef siz n = len(rleObjs) + Rs = RLEs(n) + cdef bytes py_string + cdef char* c_string + for i, obj in enumerate(rleObjs): + py_string = str(obj['counts']) + c_string = py_string + rleFrString( &Rs._R[i], c_string, obj['size'][0], obj['size'][1] ) + return Rs + +# encode mask to RLEs objects +# list of RLE string can be generated by RLEs member function +def encode(np.ndarray[np.uint8_t, ndim=3, mode='fortran'] mask): + h, w, n = mask.shape[0], mask.shape[1], mask.shape[2] + cdef RLEs Rs = RLEs(n) + rleEncode(Rs._R,mask.data,h,w,n) + objs = _toString(Rs) + return objs + +# decode mask from compressed list of RLE string or RLEs object +def decode(rleObjs): + cdef RLEs Rs = _frString(rleObjs) + h, w, n = Rs._R[0].h, Rs._R[0].w, Rs._n + masks = Masks(h, w, n) + rleDecode( Rs._R, masks._mask, n ); + return np.array(masks) + +def merge(rleObjs, bint intersect=0): + cdef RLEs Rs = _frString(rleObjs) + cdef RLEs R = RLEs(1) + rleMerge(Rs._R, R._R, Rs._n, intersect) + obj = _toString(R)[0] + return obj + +def area(rleObjs): + cdef RLEs Rs = _frString(rleObjs) + cdef uint* _a = malloc(Rs._n* sizeof(uint)) + rleArea(Rs._R, Rs._n, _a) + cdef np.npy_intp shape[1] + shape[0] = Rs._n + a = np.array((Rs._n, ), dtype=np.uint8) + a = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT32, _a) + PyArray_ENABLEFLAGS(a, np.NPY_OWNDATA) + return a + +# iou computation. support function overload (RLEs-RLEs and bbox-bbox). +def iou( dt, gt, pyiscrowd ): + def _preproc(objs): + if len(objs) == 0: + return objs + if type(objs) == np.ndarray: + if len(objs.shape) == 1: + objs = objs.reshape((objs[0], 1)) + # check if it's Nx4 bbox + if not len(objs.shape) == 2 or not objs.shape[1] == 4: + raise Exception('numpy ndarray input is only for *bounding boxes* and should have Nx4 dimension') + objs = objs.astype(np.double) + elif type(objs) == list: + # check if list is in box format and convert it to np.ndarray + isbox = np.all(np.array([(len(obj)==4) and ((type(obj)==list) or (type(obj)==np.ndarray)) for obj in objs])) + isrle = np.all(np.array([type(obj) == dict for obj in objs])) + if isbox: + objs = np.array(objs, dtype=np.double) + if len(objs.shape) == 1: + objs = objs.reshape((1,objs.shape[0])) + elif isrle: + objs = _frString(objs) + else: + raise Exception('list input can be bounding box (Nx4) or RLEs ([RLE])') + else: + raise Exception('unrecognized type. 
The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.') + return objs + def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou): + rleIou( dt._R, gt._R, m, n, iscrowd.data, _iou.data ) + def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou): + bbIou( dt.data, gt.data, m, n, iscrowd.data, _iou.data ) + def _len(obj): + cdef siz N = 0 + if type(obj) == RLEs: + N = obj.n + elif len(obj)==0: + pass + elif type(obj) == np.ndarray: + N = obj.shape[0] + return N + # convert iscrowd to numpy array + cdef np.ndarray[np.uint8_t, ndim=1] iscrowd = np.array(pyiscrowd, dtype=np.uint8) + # simple type checking + cdef siz m, n + dt = _preproc(dt) + gt = _preproc(gt) + m = _len(dt) + n = _len(gt) + if m == 0 or n == 0: + return [] + if not type(dt) == type(gt): + raise Exception('The dt and gt should have the same data type, either RLEs, list or np.ndarray') + + # define local variables + cdef double* _iou = 0 + cdef np.npy_intp shape[1] + # check type and assign iou function + if type(dt) == RLEs: + _iouFun = _rleIou + elif type(dt) == np.ndarray: + _iouFun = _bbIou + else: + raise Exception('input data type not allowed.') + _iou = malloc(m*n* sizeof(double)) + iou = np.zeros((m*n, ), dtype=np.double) + shape[0] = m*n + iou = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _iou) + PyArray_ENABLEFLAGS(iou, np.NPY_OWNDATA) + _iouFun(dt, gt, iscrowd, m, n, iou) + return iou.reshape((m,n), order='F') + +def toBbox( rleObjs ): + cdef RLEs Rs = _frString(rleObjs) + cdef siz n = Rs.n + cdef BB _bb = malloc(4*n* sizeof(double)) + rleToBbox( Rs._R, _bb, n ) + cdef np.npy_intp shape[1] + shape[0] = 4*n + bb = np.array((1,4*n), dtype=np.double) + bb = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _bb).reshape((n, 4)) + PyArray_ENABLEFLAGS(bb, np.NPY_OWNDATA) + return bb + +def frBbox(np.ndarray[np.double_t, ndim=2] bb, siz h, siz w ): + cdef siz n = bb.shape[0] + Rs = RLEs(n) + rleFrBbox( Rs._R, bb.data, h, w, n ) + objs = _toString(Rs) + return objs + +def frPoly( poly, siz h, siz w ): + cdef np.ndarray[np.double_t, ndim=1] np_poly + n = len(poly) + Rs = RLEs(n) + for i, p in enumerate(poly): + np_poly = np.array(p, dtype=np.double, order='F') + rleFrPoly( &Rs._R[i], np_poly.data, len(np_poly)/2, h, w ) + objs = _toString(Rs) + return objs + +def frUncompressedRLE(ucRles, siz h, siz w): + cdef np.ndarray[np.uint32_t, ndim=1] cnts + cdef RLE R + cdef uint *data + n = len(ucRles) + objs = [] + for i in range(n): + Rs = RLEs(1) + cnts = np.array(ucRles[i]['counts'], dtype=np.uint32) + # time for malloc can be saved here but it's fine + data = malloc(len(cnts)* sizeof(uint)) + for j in range(len(cnts)): + data[j] = cnts[j] + R = RLE(ucRles[i]['size'][0], ucRles[i]['size'][1], len(cnts), data) + Rs._R[0] = R + objs.append(_toString(Rs)[0]) + return objs + +def frPyObjects(pyobj, siz h, w): + if type(pyobj) == np.ndarray: + objs = frBbox(pyobj, h, w ) + elif type(pyobj) == list and len(pyobj[0]) == 4: + objs = frBbox(pyobj, h, w ) + elif type(pyobj) == list and len(pyobj[0]) > 4: + objs = frPoly(pyobj, h, w ) + elif type(pyobj) == list and type(pyobj[0]) == dict: + objs = frUncompressedRLE(pyobj, h, w) + else: + raise Exception('input type is not supported.') + return objs diff --git a/lib/pycocotools/coco.py b/lib/pycocotools/coco.py new file mode 100644 index 0000000..5d9f6b8 --- /dev/null +++ 
b/lib/pycocotools/coco.py @@ -0,0 +1,351 @@ +__author__ = 'tylin' +__version__ = '1.0.1' +# Interface for accessing the Microsoft COCO dataset. + +# Microsoft COCO is a large image dataset designed for object detection, +# segmentation, and caption generation. pycocotools is a Python API that +# assists in loading, parsing and visualizing the annotations in COCO. +# Please visit http://mscoco.org/ for more information on COCO, including +# for the data, paper, and tutorials. The exact format of the annotations +# is also described on the COCO website. For example usage of the pycocotools +# please see pycocotools_demo.ipynb. In addition to this API, please download both +# the COCO images and annotations in order to run the demo. + +# An alternative to using the API is to load the annotations directly +# into Python dictionary +# Using the API provides additional utility functions. Note that this API +# supports both *instance* and *caption* annotations. In the case of +# captions not all functions are defined (e.g. categories are undefined). + +# The following API functions are defined: +# COCO - COCO api class that loads COCO annotation file and prepare data structures. +# decodeMask - Decode binary mask M encoded via run-length encoding. +# encodeMask - Encode binary mask M using run-length encoding. +# getAnnIds - Get ann ids that satisfy given filter conditions. +# getCatIds - Get cat ids that satisfy given filter conditions. +# getImgIds - Get img ids that satisfy given filter conditions. +# loadAnns - Load anns with the specified ids. +# loadCats - Load cats with the specified ids. +# loadImgs - Load imgs with the specified ids. +# segToMask - Convert polygon segmentation to binary mask. +# showAnns - Display the specified annotations. +# loadRes - Load algorithm results and create API for accessing them. +# download - Download COCO images from mscoco.org server. +# Throughout the API "ann"=annotation, "cat"=category, and "img"=image. +# Help on each functions can be accessed by: "help COCO>function". + +# See also COCO>decodeMask, +# COCO>encodeMask, COCO>getAnnIds, COCO>getCatIds, +# COCO>getImgIds, COCO>loadAnns, COCO>loadCats, +# COCO>loadImgs, COCO>segToMask, COCO>showAnns + +# Microsoft COCO Toolbox. version 2.0 +# Data, paper, and tutorials available at: http://mscoco.org/ +# Code written by Piotr Dollar and Tsung-Yi Lin, 2014. +# Licensed under the Simplified BSD License [see bsd.txt] + +import json +import datetime +import time +import matplotlib.pyplot as plt +from matplotlib.collections import PatchCollection +from matplotlib.patches import Polygon +import numpy as np +from skimage.draw import polygon +import urllib +import copy +import itertools +import mask +import os + +class COCO: + def __init__(self, annotation_file=None): + """ + Constructor of Microsoft COCO helper class for reading and visualizing annotations. + :param annotation_file (str): location of annotation file + :param image_folder (str): location to the folder that hosts images. + :return: + """ + # load dataset + self.dataset = {} + self.anns = [] + self.imgToAnns = {} + self.catToImgs = {} + self.imgs = {} + self.cats = {} + if not annotation_file == None: + print 'loading annotations into memory...' + tic = time.time() + dataset = json.load(open(annotation_file, 'r')) + print 'Done (t=%0.2fs)'%(time.time()- tic) + self.dataset = dataset + self.createIndex() + + def createIndex(self): + # create index + print 'creating index...' 
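+        # Build lookup dicts: ann id -> ann, image id -> list of anns,
+        # image id -> image, cat id -> cat, and cat id -> list of image ids,
+        # so later queries do not have to scan the full annotation list.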
+ anns = {} + imgToAnns = {} + catToImgs = {} + cats = {} + imgs = {} + if 'annotations' in self.dataset: + imgToAnns = {ann['image_id']: [] for ann in self.dataset['annotations']} + anns = {ann['id']: [] for ann in self.dataset['annotations']} + for ann in self.dataset['annotations']: + imgToAnns[ann['image_id']] += [ann] + anns[ann['id']] = ann + + if 'images' in self.dataset: + imgs = {im['id']: {} for im in self.dataset['images']} + for img in self.dataset['images']: + imgs[img['id']] = img + + if 'categories' in self.dataset: + cats = {cat['id']: [] for cat in self.dataset['categories']} + for cat in self.dataset['categories']: + cats[cat['id']] = cat + catToImgs = {cat['id']: [] for cat in self.dataset['categories']} + if 'annotations' in self.dataset: + for ann in self.dataset['annotations']: + catToImgs[ann['category_id']] += [ann['image_id']] + + print 'index created!' + + # create class members + self.anns = anns + self.imgToAnns = imgToAnns + self.catToImgs = catToImgs + self.imgs = imgs + self.cats = cats + + def info(self): + """ + Print information about the annotation file. + :return: + """ + for key, value in self.dataset['info'].items(): + print '%s: %s'%(key, value) + + def getAnnIds(self, imgIds=[], catIds=[], areaRng=[], iscrowd=None): + """ + Get ann ids that satisfy given filter conditions. default skips that filter + :param imgIds (int array) : get anns for given imgs + catIds (int array) : get anns for given cats + areaRng (float array) : get anns for given area range (e.g. [0 inf]) + iscrowd (boolean) : get anns for given crowd label (False or True) + :return: ids (int array) : integer array of ann ids + """ + imgIds = imgIds if type(imgIds) == list else [imgIds] + catIds = catIds if type(catIds) == list else [catIds] + + if len(imgIds) == len(catIds) == len(areaRng) == 0: + anns = self.dataset['annotations'] + else: + if not len(imgIds) == 0: + # this can be changed by defaultdict + lists = [self.imgToAnns[imgId] for imgId in imgIds if imgId in self.imgToAnns] + anns = list(itertools.chain.from_iterable(lists)) + else: + anns = self.dataset['annotations'] + anns = anns if len(catIds) == 0 else [ann for ann in anns if ann['category_id'] in catIds] + anns = anns if len(areaRng) == 0 else [ann for ann in anns if ann['area'] > areaRng[0] and ann['area'] < areaRng[1]] + if not iscrowd == None: + ids = [ann['id'] for ann in anns if ann['iscrowd'] == iscrowd] + else: + ids = [ann['id'] for ann in anns] + return ids + + def getCatIds(self, catNms=[], supNms=[], catIds=[]): + """ + filtering parameters. default skips that filter. 
+ :param catNms (str array) : get cats for given cat names + :param supNms (str array) : get cats for given supercategory names + :param catIds (int array) : get cats for given cat ids + :return: ids (int array) : integer array of cat ids + """ + catNms = catNms if type(catNms) == list else [catNms] + supNms = supNms if type(supNms) == list else [supNms] + catIds = catIds if type(catIds) == list else [catIds] + + if len(catNms) == len(supNms) == len(catIds) == 0: + cats = self.dataset['categories'] + else: + cats = self.dataset['categories'] + cats = cats if len(catNms) == 0 else [cat for cat in cats if cat['name'] in catNms] + cats = cats if len(supNms) == 0 else [cat for cat in cats if cat['supercategory'] in supNms] + cats = cats if len(catIds) == 0 else [cat for cat in cats if cat['id'] in catIds] + ids = [cat['id'] for cat in cats] + return ids + + def getImgIds(self, imgIds=[], catIds=[]): + ''' + Get img ids that satisfy given filter conditions. + :param imgIds (int array) : get imgs for given ids + :param catIds (int array) : get imgs with all given cats + :return: ids (int array) : integer array of img ids + ''' + imgIds = imgIds if type(imgIds) == list else [imgIds] + catIds = catIds if type(catIds) == list else [catIds] + + if len(imgIds) == len(catIds) == 0: + ids = self.imgs.keys() + else: + ids = set(imgIds) + for i, catId in enumerate(catIds): + if i == 0 and len(ids) == 0: + ids = set(self.catToImgs[catId]) + else: + ids &= set(self.catToImgs[catId]) + return list(ids) + + def loadAnns(self, ids=[]): + """ + Load anns with the specified ids. + :param ids (int array) : integer ids specifying anns + :return: anns (object array) : loaded ann objects + """ + if type(ids) == list: + return [self.anns[id] for id in ids] + elif type(ids) == int: + return [self.anns[ids]] + + def loadCats(self, ids=[]): + """ + Load cats with the specified ids. + :param ids (int array) : integer ids specifying cats + :return: cats (object array) : loaded cat objects + """ + if type(ids) == list: + return [self.cats[id] for id in ids] + elif type(ids) == int: + return [self.cats[ids]] + + def loadImgs(self, ids=[]): + """ + Load anns with the specified ids. + :param ids (int array) : integer ids specifying img + :return: imgs (object array) : loaded img objects + """ + if type(ids) == list: + return [self.imgs[id] for id in ids] + elif type(ids) == int: + return [self.imgs[ids]] + + def showAnns(self, anns): + """ + Display the specified annotations. 
+ :param anns (array of object): annotations to display + :return: None + """ + if len(anns) == 0: + return 0 + if 'segmentation' in anns[0]: + datasetType = 'instances' + elif 'caption' in anns[0]: + datasetType = 'captions' + if datasetType == 'instances': + ax = plt.gca() + polygons = [] + color = [] + for ann in anns: + c = np.random.random((1, 3)).tolist()[0] + if type(ann['segmentation']) == list: + # polygon + for seg in ann['segmentation']: + poly = np.array(seg).reshape((len(seg)/2, 2)) + polygons.append(Polygon(poly, True,alpha=0.4)) + color.append(c) + else: + # mask + t = self.imgs[ann['image_id']] + if type(ann['segmentation']['counts']) == list: + rle = mask.frPyObjects([ann['segmentation']], t['height'], t['width']) + else: + rle = [ann['segmentation']] + m = mask.decode(rle) + img = np.ones( (m.shape[0], m.shape[1], 3) ) + if ann['iscrowd'] == 1: + color_mask = np.array([2.0,166.0,101.0])/255 + if ann['iscrowd'] == 0: + color_mask = np.random.random((1, 3)).tolist()[0] + for i in range(3): + img[:,:,i] = color_mask[i] + ax.imshow(np.dstack( (img, m*0.5) )) + p = PatchCollection(polygons, facecolors=color, edgecolors=(0,0,0,1), linewidths=3, alpha=0.4) + ax.add_collection(p) + elif datasetType == 'captions': + for ann in anns: + print ann['caption'] + + def loadRes(self, resFile): + """ + Load result file and return a result api object. + :param resFile (str) : file name of result file + :return: res (obj) : result api object + """ + res = COCO() + res.dataset['images'] = [img for img in self.dataset['images']] + # res.dataset['info'] = copy.deepcopy(self.dataset['info']) + # res.dataset['licenses'] = copy.deepcopy(self.dataset['licenses']) + + print 'Loading and preparing results... ' + tic = time.time() + anns = json.load(open(resFile)) + assert type(anns) == list, 'results in not an array of objects' + annsImgIds = [ann['image_id'] for ann in anns] + assert set(annsImgIds) == (set(annsImgIds) & set(self.getImgIds())), \ + 'Results do not correspond to current coco set' + if 'caption' in anns[0]: + imgIds = set([img['id'] for img in res.dataset['images']]) & set([ann['image_id'] for ann in anns]) + res.dataset['images'] = [img for img in res.dataset['images'] if img['id'] in imgIds] + for id, ann in enumerate(anns): + ann['id'] = id+1 + elif 'bbox' in anns[0] and not anns[0]['bbox'] == []: + res.dataset['categories'] = copy.deepcopy(self.dataset['categories']) + for id, ann in enumerate(anns): + bb = ann['bbox'] + x1, x2, y1, y2 = [bb[0], bb[0]+bb[2], bb[1], bb[1]+bb[3]] + if not 'segmentation' in ann: + ann['segmentation'] = [[x1, y1, x1, y2, x2, y2, x2, y1]] + ann['area'] = bb[2]*bb[3] + ann['id'] = id+1 + ann['iscrowd'] = 0 + elif 'segmentation' in anns[0]: + res.dataset['categories'] = copy.deepcopy(self.dataset['categories']) + for id, ann in enumerate(anns): + # now only support compressed RLE format as segmentation results + ann['area'] = mask.area([ann['segmentation']])[0] + if not 'bbox' in ann: + ann['bbox'] = mask.toBbox([ann['segmentation']])[0] + ann['id'] = id+1 + ann['iscrowd'] = 0 + print 'DONE (t=%0.2fs)'%(time.time()- tic) + + res.dataset['annotations'] = anns + res.createIndex() + return res + + def download( self, tarDir = None, imgIds = [] ): + ''' + Download COCO images from mscoco.org server. 
+ :param tarDir (str): COCO results directory name + imgIds (list): images to be downloaded + :return: + ''' + if tarDir is None: + print 'Please specify target directory' + return -1 + if len(imgIds) == 0: + imgs = self.imgs.values() + else: + imgs = self.loadImgs(imgIds) + N = len(imgs) + if not os.path.exists(tarDir): + os.makedirs(tarDir) + for i, img in enumerate(imgs): + tic = time.time() + fname = os.path.join(tarDir, img['file_name']) + if not os.path.exists(fname): + urllib.urlretrieve(img['coco_url'], fname) + print 'downloaded %d/%d images (t=%.1fs)'%(i, N, time.time()- tic) diff --git a/lib/pycocotools/cocoeval.py b/lib/pycocotools/cocoeval.py new file mode 100644 index 0000000..f389eb0 --- /dev/null +++ b/lib/pycocotools/cocoeval.py @@ -0,0 +1,444 @@ +__author__ = 'tsungyi' + +import numpy as np +import datetime +import time +from collections import defaultdict +import mask +import copy + +class COCOeval: + # Interface for evaluating detection on the Microsoft COCO dataset. + # + # The usage for CocoEval is as follows: + # cocoGt=..., cocoDt=... # load dataset and results + # E = CocoEval(cocoGt,cocoDt); # initialize CocoEval object + # E.params.recThrs = ...; # set parameters as desired + # E.evaluate(); # run per image evaluation + # E.accumulate(); # accumulate per image results + # E.summarize(); # display summary metrics of results + # For example usage see evalDemo.m and http://mscoco.org/. + # + # The evaluation parameters are as follows (defaults in brackets): + # imgIds - [all] N img ids to use for evaluation + # catIds - [all] K cat ids to use for evaluation + # iouThrs - [.5:.05:.95] T=10 IoU thresholds for evaluation + # recThrs - [0:.01:1] R=101 recall thresholds for evaluation + # areaRng - [...] A=4 object area ranges for evaluation + # maxDets - [1 10 100] M=3 thresholds on max detections per image + # useSegm - [1] if true evaluate against ground-truth segments + # useCats - [1] if true use category labels for evaluation # Note: if useSegm=0 the evaluation is run on bounding boxes. + # Note: if useCats=0 category labels are ignored as in proposal scoring. + # Note: multiple areaRngs [Ax2] and maxDets [Mx1] can be specified. + # + # evaluate(): evaluates detections on every image and every category and + # concats the results into the "evalImgs" with fields: + # dtIds - [1xD] id for each of the D detections (dt) + # gtIds - [1xG] id for each of the G ground truths (gt) + # dtMatches - [TxD] matching gt id at each IoU or 0 + # gtMatches - [TxG] matching dt id at each IoU or 0 + # dtScores - [1xD] confidence of each dt + # gtIgnore - [1xG] ignore flag for each gt + # dtIgnore - [TxD] ignore flag for each dt at each IoU + # + # accumulate(): accumulates the per-image, per-category evaluation + # results in "evalImgs" into the dictionary "eval" with fields: + # params - parameters used for evaluation + # date - date evaluation was performed + # counts - [T,R,K,A,M] parameter dimensions (see above) + # precision - [TxRxKxAxM] precision for every evaluation setting + # recall - [TxKxAxM] max recall for every evaluation setting + # Note: precision and recall==-1 for settings with no gt objects. + # + # See also coco, mask, pycocoDemo, pycocoEvalDemo + # + # Microsoft COCO Toolbox. version 2.0 + # Data, paper, and tutorials available at: http://mscoco.org/ + # Code written by Piotr Dollar and Tsung-Yi Lin, 2015. 
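[Editor's note: the comment block above describes the intended evaluation workflow; below is a minimal sketch of that loop, assuming COCO-format annotation and result files exist at the hypothetical paths shown.]

from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval

ann_file = 'annotations/instances_val2014.json'    # ground truth (example path)
res_file = 'detections_val2014_results.json'       # detector output in COCO result format

coco_gt = COCO(ann_file)               # load ground truth and build indices
coco_dt = coco_gt.loadRes(res_file)    # wrap results in a COCO API object

E = COCOeval(coco_gt, coco_dt)         # this vendored copy defaults to bbox eval (useSegm=0)
E.params.imgIds = coco_gt.getImgIds()  # evaluate on every image in the set
E.evaluate()                           # per-image, per-category matching
E.accumulate()                         # build the precision/recall tables
E.summarize()                          # print the 12 standard AP/AR numbers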
+ # Licensed under the Simplified BSD License [see coco/license.txt] + def __init__(self, cocoGt=None, cocoDt=None): + ''' + Initialize CocoEval using coco APIs for gt and dt + :param cocoGt: coco object with ground truth annotations + :param cocoDt: coco object with detection results + :return: None + ''' + self.cocoGt = cocoGt # ground truth COCO API + self.cocoDt = cocoDt # detections COCO API + self.params = {} # evaluation parameters + self.evalImgs = defaultdict(list) # per-image per-category evaluation results [KxAxI] elements + self.eval = {} # accumulated evaluation results + self._gts = defaultdict(list) # gt for evaluation + self._dts = defaultdict(list) # dt for evaluation + self.params = Params() # parameters + self._paramsEval = {} # parameters for evaluation + self.stats = [] # result summarization + self.ious = {} # ious between all gts and dts + if not cocoGt is None: + self.params.imgIds = sorted(cocoGt.getImgIds()) + self.params.catIds = sorted(cocoGt.getCatIds()) + + + def _prepare(self): + ''' + Prepare ._gts and ._dts for evaluation based on params + :return: None + ''' + # + def _toMask(objs, coco): + # modify segmentation by reference + for obj in objs: + t = coco.imgs[obj['image_id']] + if type(obj['segmentation']) == list: + if type(obj['segmentation'][0]) == dict: + print 'debug' + obj['segmentation'] = mask.frPyObjects(obj['segmentation'],t['height'],t['width']) + if len(obj['segmentation']) == 1: + obj['segmentation'] = obj['segmentation'][0] + else: + # an object can have multiple polygon regions + # merge them into one RLE mask + obj['segmentation'] = mask.merge(obj['segmentation']) + elif type(obj['segmentation']) == dict and type(obj['segmentation']['counts']) == list: + obj['segmentation'] = mask.frPyObjects([obj['segmentation']],t['height'],t['width'])[0] + elif type(obj['segmentation']) == dict and \ + type(obj['segmentation']['counts'] == unicode or type(obj['segmentation']['counts']) == str): + pass + else: + raise Exception('segmentation format not supported.') + p = self.params + if p.useCats: + gts=self.cocoGt.loadAnns(self.cocoGt.getAnnIds(imgIds=p.imgIds, catIds=p.catIds)) + dts=self.cocoDt.loadAnns(self.cocoDt.getAnnIds(imgIds=p.imgIds, catIds=p.catIds)) + else: + gts=self.cocoGt.loadAnns(self.cocoGt.getAnnIds(imgIds=p.imgIds)) + dts=self.cocoDt.loadAnns(self.cocoDt.getAnnIds(imgIds=p.imgIds)) + + if p.useSegm: + _toMask(gts, self.cocoGt) + _toMask(dts, self.cocoDt) + self._gts = defaultdict(list) # gt for evaluation + self._dts = defaultdict(list) # dt for evaluation + for gt in gts: + self._gts[gt['image_id'], gt['category_id']].append(gt) + for dt in dts: + self._dts[dt['image_id'], dt['category_id']].append(dt) + self.evalImgs = defaultdict(list) # per-image per-category evaluation results + self.eval = {} # accumulated evaluation results + + def evaluate(self): + ''' + Run per image evaluation on given images and store results (a list of dict) in self.evalImgs + :return: None + ''' + tic = time.time() + print 'Running per image evaluation... 
' + p = self.params + p.imgIds = list(np.unique(p.imgIds)) + if p.useCats: + p.catIds = list(np.unique(p.catIds)) + p.maxDets = sorted(p.maxDets) + self.params=p + + self._prepare() + # loop through images, area range, max detection number + catIds = p.catIds if p.useCats else [-1] + + computeIoU = self.computeIoU + self.ious = {(imgId, catId): computeIoU(imgId, catId) \ + for imgId in p.imgIds + for catId in catIds} + + evaluateImg = self.evaluateImg + maxDet = p.maxDets[-1] + self.evalImgs = [evaluateImg(imgId, catId, areaRng, maxDet) + for catId in catIds + for areaRng in p.areaRng + for imgId in p.imgIds + ] + self._paramsEval = copy.deepcopy(self.params) + toc = time.time() + print 'DONE (t=%0.2fs).'%(toc-tic) + + def computeIoU(self, imgId, catId): + p = self.params + if p.useCats: + gt = self._gts[imgId,catId] + dt = self._dts[imgId,catId] + else: + gt = [_ for cId in p.catIds for _ in self._gts[imgId,cId]] + dt = [_ for cId in p.catIds for _ in self._dts[imgId,cId]] + if len(gt) == 0 and len(dt) ==0: + return [] + dt = sorted(dt, key=lambda x: -x['score']) + if len(dt) > p.maxDets[-1]: + dt=dt[0:p.maxDets[-1]] + + if p.useSegm: + g = [g['segmentation'] for g in gt] + d = [d['segmentation'] for d in dt] + else: + g = [g['bbox'] for g in gt] + d = [d['bbox'] for d in dt] + + # compute iou between each dt and gt region + iscrowd = [int(o['iscrowd']) for o in gt] + ious = mask.iou(d,g,iscrowd) + return ious + + def evaluateImg(self, imgId, catId, aRng, maxDet): + ''' + perform evaluation for single category and image + :return: dict (single image results) + ''' + # + p = self.params + if p.useCats: + gt = self._gts[imgId,catId] + dt = self._dts[imgId,catId] + else: + gt = [_ for cId in p.catIds for _ in self._gts[imgId,cId]] + dt = [_ for cId in p.catIds for _ in self._dts[imgId,cId]] + if len(gt) == 0 and len(dt) ==0: + return None + + for g in gt: + if 'ignore' not in g: + g['ignore'] = 0 + if g['iscrowd'] == 1 or g['ignore'] or (g['area']aRng[1]): + g['_ignore'] = 1 + else: + g['_ignore'] = 0 + + # sort dt highest score first, sort gt ignore last + # gt = sorted(gt, key=lambda x: x['_ignore']) + gtind = [ind for (ind, g) in sorted(enumerate(gt), key=lambda (ind, g): g['_ignore']) ] + + gt = [gt[ind] for ind in gtind] + dt = sorted(dt, key=lambda x: -x['score'])[0:maxDet] + iscrowd = [int(o['iscrowd']) for o in gt] + # load computed ious + N_iou = len(self.ious[imgId, catId]) + ious = self.ious[imgId, catId][0:maxDet, np.array(gtind)] if N_iou >0 else self.ious[imgId, catId] + + T = len(p.iouThrs) + G = len(gt) + D = len(dt) + gtm = np.zeros((T,G)) + dtm = np.zeros((T,D)) + gtIg = np.array([g['_ignore'] for g in gt]) + dtIg = np.zeros((T,D)) + if not len(ious)==0: + for tind, t in enumerate(p.iouThrs): + for dind, d in enumerate(dt): + # information about best match so far (m=-1 -> unmatched) + iou = min([t,1-1e-10]) + m = -1 + for gind, g in enumerate(gt): + # if this gt already matched, and not a crowd, continue + if gtm[tind,gind]>0 and not iscrowd[gind]: + continue + # if dt matched to reg gt, and on ignore gt, stop + if m>-1 and gtIg[m]==0 and gtIg[gind]==1: + break + # continue to next gt unless better match made + if ious[dind,gind] < iou: + continue + # match successful and best so far, store appropriately + iou=ious[dind,gind] + m=gind + # if match made store id of match for both dt and gt + if m ==-1: + continue + dtIg[tind,dind] = gtIg[m] + dtm[tind,dind] = gt[m]['id'] + gtm[tind,m] = d['id'] + # set unmatched detections outside of area range to ignore + a = 
np.array([d['area']aRng[1] for d in dt]).reshape((1, len(dt))) + dtIg = np.logical_or(dtIg, np.logical_and(dtm==0, np.repeat(a,T,0))) + # store results for given image and category + return { + 'image_id': imgId, + 'category_id': catId, + 'aRng': aRng, + 'maxDet': maxDet, + 'dtIds': [d['id'] for d in dt], + 'gtIds': [g['id'] for g in gt], + 'dtMatches': dtm, + 'gtMatches': gtm, + 'dtScores': [d['score'] for d in dt], + 'gtIgnore': gtIg, + 'dtIgnore': dtIg, + } + + def accumulate(self, p = None): + ''' + Accumulate per image evaluation results and store the result in self.eval + :param p: input params for evaluation + :return: None + ''' + print 'Accumulating evaluation results... ' + tic = time.time() + if not self.evalImgs: + print 'Please run evaluate() first' + # allows input customized parameters + if p is None: + p = self.params + p.catIds = p.catIds if p.useCats == 1 else [-1] + T = len(p.iouThrs) + R = len(p.recThrs) + K = len(p.catIds) if p.useCats else 1 + A = len(p.areaRng) + M = len(p.maxDets) + precision = -np.ones((T,R,K,A,M)) # -1 for the precision of absent categories + recall = -np.ones((T,K,A,M)) + + # create dictionary for future indexing + _pe = self._paramsEval + catIds = _pe.catIds if _pe.useCats else [-1] + setK = set(catIds) + setA = set(map(tuple, _pe.areaRng)) + setM = set(_pe.maxDets) + setI = set(_pe.imgIds) + # get inds to evaluate + k_list = [n for n, k in enumerate(p.catIds) if k in setK] + m_list = [m for n, m in enumerate(p.maxDets) if m in setM] + a_list = [n for n, a in enumerate(map(lambda x: tuple(x), p.areaRng)) if a in setA] + i_list = [n for n, i in enumerate(p.imgIds) if i in setI] + # K0 = len(_pe.catIds) + I0 = len(_pe.imgIds) + A0 = len(_pe.areaRng) + # retrieve E at each category, area range, and max number of detections + for k, k0 in enumerate(k_list): + Nk = k0*A0*I0 + for a, a0 in enumerate(a_list): + Na = a0*I0 + for m, maxDet in enumerate(m_list): + E = [self.evalImgs[Nk+Na+i] for i in i_list] + E = filter(None, E) + if len(E) == 0: + continue + dtScores = np.concatenate([e['dtScores'][0:maxDet] for e in E]) + + # different sorting method generates slightly different results. + # mergesort is used to be consistent as Matlab implementation. 
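[Editor's note: accumulate() stores precision as a [T,R,K,A,M] array indexed by IoU threshold, recall threshold, category, area range, and max-detections. A small sketch of slicing it by hand; the index choices mirror the defaults in the Params class further down and assume evaluate() and accumulate() have already run on E.]

import numpy as np

def ap_at_iou50(E):
    """Mean precision over recall thresholds and categories at IoU=0.5,
    area range 'all' (index 0) and maxDets=100 (index 2)."""
    p = E.params
    t = np.where(np.isclose(p.iouThrs, 0.5))[0]    # IoU threshold index
    prec = E.eval['precision'][t, :, :, 0, 2]      # [1, R, K] slice
    valid = prec[prec > -1]                        # -1 marks absent categories
    return valid.mean() if valid.size else float('nan')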
+ inds = np.argsort(-dtScores, kind='mergesort') + + dtm = np.concatenate([e['dtMatches'][:,0:maxDet] for e in E], axis=1)[:,inds] + dtIg = np.concatenate([e['dtIgnore'][:,0:maxDet] for e in E], axis=1)[:,inds] + gtIg = np.concatenate([e['gtIgnore'] for e in E]) + npig = len([ig for ig in gtIg if ig == 0]) + if npig == 0: + continue + tps = np.logical_and( dtm, np.logical_not(dtIg) ) + fps = np.logical_and(np.logical_not(dtm), np.logical_not(dtIg) ) + + tp_sum = np.cumsum(tps, axis=1).astype(dtype=np.float) + fp_sum = np.cumsum(fps, axis=1).astype(dtype=np.float) + for t, (tp, fp) in enumerate(zip(tp_sum, fp_sum)): + tp = np.array(tp) + fp = np.array(fp) + nd = len(tp) + rc = tp / npig + pr = tp / (fp+tp+np.spacing(1)) + q = np.zeros((R,)) + + if nd: + recall[t,k,a,m] = rc[-1] + else: + recall[t,k,a,m] = 0 + + # numpy is slow without cython optimization for accessing elements + # use python array gets significant speed improvement + pr = pr.tolist(); q = q.tolist() + + for i in range(nd-1, 0, -1): + if pr[i] > pr[i-1]: + pr[i-1] = pr[i] + + inds = np.searchsorted(rc, p.recThrs) + try: + for ri, pi in enumerate(inds): + q[ri] = pr[pi] + except: + pass + precision[t,:,k,a,m] = np.array(q) + self.eval = { + 'params': p, + 'counts': [T, R, K, A, M], + 'date': datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + 'precision': precision, + 'recall': recall, + } + toc = time.time() + print 'DONE (t=%0.2fs).'%( toc-tic ) + + def summarize(self): + ''' + Compute and display summary metrics for evaluation results. + Note this functin can *only* be applied on the default parameter setting + ''' + def _summarize( ap=1, iouThr=None, areaRng='all', maxDets=100 ): + p = self.params + iStr = ' {:<18} {} @[ IoU={:<9} | area={:>6} | maxDets={:>3} ] = {}' + titleStr = 'Average Precision' if ap == 1 else 'Average Recall' + typeStr = '(AP)' if ap==1 else '(AR)' + iouStr = '%0.2f:%0.2f'%(p.iouThrs[0], p.iouThrs[-1]) if iouThr is None else '%0.2f'%(iouThr) + areaStr = areaRng + maxDetsStr = '%d'%(maxDets) + + aind = [i for i, aRng in enumerate(['all', 'small', 'medium', 'large']) if aRng == areaRng] + mind = [i for i, mDet in enumerate([1, 10, 100]) if mDet == maxDets] + if ap == 1: + # dimension of precision: [TxRxKxAxM] + s = self.eval['precision'] + # IoU + if iouThr is not None: + t = np.where(iouThr == p.iouThrs)[0] + s = s[t] + # areaRng + s = s[:,:,:,aind,mind] + else: + # dimension of recall: [TxKxAxM] + s = self.eval['recall'] + s = s[:,:,aind,mind] + if len(s[s>-1])==0: + mean_s = -1 + else: + mean_s = np.mean(s[s>-1]) + print iStr.format(titleStr, typeStr, iouStr, areaStr, maxDetsStr, '%.3f'%(float(mean_s))) + return mean_s + + if not self.eval: + raise Exception('Please run accumulate() first') + self.stats = np.zeros((12,)) + self.stats[0] = _summarize(1) + self.stats[1] = _summarize(1,iouThr=.5) + self.stats[2] = _summarize(1,iouThr=.75) + self.stats[3] = _summarize(1,areaRng='small') + self.stats[4] = _summarize(1,areaRng='medium') + self.stats[5] = _summarize(1,areaRng='large') + self.stats[6] = _summarize(0,maxDets=1) + self.stats[7] = _summarize(0,maxDets=10) + self.stats[8] = _summarize(0,maxDets=100) + self.stats[9] = _summarize(0,areaRng='small') + self.stats[10] = _summarize(0,areaRng='medium') + self.stats[11] = _summarize(0,areaRng='large') + + def __str__(self): + self.summarize() + +class Params: + ''' + Params for coco evaluation api + ''' + def __init__(self): + self.imgIds = [] + self.catIds = [] + # np.arange causes trouble. 
the data point on arange is slightly larger than the true value + self.iouThrs = np.linspace(.5, 0.95, np.round((0.95-.5)/.05)+1, endpoint=True) + self.recThrs = np.linspace(.0, 1.00, np.round((1.00-.0)/.01)+1, endpoint=True) + self.maxDets = [1,10,100] + self.areaRng = [ [0**2,1e5**2], [0**2, 32**2], [32**2, 96**2], [96**2, 1e5**2] ] + self.useSegm = 0 + self.useCats = 1 \ No newline at end of file diff --git a/lib/pycocotools/license.txt b/lib/pycocotools/license.txt new file mode 100644 index 0000000..495c163 --- /dev/null +++ b/lib/pycocotools/license.txt @@ -0,0 +1,26 @@ +Copyright (c) 2014, Piotr Dollar and Tsung-Yi Lin +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The views and conclusions contained in the software and documentation are those +of the authors and should not be interpreted as representing official policies, +either expressed or implied, of the FreeBSD Project. diff --git a/lib/pycocotools/mask.py b/lib/pycocotools/mask.py new file mode 100644 index 0000000..6732546 --- /dev/null +++ b/lib/pycocotools/mask.py @@ -0,0 +1,82 @@ +__author__ = 'tsungyi' + +import pycocotools._mask as _mask + +# Interface for manipulating masks stored in RLE format. +# +# RLE is a simple yet efficient format for storing binary masks. RLE +# first divides a vector (or vectorized image) into a series of piecewise +# constant regions and then for each piece simply stores the length of +# that piece. For example, given M=[0 0 1 1 1 0 1] the RLE counts would +# be [2 3 1 1], or for M=[1 1 1 1 1 1 0] the counts would be [0 6 1] +# (note that the odd counts are always the numbers of zeros). Instead of +# storing the counts directly, additional compression is achieved with a +# variable bitrate representation based on a common scheme called LEB128. +# +# Compression is greatest given large piecewise constant regions. +# Specifically, the size of the RLE is proportional to the number of +# *boundaries* in M (or for an image the number of boundaries in the y +# direction). Assuming fairly simple shapes, the RLE representation is +# O(sqrt(n)) where n is number of pixels in the object. Hence space usage +# is substantially lower, especially for large simple objects (large n). +# +# Many common operations on masks can be computed directly using the RLE +# (without need for decoding). 
This includes computations such as area, +# union, intersection, etc. All of these operations are linear in the +# size of the RLE, in other words they are O(sqrt(n)) where n is the area +# of the object. Computing these operations on the original mask is O(n). +# Thus, using the RLE can result in substantial computational savings. +# +# The following API functions are defined: +# encode - Encode binary masks using RLE. +# decode - Decode binary masks encoded via RLE. +# merge - Compute union or intersection of encoded masks. +# iou - Compute intersection over union between masks. +# area - Compute area of encoded masks. +# toBbox - Get bounding boxes surrounding encoded masks. +# frPyObjects - Convert polygon, bbox, and uncompressed RLE to encoded RLE mask. +# +# Usage: +# Rs = encode( masks ) +# masks = decode( Rs ) +# R = merge( Rs, intersect=false ) +# o = iou( dt, gt, iscrowd ) +# a = area( Rs ) +# bbs = toBbox( Rs ) +# Rs = frPyObjects( [pyObjects], h, w ) +# +# In the API the following formats are used: +# Rs - [dict] Run-length encoding of binary masks +# R - dict Run-length encoding of binary mask +# masks - [hxwxn] Binary mask(s) (must have type np.ndarray(dtype=uint8) in column-major order) +# iscrowd - [nx1] list of np.ndarray. 1 indicates corresponding gt image has crowd region to ignore +# bbs - [nx4] Bounding box(es) stored as [x y w h] +# poly - Polygon stored as [[x1 y1 x2 y2...],[x1 y1 ...],...] (2D list) +# dt,gt - May be either bounding boxes or encoded masks +# Both poly and bbs are 0-indexed (bbox=[0 0 1 1] encloses first pixel). +# +# Finally, a note about the intersection over union (iou) computation. +# The standard iou of a ground truth (gt) and detected (dt) object is +# iou(gt,dt) = area(intersect(gt,dt)) / area(union(gt,dt)) +# For "crowd" regions, we use a modified criteria. If a gt object is +# marked as "iscrowd", we allow a dt to match any subregion of the gt. +# Choosing gt' in the crowd gt that best matches the dt can be done using +# gt'=intersect(dt,gt). Since by definition union(gt',dt)=dt, computing +# iou(gt,dt,iscrowd) = iou(gt',dt) = area(intersect(gt,dt)) / area(dt) +# For crowd gt regions we use this modified criteria above for the iou. +# +# To compile run "python setup.py build_ext --inplace" +# Please do not contact us for help with compiling. +# +# Microsoft COCO Toolbox. version 2.0 +# Data, paper, and tutorials available at: http://mscoco.org/ +# Code written by Piotr Dollar and Tsung-Yi Lin, 2015. +# Licensed under the Simplified BSD License [see coco/license.txt] + +encode = _mask.encode +decode = _mask.decode +iou = _mask.iou +merge = _mask.merge +area = _mask.area +toBbox = _mask.toBbox +frPyObjects = _mask.frPyObjects \ No newline at end of file diff --git a/lib/pycocotools/maskApi.c b/lib/pycocotools/maskApi.c new file mode 100644 index 0000000..2b2d891 --- /dev/null +++ b/lib/pycocotools/maskApi.c @@ -0,0 +1,208 @@ +/************************************************************************** +* Microsoft COCO Toolbox. version 2.0 +* Data, paper, and tutorials available at: http://mscoco.org/ +* Code written by Piotr Dollar and Tsung-Yi Lin, 2015. +* Licensed under the Simplified BSD License [see coco/license.txt] +**************************************************************************/ +#include "maskApi.h" +#include +#include + +uint umin( uint a, uint b ) { return (ab) ? 
a : b; } + +void rleInit( RLE *R, siz h, siz w, siz m, uint *cnts ) { + R->h=h; R->w=w; R->m=m; R->cnts=(m==0)?0:malloc(sizeof(uint)*m); + if(cnts) for(siz j=0; jcnts[j]=cnts[j]; +} + +void rleFree( RLE *R ) { + free(R->cnts); R->cnts=0; +} + +void rlesInit( RLE **R, siz n ) { + *R = (RLE*) malloc(sizeof(RLE)*n); + for(siz i=0; i0 ) { + c=umin(ca,cb); cc+=c; ct=0; + ca-=c; if(!ca && a0) { + crowd=iscrowd!=NULL && iscrowd[g]; + if(dt[d].h!=gt[g].h || dt[d].w!=gt[g].w) { o[g*m+d]=-1; continue; } + siz ka, kb, a, b; uint c, ca, cb, ct, i, u; bool va, vb; + ca=dt[d].cnts[0]; ka=dt[d].m; va=vb=0; + cb=gt[g].cnts[0]; kb=gt[g].m; a=b=1; i=u=0; ct=1; + while( ct>0 ) { + c=umin(ca,cb); if(va||vb) { u+=c; if(va&&vb) i+=c; } ct=0; + ca-=c; if(!ca && ad?1:c=dy && xs>xe) || (dxye); + if(flip) { t=xs; xs=xe; xe=t; t=ys; ys=ye; ye=t; } + s = dx>=dy ? (double)(ye-ys)/dx : (double)(xe-xs)/dy; + if(dx>=dy) for( int d=0; d<=dx; d++ ) { + t=flip?dx-d:d; u[m]=t+xs; v[m]=(int)(ys+s*t+.5); m++; + } else for( int d=0; d<=dy; d++ ) { + t=flip?dy-d:d; v[m]=t+ys; u[m]=(int)(xs+s*t+.5); m++; + } + } + // get points along y-boundary and downsample + free(x); free(y); k=m; m=0; double xd, yd; + x=malloc(sizeof(int)*k); y=malloc(sizeof(int)*k); + for( j=1; jw-1 ) continue; + yd=(double)(v[j]h) yd=h; yd=ceil(yd); + x[m]=(int) xd; y[m]=(int) yd; m++; + } + // compute rle encoding given y-boundary points + k=m; a=malloc(sizeof(uint)*(k+1)); + for( j=0; j0) b[m++]=a[j++]; else { + j++; if(jm, p=0; long x; bool more; + char *s=malloc(sizeof(char)*m*6); + for( i=0; icnts[i]; if(i>2) x-=(long) R->cnts[i-2]; more=1; + while( more ) { + char c=x & 0x1f; x >>= 5; more=(c & 0x10) ? x!=-1 : x!=0; + if(more) c |= 0x20; c+=48; s[p++]=c; + } + } + s[p]=0; return s; +} + +void rleFrString( RLE *R, char *s, siz h, siz w ) { + siz m=0, p=0, k; long x; bool more; uint *cnts; + while( s[m] ) m++; cnts=malloc(sizeof(uint)*m); m=0; + while( s[p] ) { + x=0; k=0; more=1; + while( more ) { + char c=s[p]-48; x |= (c & 0x1f) << 5*k; + more = c & 0x20; p++; k++; + if(!more && (c & 0x10)) x |= -1 << 5*k; + } + if(m>2) x+=(long) cnts[m-2]; cnts[m++]=(uint) x; + } + rleInit(R,h,w,m,cnts); free(cnts); +} diff --git a/lib/pycocotools/maskApi.h b/lib/pycocotools/maskApi.h new file mode 100644 index 0000000..ff16116 --- /dev/null +++ b/lib/pycocotools/maskApi.h @@ -0,0 +1,55 @@ +/************************************************************************** +* Microsoft COCO Toolbox. version 2.0 +* Data, paper, and tutorials available at: http://mscoco.org/ +* Code written by Piotr Dollar and Tsung-Yi Lin, 2015. +* Licensed under the Simplified BSD License [see coco/license.txt] +**************************************************************************/ +#pragma once +#include + +typedef unsigned int uint; +typedef unsigned long siz; +typedef unsigned char byte; +typedef double* BB; +typedef struct { siz h, w, m; uint *cnts; } RLE; + +// Initialize/destroy RLE. +void rleInit( RLE *R, siz h, siz w, siz m, uint *cnts ); +void rleFree( RLE *R ); + +// Initialize/destroy RLE array. +void rlesInit( RLE **R, siz n ); +void rlesFree( RLE **R, siz n ); + +// Encode binary masks using RLE. +void rleEncode( RLE *R, const byte *mask, siz h, siz w, siz n ); + +// Decode binary masks encoded via RLE. +void rleDecode( const RLE *R, byte *mask, siz n ); + +// Compute union or intersection of encoded masks. +void rleMerge( const RLE *R, RLE *M, siz n, bool intersect ); + +// Compute area of encoded masks. 
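[Editor's note: the RLE scheme documented in mask.py above is easiest to see on a tiny mask. A hypothetical round trip through the Python wrappers (upstream pycocotools import shown; within this tree the module is simply `mask`). encode() here expects an h x w x n uint8 array in column-major order.]

import numpy as np
from pycocotools import mask as maskUtils

# One 4x4 mask with a 2x2 block of ones, Fortran-ordered as required.
m = np.zeros((4, 4, 1), dtype=np.uint8, order='F')
m[1:3, 1:3, 0] = 1

rles = maskUtils.encode(m)          # list with one {'size': [4, 4], 'counts': ...} dict
print(maskUtils.area(rles))         # -> [4]
print(maskUtils.toBbox(rles))       # -> [[1. 1. 2. 2.]] as [x, y, w, h]

m2 = maskUtils.decode(rles)         # decode returns an h x w x n array again
assert (m2[:, :, 0] == m[:, :, 0]).all()   # lossless round trip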
+void rleArea( const RLE *R, siz n, uint *a ); + +// Compute intersection over union between masks. +void rleIou( RLE *dt, RLE *gt, siz m, siz n, byte *iscrowd, double *o ); + +// Compute intersection over union between bounding boxes. +void bbIou( BB dt, BB gt, siz m, siz n, byte *iscrowd, double *o ); + +// Get bounding boxes surrounding encoded masks. +void rleToBbox( const RLE *R, BB bb, siz n ); + +// Convert bounding boxes to encoded masks. +void rleFrBbox( RLE *R, const BB bb, siz h, siz w, siz n ); + +// Convert polygon to encoded mask. +void rleFrPoly( RLE *R, const double *xy, siz k, siz h, siz w ); + +// Get compressed string representation of encoded mask. +char* rleToString( const RLE *R ); + +// Convert from compressed string representation of encoded mask. +void rleFrString( RLE *R, char *s, siz h, siz w ); diff --git a/lib/roi_data_layer/__init__.py b/lib/roi_data_layer/__init__.py new file mode 100644 index 0000000..7ba6a65 --- /dev/null +++ b/lib/roi_data_layer/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- diff --git a/lib/roi_data_layer/layer.py b/lib/roi_data_layer/layer.py new file mode 100644 index 0000000..04f4172 --- /dev/null +++ b/lib/roi_data_layer/layer.py @@ -0,0 +1,196 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""The data layer used during training to train a Fast R-CNN network. + +RoIDataLayer implements a Caffe Python layer. +""" + +import caffe +from fast_rcnn.config import cfg +from roi_data_layer.minibatch import get_minibatch +import numpy as np +import yaml +from multiprocessing import Process, Queue + +class RoIDataLayer(caffe.Layer): + """Fast R-CNN data layer used for training.""" + + def _shuffle_roidb_inds(self): + """Randomly permute the training roidb.""" + if cfg.TRAIN.ASPECT_GROUPING: + widths = np.array([r['width'] for r in self._roidb]) + heights = np.array([r['height'] for r in self._roidb]) + horz = (widths >= heights) + vert = np.logical_not(horz) + horz_inds = np.where(horz)[0] + vert_inds = np.where(vert)[0] + inds = np.hstack(( + np.random.permutation(horz_inds), + np.random.permutation(vert_inds))) + inds = np.reshape(inds, (-1, 2)) + row_perm = np.random.permutation(np.arange(inds.shape[0])) + inds = np.reshape(inds[row_perm, :], (-1,)) + self._perm = inds + else: + self._perm = np.random.permutation(np.arange(len(self._roidb))) + self._cur = 0 + + def _get_next_minibatch_inds(self): + """Return the roidb indices for the next minibatch.""" + if self._cur + cfg.TRAIN.IMS_PER_BATCH >= len(self._roidb): + self._shuffle_roidb_inds() + + db_inds = self._perm[self._cur:self._cur + cfg.TRAIN.IMS_PER_BATCH] + self._cur += cfg.TRAIN.IMS_PER_BATCH + return db_inds + + def _get_next_minibatch(self): + """Return the blobs to be used for the next minibatch. + + If cfg.TRAIN.USE_PREFETCH is True, then blobs will be computed in a + separate process and made available through self._blob_queue. 
+ """ + if cfg.TRAIN.USE_PREFETCH: + return self._blob_queue.get() + else: + db_inds = self._get_next_minibatch_inds() + minibatch_db = [self._roidb[i] for i in db_inds] + return get_minibatch(minibatch_db, self._num_classes) + + def set_roidb(self, roidb): + """Set the roidb to be used by this layer during training.""" + self._roidb = roidb + self._shuffle_roidb_inds() + if cfg.TRAIN.USE_PREFETCH: + self._blob_queue = Queue(10) + self._prefetch_process = BlobFetcher(self._blob_queue, + self._roidb, + self._num_classes) + self._prefetch_process.start() + # Terminate the child process when the parent exists + def cleanup(): + print 'Terminating BlobFetcher' + self._prefetch_process.terminate() + self._prefetch_process.join() + import atexit + atexit.register(cleanup) + + def setup(self, bottom, top): + """Setup the RoIDataLayer.""" + + # parse the layer parameter string, which must be valid YAML + layer_params = yaml.load(self.param_str) + + self._num_classes = layer_params['num_classes'] + + self._name_to_top_map = {} + + # data blob: holds a batch of N images, each with 3 channels + idx = 0 + top[idx].reshape(cfg.TRAIN.IMS_PER_BATCH, 3, + max(cfg.TRAIN.SCALES), cfg.TRAIN.MAX_SIZE) + self._name_to_top_map['data'] = idx + idx += 1 + + if cfg.TRAIN.HAS_RPN: + top[idx].reshape(1, 3) + self._name_to_top_map['im_info'] = idx + idx += 1 + + top[idx].reshape(1, 4) + self._name_to_top_map['gt_boxes'] = idx + idx += 1 + else: # not using RPN + # rois blob: holds R regions of interest, each is a 5-tuple + # (n, x1, y1, x2, y2) specifying an image batch index n and a + # rectangle (x1, y1, x2, y2) + top[idx].reshape(1, 5) + self._name_to_top_map['rois'] = idx + idx += 1 + + # labels blob: R categorical labels in [0, ..., K] for K foreground + # classes plus background + top[idx].reshape(1) + self._name_to_top_map['labels'] = idx + idx += 1 + + if cfg.TRAIN.BBOX_REG: + # bbox_targets blob: R bounding-box regression targets with 4 + # targets per class + top[idx].reshape(1, self._num_classes * 4) + self._name_to_top_map['bbox_targets'] = idx + idx += 1 + + # bbox_inside_weights blob: At most 4 targets per roi are active; + # thisbinary vector sepcifies the subset of active targets + top[idx].reshape(1, self._num_classes * 4) + self._name_to_top_map['bbox_inside_weights'] = idx + idx += 1 + + top[idx].reshape(1, self._num_classes * 4) + self._name_to_top_map['bbox_outside_weights'] = idx + idx += 1 + + print 'RoiDataLayer: name_to_top:', self._name_to_top_map + assert len(top) == len(self._name_to_top_map) + + def forward(self, bottom, top): + """Get blobs and copy them into this layer's top blob vector.""" + blobs = self._get_next_minibatch() + + for blob_name, blob in blobs.iteritems(): + top_ind = self._name_to_top_map[blob_name] + # Reshape net's input blobs + top[top_ind].reshape(*(blob.shape)) + # Copy data into net's input blobs + top[top_ind].data[...] 
= blob.astype(np.float32, copy=False) + + def backward(self, top, propagate_down, bottom): + """This layer does not propagate gradients.""" + pass + + def reshape(self, bottom, top): + """Reshaping happens during the call to forward.""" + pass + +class BlobFetcher(Process): + """Experimental class for prefetching blobs in a separate process.""" + def __init__(self, queue, roidb, num_classes): + super(BlobFetcher, self).__init__() + self._queue = queue + self._roidb = roidb + self._num_classes = num_classes + self._perm = None + self._cur = 0 + self._shuffle_roidb_inds() + # fix the random seed for reproducibility + np.random.seed(cfg.RNG_SEED) + + def _shuffle_roidb_inds(self): + """Randomly permute the training roidb.""" + # TODO(rbg): remove duplicated code + self._perm = np.random.permutation(np.arange(len(self._roidb))) + self._cur = 0 + + def _get_next_minibatch_inds(self): + """Return the roidb indices for the next minibatch.""" + # TODO(rbg): remove duplicated code + if self._cur + cfg.TRAIN.IMS_PER_BATCH >= len(self._roidb): + self._shuffle_roidb_inds() + + db_inds = self._perm[self._cur:self._cur + cfg.TRAIN.IMS_PER_BATCH] + self._cur += cfg.TRAIN.IMS_PER_BATCH + return db_inds + + def run(self): + print 'BlobFetcher started' + while True: + db_inds = self._get_next_minibatch_inds() + minibatch_db = [self._roidb[i] for i in db_inds] + blobs = get_minibatch(minibatch_db, self._num_classes) + self._queue.put(blobs) diff --git a/lib/roi_data_layer/minibatch.py b/lib/roi_data_layer/minibatch.py new file mode 100644 index 0000000..f4535b0 --- /dev/null +++ b/lib/roi_data_layer/minibatch.py @@ -0,0 +1,199 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Compute minibatch blobs for training a Fast R-CNN network.""" + +import numpy as np +import numpy.random as npr +import cv2 +from fast_rcnn.config import cfg +from utils.blob import prep_im_for_blob, im_list_to_blob + +def get_minibatch(roidb, num_classes): + """Given a roidb, construct a minibatch sampled from it.""" + num_images = len(roidb) + # Sample random scales to use for each image in this batch + random_scale_inds = npr.randint(0, high=len(cfg.TRAIN.SCALES), + size=num_images) + assert(cfg.TRAIN.BATCH_SIZE % num_images == 0), \ + 'num_images ({}) must divide BATCH_SIZE ({})'. 
\ + format(num_images, cfg.TRAIN.BATCH_SIZE) + rois_per_image = cfg.TRAIN.BATCH_SIZE / num_images + fg_rois_per_image = np.round(cfg.TRAIN.FG_FRACTION * rois_per_image) + + # Get the input image blob, formatted for caffe + im_blob, im_scales = _get_image_blob(roidb, random_scale_inds) + + blobs = {'data': im_blob} + + if cfg.TRAIN.HAS_RPN: + assert len(im_scales) == 1, "Single batch only" + assert len(roidb) == 1, "Single batch only" + # gt boxes: (x1, y1, x2, y2, cls) + gt_inds = np.where(roidb[0]['gt_classes'] != 0)[0] + gt_boxes = np.empty((len(gt_inds), 5), dtype=np.float32) + gt_boxes[:, 0:4] = roidb[0]['boxes'][gt_inds, :] * im_scales[0] + gt_boxes[:, 4] = roidb[0]['gt_classes'][gt_inds] + blobs['gt_boxes'] = gt_boxes + blobs['im_info'] = np.array( + [[im_blob.shape[2], im_blob.shape[3], im_scales[0]]], + dtype=np.float32) + else: # not using RPN + # Now, build the region of interest and label blobs + rois_blob = np.zeros((0, 5), dtype=np.float32) + labels_blob = np.zeros((0), dtype=np.float32) + bbox_targets_blob = np.zeros((0, 4 * num_classes), dtype=np.float32) + bbox_inside_blob = np.zeros(bbox_targets_blob.shape, dtype=np.float32) + # all_overlaps = [] + for im_i in xrange(num_images): + labels, overlaps, im_rois, bbox_targets, bbox_inside_weights \ + = _sample_rois(roidb[im_i], fg_rois_per_image, rois_per_image, + num_classes) + + # Add to RoIs blob + rois = _project_im_rois(im_rois, im_scales[im_i]) + batch_ind = im_i * np.ones((rois.shape[0], 1)) + rois_blob_this_image = np.hstack((batch_ind, rois)) + rois_blob = np.vstack((rois_blob, rois_blob_this_image)) + + # Add to labels, bbox targets, and bbox loss blobs + labels_blob = np.hstack((labels_blob, labels)) + bbox_targets_blob = np.vstack((bbox_targets_blob, bbox_targets)) + bbox_inside_blob = np.vstack((bbox_inside_blob, bbox_inside_weights)) + # all_overlaps = np.hstack((all_overlaps, overlaps)) + + # For debug visualizations + # _vis_minibatch(im_blob, rois_blob, labels_blob, all_overlaps) + + blobs['rois'] = rois_blob + blobs['labels'] = labels_blob + + if cfg.TRAIN.BBOX_REG: + blobs['bbox_targets'] = bbox_targets_blob + blobs['bbox_inside_weights'] = bbox_inside_blob + blobs['bbox_outside_weights'] = \ + np.array(bbox_inside_blob > 0).astype(np.float32) + + return blobs + +def _sample_rois(roidb, fg_rois_per_image, rois_per_image, num_classes): + """Generate a random sample of RoIs comprising foreground and background + examples. 
+ """ + # label = class RoI has max overlap with + labels = roidb['max_classes'] + overlaps = roidb['max_overlaps'] + rois = roidb['boxes'] + + # Select foreground RoIs as those with >= FG_THRESH overlap + fg_inds = np.where(overlaps >= cfg.TRAIN.FG_THRESH)[0] + # Guard against the case when an image has fewer than fg_rois_per_image + # foreground RoIs + fg_rois_per_this_image = np.minimum(fg_rois_per_image, fg_inds.size) + # Sample foreground regions without replacement + if fg_inds.size > 0: + fg_inds = npr.choice( + fg_inds, size=fg_rois_per_this_image, replace=False) + + # Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI) + bg_inds = np.where((overlaps < cfg.TRAIN.BG_THRESH_HI) & + (overlaps >= cfg.TRAIN.BG_THRESH_LO))[0] + # Compute number of background RoIs to take from this image (guarding + # against there being fewer than desired) + bg_rois_per_this_image = rois_per_image - fg_rois_per_this_image + bg_rois_per_this_image = np.minimum(bg_rois_per_this_image, + bg_inds.size) + # Sample foreground regions without replacement + if bg_inds.size > 0: + bg_inds = npr.choice( + bg_inds, size=bg_rois_per_this_image, replace=False) + + # The indices that we're selecting (both fg and bg) + keep_inds = np.append(fg_inds, bg_inds) + # Select sampled values from various arrays: + labels = labels[keep_inds] + # Clamp labels for the background RoIs to 0 + labels[fg_rois_per_this_image:] = 0 + overlaps = overlaps[keep_inds] + rois = rois[keep_inds] + + bbox_targets, bbox_inside_weights = _get_bbox_regression_labels( + roidb['bbox_targets'][keep_inds, :], num_classes) + + return labels, overlaps, rois, bbox_targets, bbox_inside_weights + +def _get_image_blob(roidb, scale_inds): + """Builds an input blob from the images in the roidb at the specified + scales. + """ + num_images = len(roidb) + processed_ims = [] + im_scales = [] + for i in xrange(num_images): + im = cv2.imread(roidb[i]['image']) + if roidb[i]['flipped']: + im = im[:, ::-1, :] + target_size = cfg.TRAIN.SCALES[scale_inds[i]] + im, im_scale = prep_im_for_blob(im, cfg.PIXEL_MEANS, target_size, + cfg.TRAIN.MAX_SIZE) + im_scales.append(im_scale) + processed_ims.append(im) + + # Create a blob to hold the input images + blob = im_list_to_blob(processed_ims) + + return blob, im_scales + +def _project_im_rois(im_rois, im_scale_factor): + """Project image RoIs into the rescaled training image.""" + rois = im_rois * im_scale_factor + return rois + +def _get_bbox_regression_labels(bbox_target_data, num_classes): + """Bounding-box regression targets are stored in a compact form in the + roidb. + + This function expands those targets into the 4-of-4*K representation used + by the network (i.e. only one class has non-zero targets). The loss weights + are similarly expanded. 
+ + Returns: + bbox_target_data (ndarray): N x 4K blob of regression targets + bbox_inside_weights (ndarray): N x 4K blob of loss weights + """ + clss = bbox_target_data[:, 0] + bbox_targets = np.zeros((clss.size, 4 * num_classes), dtype=np.float32) + bbox_inside_weights = np.zeros(bbox_targets.shape, dtype=np.float32) + inds = np.where(clss > 0)[0] + for ind in inds: + cls = clss[ind] + start = 4 * cls + end = start + 4 + bbox_targets[ind, start:end] = bbox_target_data[ind, 1:] + bbox_inside_weights[ind, start:end] = cfg.TRAIN.BBOX_INSIDE_WEIGHTS + return bbox_targets, bbox_inside_weights + +def _vis_minibatch(im_blob, rois_blob, labels_blob, overlaps): + """Visualize a mini-batch for debugging.""" + import matplotlib.pyplot as plt + for i in xrange(rois_blob.shape[0]): + rois = rois_blob[i, :] + im_ind = rois[0] + roi = rois[1:] + im = im_blob[im_ind, :, :, :].transpose((1, 2, 0)).copy() + im += cfg.PIXEL_MEANS + im = im[:, :, (2, 1, 0)] + im = im.astype(np.uint8) + cls = labels_blob[i] + plt.imshow(im) + print 'class: ', cls, ' overlap: ', overlaps[i] + plt.gca().add_patch( + plt.Rectangle((roi[0], roi[1]), roi[2] - roi[0], + roi[3] - roi[1], fill=False, + edgecolor='r', linewidth=3) + ) + plt.show() diff --git a/lib/roi_data_layer/roidb.py b/lib/roi_data_layer/roidb.py new file mode 100644 index 0000000..f2bd231 --- /dev/null +++ b/lib/roi_data_layer/roidb.py @@ -0,0 +1,133 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Transform a roidb into a trainable roidb by adding a bunch of metadata.""" + +import numpy as np +from fast_rcnn.config import cfg +from fast_rcnn.bbox_transform import bbox_transform +from utils.cython_bbox import bbox_overlaps +import PIL + +def prepare_roidb(imdb): + """Enrich the imdb's roidb by adding some derived quantities that + are useful for training. This function precomputes the maximum + overlap, taken over ground-truth boxes, between each ROI and + each ground-truth box. The class with maximum overlap is also + recorded. + """ + sizes = [PIL.Image.open(imdb.image_path_at(i)).size + for i in xrange(imdb.num_images)] + roidb = imdb.roidb + for i in xrange(len(imdb.image_index)): + roidb[i]['image'] = imdb.image_path_at(i) + roidb[i]['width'] = sizes[i][0] + roidb[i]['height'] = sizes[i][1] + # need gt_overlaps as a dense array for argmax + gt_overlaps = roidb[i]['gt_overlaps'].toarray() + # max overlap with gt over classes (columns) + max_overlaps = gt_overlaps.max(axis=1) + # gt class that had the max overlap + max_classes = gt_overlaps.argmax(axis=1) + roidb[i]['max_classes'] = max_classes + roidb[i]['max_overlaps'] = max_overlaps + # sanity checks + # max overlap of 0 => class should be zero (background) + zero_inds = np.where(max_overlaps == 0)[0] + assert all(max_classes[zero_inds] == 0) + # max overlap > 0 => class should not be zero (must be a fg class) + nonzero_inds = np.where(max_overlaps > 0)[0] + assert all(max_classes[nonzero_inds] != 0) + +def add_bbox_regression_targets(roidb): + """Add information needed to train bounding-box regressors.""" + assert len(roidb) > 0 + assert 'max_classes' in roidb[0], 'Did you call prepare_roidb first?' 
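[Editor's note: to make the compact-to-expanded target layout concrete, here is a small, self-contained numpy sketch of the same expansion performed by _get_bbox_regression_labels above; the numbers are invented.]

import numpy as np

def expand_bbox_targets(bbox_target_data, num_classes, inside_weight=(1.0, 1.0, 1.0, 1.0)):
    """Turn N x 5 rows of [cls, tx, ty, tw, th] into N x 4K targets where only
    the 4 slots belonging to each row's class are non-zero."""
    clss = bbox_target_data[:, 0].astype(int)
    targets = np.zeros((clss.size, 4 * num_classes), dtype=np.float32)
    weights = np.zeros_like(targets)
    for ind in np.where(clss > 0)[0]:
        start = 4 * clss[ind]
        targets[ind, start:start + 4] = bbox_target_data[ind, 1:]
        weights[ind, start:start + 4] = inside_weight
    return targets, weights

# Two RoIs, 3 classes (background + 2 foreground): class-2 targets land in columns 8..11.
compact = np.array([[2, 0.1, -0.2, 0.3, 0.0],    # foreground RoI of class 2
                    [0, 0.0,  0.0, 0.0, 0.0]])   # background RoI, no targets
t, w = expand_bbox_targets(compact, num_classes=3)
print(t[0, 8:12])   # -> [ 0.1 -0.2  0.3  0. ]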
+ + num_images = len(roidb) + # Infer number of classes from the number of columns in gt_overlaps + num_classes = 2 if cfg.TRAIN.AGONISTIC else roidb[0]['gt_overlaps'].shape[1] + for im_i in xrange(num_images): + rois = roidb[im_i]['boxes'] + max_overlaps = roidb[im_i]['max_overlaps'] + max_classes = roidb[im_i]['max_classes'] + roidb[im_i]['bbox_targets'] = \ + _compute_targets(rois, max_overlaps, max_classes) + + if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED: + # Use fixed / precomputed "means" and "stds" instead of empirical values + means = np.tile( + np.array(cfg.TRAIN.BBOX_NORMALIZE_MEANS), (num_classes, 1)) + stds = np.tile( + np.array(cfg.TRAIN.BBOX_NORMALIZE_STDS), (num_classes, 1)) + else: + # Compute values needed for means and stds + # var(x) = E(x^2) - E(x)^2 + class_counts = np.zeros((num_classes, 1)) + cfg.EPS + sums = np.zeros((num_classes, 4)) + squared_sums = np.zeros((num_classes, 4)) + for im_i in xrange(num_images): + targets = roidb[im_i]['bbox_targets'] + for cls in xrange(1, num_classes): + cls_inds = np.where(targets[:, 0] == cls)[0] + if cls_inds.size > 0: + class_counts[cls] += cls_inds.size + sums[cls, :] += targets[cls_inds, 1:].sum(axis=0) + squared_sums[cls, :] += \ + (targets[cls_inds, 1:] ** 2).sum(axis=0) + + means = sums / class_counts + stds = np.sqrt(squared_sums / class_counts - means ** 2) + + print 'bbox target means:' + print means + print means[1:, :].mean(axis=0) # ignore bg class + print 'bbox target stdevs:' + print stds + print stds[1:, :].mean(axis=0) # ignore bg class + + # Normalize targets + if cfg.TRAIN.BBOX_NORMALIZE_TARGETS: + print "Normalizing targets" + for im_i in xrange(num_images): + targets = roidb[im_i]['bbox_targets'] + for cls in xrange(1, num_classes): + cls_inds = np.where(targets[:, 0] == cls)[0] + roidb[im_i]['bbox_targets'][cls_inds, 1:] -= means[cls, :] + roidb[im_i]['bbox_targets'][cls_inds, 1:] /= stds[cls, :] + else: + print "NOT normalizing targets" + + # These values will be needed for making predictions + # (the predicts will need to be unnormalized and uncentered) + return means.ravel(), stds.ravel() + +def _compute_targets(rois, overlaps, labels): + """Compute bounding-box regression targets for an image.""" + # Indices of ground-truth ROIs + gt_inds = np.where(overlaps == 1)[0] + if len(gt_inds) == 0: + # Bail if the image has no ground-truth ROIs + return np.zeros((rois.shape[0], 5), dtype=np.float32) + # Indices of examples for which we try to make predictions + ex_inds = np.where(overlaps >= cfg.TRAIN.BBOX_THRESH)[0] + + # Get IoU overlap between each ex ROI and gt ROI + ex_gt_overlaps = bbox_overlaps( + np.ascontiguousarray(rois[ex_inds, :], dtype=np.float), + np.ascontiguousarray(rois[gt_inds, :], dtype=np.float)) + + # Find which gt ROI each ex ROI has max overlap with: + # this will be the ex ROI's gt target + gt_assignment = ex_gt_overlaps.argmax(axis=1) + gt_rois = rois[gt_inds[gt_assignment], :] + ex_rois = rois[ex_inds, :] + + targets = np.zeros((rois.shape[0], 5), dtype=np.float32) + targets[ex_inds, 0] = labels[ex_inds] + targets[ex_inds, 1:] = bbox_transform(ex_rois, gt_rois) + return targets diff --git a/lib/rpn/README.md b/lib/rpn/README.md new file mode 100644 index 0000000..80abf48 --- /dev/null +++ b/lib/rpn/README.md @@ -0,0 +1,23 @@ +### `rpn` module overview + +##### `generate_anchors.py` + +Generates a regular grid of multi-scale, multi-aspect anchor boxes. + +##### `proposal_layer.py` + +Converts RPN outputs (per-anchor scores and bbox regression estimates) into object proposals. 
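[Editor's note: since the README entry above stops at a one-line description, here is a compact, simplified sketch (not the repository's exact bbox_transform code) of the delta-to-box step that proposal_layer.py performs before clipping and NMS.]

import numpy as np

def apply_deltas(anchors, deltas):
    """Apply (dx, dy, dw, dh) regression deltas to [x1, y1, x2, y2] boxes,
    in the style of the Fast R-CNN box parameterization."""
    w  = anchors[:, 2] - anchors[:, 0]
    h  = anchors[:, 3] - anchors[:, 1]
    cx = anchors[:, 0] + 0.5 * w
    cy = anchors[:, 1] + 0.5 * h

    pcx = deltas[:, 0] * w + cx        # shift the box center
    pcy = deltas[:, 1] * h + cy
    pw  = np.exp(deltas[:, 2]) * w     # rescale width / height
    ph  = np.exp(deltas[:, 3]) * h

    return np.stack([pcx - 0.5 * pw, pcy - 0.5 * ph,
                     pcx + 0.5 * pw, pcy + 0.5 * ph], axis=1)

anchors = np.array([[0., 0., 16., 16.]])
deltas  = np.array([[0.1, 0.0, 0.2, 0.0]])
print(apply_deltas(anchors, deltas))   # a slightly shifted, wider box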
+ +##### `anchor_target_layer.py` + +Generates training targets/labels for each anchor. Classification labels are 1 (object), 0 (not object) or -1 (ignore). +Bbox regression targets are specified when the classification label is > 0. + +##### `proposal_target_layer.py` + +Generates training targets/labels for each object proposal: classification labels 0 - K (bg or object class 1, ... , K) +and bbox regression targets in that case that the label is > 0. + +##### `generate.py` + +Generate object detection proposals from an imdb using an RPN. diff --git a/lib/rpn/__init__.py b/lib/rpn/__init__.py new file mode 100644 index 0000000..23b31b2 --- /dev/null +++ b/lib/rpn/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick and Sean Bell +# -------------------------------------------------------- diff --git a/lib/rpn/anchor_target_layer.py b/lib/rpn/anchor_target_layer.py new file mode 100644 index 0000000..3934cdd --- /dev/null +++ b/lib/rpn/anchor_target_layer.py @@ -0,0 +1,281 @@ +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick and Sean Bell +# -------------------------------------------------------- + +import os +import caffe +import yaml +from fast_rcnn.config import cfg +import numpy as np +import numpy.random as npr +from generate_anchors import generate_anchors +from utils.cython_bbox import bbox_overlaps +from fast_rcnn.bbox_transform import bbox_transform + +DEBUG = False + +class AnchorTargetLayer(caffe.Layer): + """ + Assign anchors to ground-truth targets. Produces anchor classification + labels and bounding-box regression targets. 
+ """ + + def setup(self, bottom, top): + layer_params = yaml.load(self.param_str) + anchor_scales = layer_params.get('scales', (8, 16, 32)) + self._anchors = generate_anchors(scales=np.array(anchor_scales)) + self._num_anchors = self._anchors.shape[0] + self._feat_stride = layer_params['feat_stride'] + + if DEBUG: + print 'anchors:' + print self._anchors + print 'anchor shapes:' + print np.hstack(( + self._anchors[:, 2::4] - self._anchors[:, 0::4], + self._anchors[:, 3::4] - self._anchors[:, 1::4], + )) + self._counts = cfg.EPS + self._sums = np.zeros((1, 4)) + self._squared_sums = np.zeros((1, 4)) + self._fg_sum = 0 + self._bg_sum = 0 + self._count = 0 + + # allow boxes to sit over the edge by a small amount + self._allowed_border = layer_params.get('allowed_border', 0) + + height, width = bottom[0].data.shape[-2:] + if DEBUG: + print 'AnchorTargetLayer: height', height, 'width', width + + A = self._num_anchors + # labels + top[0].reshape(1, 1, A * height, width) + # bbox_targets + top[1].reshape(1, A * 4, height, width) + # bbox_inside_weights + top[2].reshape(1, A * 4, height, width) + # bbox_outside_weights + top[3].reshape(1, A * 4, height, width) + + def forward(self, bottom, top): + # Algorithm: + # + # for each (H, W) location i + # generate 9 anchor boxes centered on cell i + # apply predicted bbox deltas at cell i to each of the 9 anchors + # filter out-of-image anchors + # measure GT overlap + + assert bottom[0].data.shape[0] == 1, \ + 'Only single item batches are supported' + + # map of shape (..., H, W) + height, width = bottom[0].data.shape[-2:] + # GT boxes (x1, y1, x2, y2, label) + gt_boxes = bottom[1].data + # im_info + im_info = bottom[2].data[0, :] + + if DEBUG: + print '' + print 'im_size: ({}, {})'.format(im_info[0], im_info[1]) + print 'scale: {}'.format(im_info[2]) + print 'height, width: ({}, {})'.format(height, width) + print 'rpn: gt_boxes.shape', gt_boxes.shape + print 'rpn: gt_boxes', gt_boxes + + # 1. 
Generate proposals from bbox deltas and shifted anchors + shift_x = np.arange(0, width) * self._feat_stride + shift_y = np.arange(0, height) * self._feat_stride + shift_x, shift_y = np.meshgrid(shift_x, shift_y) + shifts = np.vstack((shift_x.ravel(), shift_y.ravel(), + shift_x.ravel(), shift_y.ravel())).transpose() + # add A anchors (1, A, 4) to + # cell K shifts (K, 1, 4) to get + # shift anchors (K, A, 4) + # reshape to (K*A, 4) shifted anchors + A = self._num_anchors + K = shifts.shape[0] + all_anchors = (self._anchors.reshape((1, A, 4)) + + shifts.reshape((1, K, 4)).transpose((1, 0, 2))) + all_anchors = all_anchors.reshape((K * A, 4)) + total_anchors = int(K * A) + + # only keep anchors inside the image + inds_inside = np.where( + (all_anchors[:, 0] >= -self._allowed_border) & + (all_anchors[:, 1] >= -self._allowed_border) & + (all_anchors[:, 2] < im_info[1] + self._allowed_border) & # width + (all_anchors[:, 3] < im_info[0] + self._allowed_border) # height + )[0] + + if DEBUG: + print 'total_anchors', total_anchors + print 'inds_inside', len(inds_inside) + + # keep only inside anchors + anchors = all_anchors[inds_inside, :] + if DEBUG: + print 'anchors.shape', anchors.shape + + # label: 1 is positive, 0 is negative, -1 is dont care + labels = np.empty((len(inds_inside), ), dtype=np.float32) + labels.fill(-1) + + # overlaps between the anchors and the gt boxes + # overlaps (ex, gt) + overlaps = bbox_overlaps( + np.ascontiguousarray(anchors, dtype=np.float), + np.ascontiguousarray(gt_boxes, dtype=np.float)) + argmax_overlaps = overlaps.argmax(axis=1) + max_overlaps = overlaps[np.arange(len(inds_inside)), argmax_overlaps] + gt_argmax_overlaps = overlaps.argmax(axis=0) + gt_max_overlaps = overlaps[gt_argmax_overlaps, + np.arange(overlaps.shape[1])] + gt_argmax_overlaps = np.where(overlaps == gt_max_overlaps)[0] + + if not cfg.TRAIN.RPN_CLOBBER_POSITIVES: + # assign bg labels first so that positive labels can clobber them + labels[max_overlaps < cfg.TRAIN.RPN_NEGATIVE_OVERLAP] = 0 + + # fg label: for each gt, anchor with highest overlap + labels[gt_argmax_overlaps] = 1 + + # fg label: above threshold IOU + labels[max_overlaps >= cfg.TRAIN.RPN_POSITIVE_OVERLAP] = 1 + + if cfg.TRAIN.RPN_CLOBBER_POSITIVES: + # assign bg labels last so that negative labels can clobber positives + labels[max_overlaps < cfg.TRAIN.RPN_NEGATIVE_OVERLAP] = 0 + + # subsample positive labels if we have too many + num_fg = int(cfg.TRAIN.RPN_FG_FRACTION * cfg.TRAIN.RPN_BATCHSIZE) + fg_inds = np.where(labels == 1)[0] + if len(fg_inds) > num_fg: + disable_inds = npr.choice( + fg_inds, size=(len(fg_inds) - num_fg), replace=False) + labels[disable_inds] = -1 + + # subsample negative labels if we have too many + num_bg = cfg.TRAIN.RPN_BATCHSIZE - np.sum(labels == 1) + bg_inds = np.where(labels == 0)[0] + if len(bg_inds) > num_bg: + disable_inds = npr.choice( + bg_inds, size=(len(bg_inds) - num_bg), replace=False) + labels[disable_inds] = -1 + #print "was %s inds, disabling %s, now %s inds" % ( + #len(bg_inds), len(disable_inds), np.sum(labels == 0)) + + bbox_targets = np.zeros((len(inds_inside), 4), dtype=np.float32) + bbox_targets = _compute_targets(anchors, gt_boxes[argmax_overlaps, :]) + + bbox_inside_weights = np.zeros((len(inds_inside), 4), dtype=np.float32) + bbox_inside_weights[labels == 1, :] = np.array(cfg.TRAIN.RPN_BBOX_INSIDE_WEIGHTS) + + bbox_outside_weights = np.zeros((len(inds_inside), 4), dtype=np.float32) + if cfg.TRAIN.RPN_POSITIVE_WEIGHT < 0: + # uniform weighting of examples (given non-uniform sampling) + 
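+            # Added note (not an upstream comment): in this branch positives and
+            # negatives share the same outside weight, 1 / num_examples, so each
+            # of the (typically 256) sampled anchors counts equally in the
+            # normalized RPN bbox loss; the else branch instead gives positives a
+            # total weight of RPN_POSITIVE_WEIGHT and negatives the remaining
+            # (1 - RPN_POSITIVE_WEIGHT).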
num_examples = np.sum(labels >= 0) + positive_weights = np.ones((1, 4)) * 1.0 / num_examples + negative_weights = np.ones((1, 4)) * 1.0 / num_examples + else: + assert ((cfg.TRAIN.RPN_POSITIVE_WEIGHT > 0) & + (cfg.TRAIN.RPN_POSITIVE_WEIGHT < 1)) + positive_weights = (cfg.TRAIN.RPN_POSITIVE_WEIGHT / + np.sum(labels == 1)) + negative_weights = ((1.0 - cfg.TRAIN.RPN_POSITIVE_WEIGHT) / + np.sum(labels == 0)) + bbox_outside_weights[labels == 1, :] = positive_weights + bbox_outside_weights[labels == 0, :] = negative_weights + + if DEBUG: + self._sums += bbox_targets[labels == 1, :].sum(axis=0) + self._squared_sums += (bbox_targets[labels == 1, :] ** 2).sum(axis=0) + self._counts += np.sum(labels == 1) + means = self._sums / self._counts + stds = np.sqrt(self._squared_sums / self._counts - means ** 2) + print 'means:' + print means + print 'stdevs:' + print stds + + # map up to original set of anchors + labels = _unmap(labels, total_anchors, inds_inside, fill=-1) + bbox_targets = _unmap(bbox_targets, total_anchors, inds_inside, fill=0) + bbox_inside_weights = _unmap(bbox_inside_weights, total_anchors, inds_inside, fill=0) + bbox_outside_weights = _unmap(bbox_outside_weights, total_anchors, inds_inside, fill=0) + + if DEBUG: + print 'rpn: max max_overlap', np.max(max_overlaps) + print 'rpn: num_positive', np.sum(labels == 1) + print 'rpn: num_negative', np.sum(labels == 0) + self._fg_sum += np.sum(labels == 1) + self._bg_sum += np.sum(labels == 0) + self._count += 1 + print 'rpn: num_positive avg', self._fg_sum / self._count + print 'rpn: num_negative avg', self._bg_sum / self._count + + # labels + labels = labels.reshape((1, height, width, A)).transpose(0, 3, 1, 2) + labels = labels.reshape((1, 1, A * height, width)) + top[0].reshape(*labels.shape) + top[0].data[...] = labels + + # bbox_targets + bbox_targets = bbox_targets \ + .reshape((1, height, width, A * 4)).transpose(0, 3, 1, 2) + top[1].reshape(*bbox_targets.shape) + top[1].data[...] = bbox_targets + + # bbox_inside_weights + bbox_inside_weights = bbox_inside_weights \ + .reshape((1, height, width, A * 4)).transpose(0, 3, 1, 2) + assert bbox_inside_weights.shape[2] == height + assert bbox_inside_weights.shape[3] == width + top[2].reshape(*bbox_inside_weights.shape) + top[2].data[...] = bbox_inside_weights + + # bbox_outside_weights + bbox_outside_weights = bbox_outside_weights \ + .reshape((1, height, width, A * 4)).transpose(0, 3, 1, 2) + assert bbox_outside_weights.shape[2] == height + assert bbox_outside_weights.shape[3] == width + top[3].reshape(*bbox_outside_weights.shape) + top[3].data[...] 
= bbox_outside_weights + + def backward(self, top, propagate_down, bottom): + """This layer does not propagate gradients.""" + pass + + def reshape(self, bottom, top): + """Reshaping happens during the call to forward.""" + pass + + +def _unmap(data, count, inds, fill=0): + """ Unmap a subset of item (data) back to the original set of items (of + size count) """ + if len(data.shape) == 1: + ret = np.empty((count, ), dtype=np.float32) + ret.fill(fill) + ret[inds] = data + else: + ret = np.empty((count, ) + data.shape[1:], dtype=np.float32) + ret.fill(fill) + ret[inds, :] = data + return ret + + +def _compute_targets(ex_rois, gt_rois): + """Compute bounding-box regression targets for an image.""" + + assert ex_rois.shape[0] == gt_rois.shape[0] + assert ex_rois.shape[1] == 4 + assert gt_rois.shape[1] == 5 + + return bbox_transform(ex_rois, gt_rois[:, :4]).astype(np.float32, copy=False) diff --git a/lib/rpn/generate.py b/lib/rpn/generate.py new file mode 100644 index 0000000..060daf4 --- /dev/null +++ b/lib/rpn/generate.py @@ -0,0 +1,117 @@ +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +from fast_rcnn.config import cfg +from utils.blob import im_list_to_blob +from utils.timer import Timer +import numpy as np +import cv2 + +def _vis_proposals(im, dets, thresh=0.5): + """Draw detected bounding boxes.""" + inds = np.where(dets[:, -1] >= thresh)[0] + if len(inds) == 0: + return + + class_name = 'obj' + im = im[:, :, (2, 1, 0)] + fig, ax = plt.subplots(figsize=(12, 12)) + ax.imshow(im, aspect='equal') + for i in inds: + bbox = dets[i, :4] + score = dets[i, -1] + + ax.add_patch( + plt.Rectangle((bbox[0], bbox[1]), + bbox[2] - bbox[0], + bbox[3] - bbox[1], fill=False, + edgecolor='red', linewidth=3.5) + ) + ax.text(bbox[0], bbox[1] - 2, + '{:s} {:.3f}'.format(class_name, score), + bbox=dict(facecolor='blue', alpha=0.5), + fontsize=14, color='white') + + ax.set_title(('{} detections with ' + 'p({} | box) >= {:.1f}').format(class_name, class_name, + thresh), + fontsize=14) + plt.axis('off') + plt.tight_layout() + plt.draw() + +def _get_image_blob(im): + """Converts an image into a network input. 
+ + Arguments: + im (ndarray): a color image in BGR order + + Returns: + blob (ndarray): a data blob holding an image pyramid + im_scale_factors (list): list of image scales (relative to im) used + in the image pyramid + """ + im_orig = im.astype(np.float32, copy=True) + im_orig -= cfg.PIXEL_MEANS + + im_shape = im_orig.shape + im_size_min = np.min(im_shape[0:2]) + im_size_max = np.max(im_shape[0:2]) + + processed_ims = [] + + assert len(cfg.TEST.SCALES) == 1 + target_size = cfg.TEST.SCALES[0] + + im_scale = float(target_size) / float(im_size_min) + # Prevent the biggest axis from being more than MAX_SIZE + if np.round(im_scale * im_size_max) > cfg.TEST.MAX_SIZE: + im_scale = float(cfg.TEST.MAX_SIZE) / float(im_size_max) + im = cv2.resize(im_orig, None, None, fx=im_scale, fy=im_scale, + interpolation=cv2.INTER_LINEAR) + im_info = np.hstack((im.shape[:2], im_scale))[np.newaxis, :] + processed_ims.append(im) + + # Create a blob to hold the input images + blob = im_list_to_blob(processed_ims) + + return blob, im_info + +def im_proposals(net, im): + """Generate RPN proposals on a single image.""" + blobs = {} + blobs['data'], blobs['im_info'] = _get_image_blob(im) + net.blobs['data'].reshape(*(blobs['data'].shape)) + net.blobs['im_info'].reshape(*(blobs['im_info'].shape)) + blobs_out = net.forward( + data=blobs['data'].astype(np.float32, copy=False), + im_info=blobs['im_info'].astype(np.float32, copy=False)) + + scale = blobs['im_info'][0, 2] + boxes = blobs_out['rois'][:, 1:].copy() / scale + scores = blobs_out['scores'].copy() + return boxes, scores + +def imdb_proposals(net, imdb): + """Generate RPN proposals on all images in an imdb.""" + + _t = Timer() + imdb_boxes = [[] for _ in xrange(imdb.num_images)] + for i in xrange(imdb.num_images): + im = cv2.imread(imdb.image_path_at(i)) + _t.tic() + imdb_boxes[i], scores = im_proposals(net, im) + _t.toc() + print 'im_proposals: {:d}/{:d} {:.3f}s' \ + .format(i + 1, imdb.num_images, _t.average_time) + if 0: + dets = np.hstack((imdb_boxes[i], scores)) + # from IPython import embed; embed() + _vis_proposals(im, dets[:3, :], thresh=0.9) + plt.show() + + return imdb_boxes diff --git a/lib/rpn/generate_anchors.py b/lib/rpn/generate_anchors.py new file mode 100644 index 0000000..1125a80 --- /dev/null +++ b/lib/rpn/generate_anchors.py @@ -0,0 +1,105 @@ +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick and Sean Bell +# -------------------------------------------------------- + +import numpy as np + +# Verify that we compute the same anchors as Shaoqing's matlab implementation: +# +# >> load output/rpn_cachedir/faster_rcnn_VOC2007_ZF_stage1_rpn/anchors.mat +# >> anchors +# +# anchors = +# +# -83 -39 100 56 +# -175 -87 192 104 +# -359 -183 376 200 +# -55 -55 72 72 +# -119 -119 136 136 +# -247 -247 264 264 +# -35 -79 52 96 +# -79 -167 96 184 +# -167 -343 184 360 + +#array([[ -83., -39., 100., 56.], +# [-175., -87., 192., 104.], +# [-359., -183., 376., 200.], +# [ -55., -55., 72., 72.], +# [-119., -119., 136., 136.], +# [-247., -247., 264., 264.], +# [ -35., -79., 52., 96.], +# [ -79., -167., 96., 184.], +# [-167., -343., 184., 360.]]) + +def generate_anchors(base_size=16, ratios=[0.5, 1, 2], + scales=2**np.arange(3, 6)): + """ + Generate anchor (reference) windows by enumerating aspect ratios X + scales wrt a reference (0, 0, 15, 15) window. 
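+
+    Added note: the result is a (len(ratios) * len(scales), 4) array of
+    (x1, y1, x2, y2) boxes -- 3 ratios x 3 scales = 9 anchors with the
+    defaults -- all centered on the 16 x 16 reference window.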
+ """ + + base_anchor = np.array([1, 1, base_size, base_size]) - 1 + ratio_anchors = _ratio_enum(base_anchor, ratios) + anchors = np.vstack([_scale_enum(ratio_anchors[i, :], scales) + for i in xrange(ratio_anchors.shape[0])]) + return anchors + +def _whctrs(anchor): + """ + Return width, height, x center, and y center for an anchor (window). + """ + + w = anchor[2] - anchor[0] + 1 + h = anchor[3] - anchor[1] + 1 + x_ctr = anchor[0] + 0.5 * (w - 1) + y_ctr = anchor[1] + 0.5 * (h - 1) + return w, h, x_ctr, y_ctr + +def _mkanchors(ws, hs, x_ctr, y_ctr): + """ + Given a vector of widths (ws) and heights (hs) around a center + (x_ctr, y_ctr), output a set of anchors (windows). + """ + + ws = ws[:, np.newaxis] + hs = hs[:, np.newaxis] + anchors = np.hstack((x_ctr - 0.5 * (ws - 1), + y_ctr - 0.5 * (hs - 1), + x_ctr + 0.5 * (ws - 1), + y_ctr + 0.5 * (hs - 1))) + return anchors + +def _ratio_enum(anchor, ratios): + """ + Enumerate a set of anchors for each aspect ratio wrt an anchor. + """ + + w, h, x_ctr, y_ctr = _whctrs(anchor) + size = w * h + size_ratios = size / ratios + ws = np.round(np.sqrt(size_ratios)) + hs = np.round(ws * ratios) + anchors = _mkanchors(ws, hs, x_ctr, y_ctr) + return anchors + +def _scale_enum(anchor, scales): + """ + Enumerate a set of anchors for each scale wrt an anchor. + """ + + w, h, x_ctr, y_ctr = _whctrs(anchor) + ws = w * scales + hs = h * scales + anchors = _mkanchors(ws, hs, x_ctr, y_ctr) + return anchors + +if __name__ == '__main__': + import time + t = time.time() + a = generate_anchors() + print time.time() - t + print a + from IPython import embed; embed() diff --git a/lib/rpn/proposal_layer.py b/lib/rpn/proposal_layer.py new file mode 100644 index 0000000..24b9db7 --- /dev/null +++ b/lib/rpn/proposal_layer.py @@ -0,0 +1,177 @@ +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick and Sean Bell +# -------------------------------------------------------- + +import caffe +import numpy as np +import yaml +from fast_rcnn.config import cfg +from generate_anchors import generate_anchors +from fast_rcnn.bbox_transform import bbox_transform_inv, clip_boxes +from fast_rcnn.nms_wrapper import nms + +DEBUG = False + +class ProposalLayer(caffe.Layer): + """ + Outputs object detection proposals by applying estimated bounding-box + transformations to a set of regular boxes (called "anchors"). 
+ """ + + def setup(self, bottom, top): + # parse the layer parameter string, which must be valid YAML + layer_params = yaml.load(self.param_str) + + self._feat_stride = layer_params['feat_stride'] + anchor_scales = layer_params.get('scales', (8, 16, 32)) + self._anchors = generate_anchors(scales=np.array(anchor_scales)) + self._num_anchors = self._anchors.shape[0] + + if DEBUG: + print 'feat_stride: {}'.format(self._feat_stride) + print 'anchors:' + print self._anchors + + # rois blob: holds R regions of interest, each is a 5-tuple + # (n, x1, y1, x2, y2) specifying an image batch index n and a + # rectangle (x1, y1, x2, y2) + top[0].reshape(1, 5) + + # scores blob: holds scores for R regions of interest + if len(top) > 1: + top[1].reshape(1, 1, 1, 1) + + def forward(self, bottom, top): + # Algorithm: + # + # for each (H, W) location i + # generate A anchor boxes centered on cell i + # apply predicted bbox deltas at cell i to each of the A anchors + # clip predicted boxes to image + # remove predicted boxes with either height or width < threshold + # sort all (proposal, score) pairs by score from highest to lowest + # take top pre_nms_topN proposals before NMS + # apply NMS with threshold 0.7 to remaining proposals + # take after_nms_topN proposals after NMS + # return the top proposals (-> RoIs top, scores top) + + assert bottom[0].data.shape[0] == 1, \ + 'Only single item batches are supported' + + cfg_key = str('TRAIN' if self.phase == 0 else 'TEST') # either 'TRAIN' or 'TEST' + pre_nms_topN = cfg[cfg_key].RPN_PRE_NMS_TOP_N + post_nms_topN = cfg[cfg_key].RPN_POST_NMS_TOP_N + nms_thresh = cfg[cfg_key].RPN_NMS_THRESH + min_size = cfg[cfg_key].RPN_MIN_SIZE + + # the first set of _num_anchors channels are bg probs + # the second set are the fg probs, which we want + scores = bottom[0].data[:, self._num_anchors:, :, :] + bbox_deltas = bottom[1].data + im_info = bottom[2].data[0, :] + + if DEBUG: + print 'im_size: ({}, {})'.format(im_info[0], im_info[1]) + print 'scale: {}'.format(im_info[2]) + + # 1. 
Generate proposals from bbox deltas and shifted anchors + height, width = scores.shape[-2:] + + if DEBUG: + print 'score map size: {}'.format(scores.shape) + + # Enumerate all shifts + shift_x = np.arange(0, width) * self._feat_stride + shift_y = np.arange(0, height) * self._feat_stride + shift_x, shift_y = np.meshgrid(shift_x, shift_y) + shifts = np.vstack((shift_x.ravel(), shift_y.ravel(), + shift_x.ravel(), shift_y.ravel())).transpose() + + # Enumerate all shifted anchors: + # + # add A anchors (1, A, 4) to + # cell K shifts (K, 1, 4) to get + # shift anchors (K, A, 4) + # reshape to (K*A, 4) shifted anchors + A = self._num_anchors + K = shifts.shape[0] + anchors = self._anchors.reshape((1, A, 4)) + \ + shifts.reshape((1, K, 4)).transpose((1, 0, 2)) + anchors = anchors.reshape((K * A, 4)) + + # Transpose and reshape predicted bbox transformations to get them + # into the same order as the anchors: + # + # bbox deltas will be (1, 4 * A, H, W) format + # transpose to (1, H, W, 4 * A) + # reshape to (1 * H * W * A, 4) where rows are ordered by (h, w, a) + # in slowest to fastest order + bbox_deltas = bbox_deltas.transpose((0, 2, 3, 1)).reshape((-1, 4)) + + # Same story for the scores: + # + # scores are (1, A, H, W) format + # transpose to (1, H, W, A) + # reshape to (1 * H * W * A, 1) where rows are ordered by (h, w, a) + scores = scores.transpose((0, 2, 3, 1)).reshape((-1, 1)) + + # Convert anchors into proposals via bbox transformations + proposals = bbox_transform_inv(anchors, bbox_deltas) + + # 2. clip predicted boxes to image + proposals = clip_boxes(proposals, im_info[:2]) + + # 3. remove predicted boxes with either height or width < threshold + # (NOTE: convert min_size to input image scale stored in im_info[2]) + keep = _filter_boxes(proposals, min_size * im_info[2]) + proposals = proposals[keep, :] + scores = scores[keep] + + # 4. sort all (proposal, score) pairs by score from highest to lowest + # 5. take top pre_nms_topN (e.g. 6000) + order = scores.ravel().argsort()[::-1] + if pre_nms_topN > 0: + order = order[:pre_nms_topN] + proposals = proposals[order, :] + scores = scores[order] + + # 6. apply nms (e.g. threshold = 0.7) + # 7. take after_nms_topN (e.g. 300) + # 8. return the top proposals (-> RoIs top) + keep = nms(np.hstack((proposals, scores)), nms_thresh) + if post_nms_topN > 0: + keep = keep[:post_nms_topN] + proposals = proposals[keep, :] + scores = scores[keep] + + # Output rois blob + # Our RPN implementation only supports a single input image, so all + # batch inds are 0 + batch_inds = np.zeros((proposals.shape[0], 1), dtype=np.float32) + blob = np.hstack((batch_inds, proposals.astype(np.float32, copy=False))) + # print blob.shape + top[0].reshape(*(blob.shape)) + top[0].data[...] = blob + + # [Optional] output scores blob + if len(top) > 1: + top[1].reshape(*(scores.shape)) + top[1].data[...] 
= scores + + def backward(self, top, propagate_down, bottom): + """This layer does not propagate gradients.""" + pass + + def reshape(self, bottom, top): + """Reshaping happens during the call to forward.""" + pass + +def _filter_boxes(boxes, min_size): + """Remove all boxes with any side smaller than min_size.""" + ws = boxes[:, 2] - boxes[:, 0] + 1 + hs = boxes[:, 3] - boxes[:, 1] + 1 + keep = np.where((ws >= min_size) & (hs >= min_size))[0] + return keep diff --git a/lib/rpn/proposal_target_layer.py b/lib/rpn/proposal_target_layer.py new file mode 100644 index 0000000..0bf2f5a --- /dev/null +++ b/lib/rpn/proposal_target_layer.py @@ -0,0 +1,216 @@ +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick and Sean Bell +# -------------------------------------------------------- + +import caffe +import yaml +import numpy as np +import numpy.random as npr +from fast_rcnn.config import cfg +from fast_rcnn.bbox_transform import bbox_transform +from utils.cython_bbox import bbox_overlaps + +DEBUG = False + +class ProposalTargetLayer(caffe.Layer): + """ + Assign object detection proposals to ground-truth targets. Produces proposal + classification labels and bounding-box regression targets. + """ + + def setup(self, bottom, top): + layer_params = yaml.load(self.param_str) + self._num_classes = layer_params['num_classes'] + + # sampled rois (0, x1, y1, x2, y2) + top[0].reshape(1, 5, 1, 1) + # labels + top[1].reshape(1, 1, 1, 1) + # bbox_targets + top[2].reshape(1, self._num_classes * 4, 1, 1) + # bbox_inside_weights + top[3].reshape(1, self._num_classes * 4, 1, 1) + # bbox_outside_weights + top[4].reshape(1, self._num_classes * 4, 1, 1) + + def forward(self, bottom, top): + # Proposal ROIs (0, x1, y1, x2, y2) coming from RPN + # (i.e., rpn.proposal_layer.ProposalLayer), or any other source + all_rois = bottom[0].data + # GT boxes (x1, y1, x2, y2, label) + # TODO(rbg): it's annoying that sometimes I have extra info before + # and other times after box coordinates -- normalize to one format + gt_boxes = bottom[1].data + + # Include ground-truth boxes in the set of candidate rois + zeros = np.zeros((gt_boxes.shape[0], 1), dtype=gt_boxes.dtype) + all_rois = np.vstack( + (all_rois, np.hstack((zeros, gt_boxes[:, :-1]))) + ) + + # Sanity check: single batch only + assert np.all(all_rois[:, 0] == 0), \ + 'Only single item batches are supported' + + rois_per_image = np.inf if cfg.TRAIN.BATCH_SIZE == -1 else cfg.TRAIN.BATCH_SIZE + fg_rois_per_image = np.round(cfg.TRAIN.FG_FRACTION * rois_per_image) + + # Sample rois with classification labels and bounding box regression + # targets + # print 'proposal_target_layer:', fg_rois_per_image + labels, rois, bbox_targets, bbox_inside_weights = _sample_rois( + all_rois, gt_boxes, fg_rois_per_image, + rois_per_image, self._num_classes) + + if DEBUG: + print 'num fg: {}'.format((labels > 0).sum()) + print 'num bg: {}'.format((labels == 0).sum()) + self._count += 1 + self._fg_num += (labels > 0).sum() + self._bg_num += (labels == 0).sum() + print 'num fg avg: {}'.format(self._fg_num / self._count) + print 'num bg avg: {}'.format(self._bg_num / self._count) + print 'ratio: {:.3f}'.format(float(self._fg_num) / float(self._bg_num)) + + # sampled rois + # modified by ywxiong + rois = rois.reshape((rois.shape[0], rois.shape[1], 1, 1)) + top[0].reshape(*rois.shape) + top[0].data[...] 
= rois + + # classification labels + # modified by ywxiong + labels = labels.reshape((labels.shape[0], 1, 1, 1)) + top[1].reshape(*labels.shape) + top[1].data[...] = labels + + # bbox_targets + # modified by ywxiong + bbox_targets = bbox_targets.reshape((bbox_targets.shape[0], bbox_targets.shape[1], 1, 1)) + top[2].reshape(*bbox_targets.shape) + top[2].data[...] = bbox_targets + + # bbox_inside_weights + # modified by ywxiong + bbox_inside_weights = bbox_inside_weights.reshape((bbox_inside_weights.shape[0], bbox_inside_weights.shape[1], 1, 1)) + top[3].reshape(*bbox_inside_weights.shape) + top[3].data[...] = bbox_inside_weights + + # bbox_outside_weights + # modified by ywxiong + bbox_inside_weights = bbox_inside_weights.reshape((bbox_inside_weights.shape[0], bbox_inside_weights.shape[1], 1, 1)) + top[4].reshape(*bbox_inside_weights.shape) + top[4].data[...] = np.array(bbox_inside_weights > 0).astype(np.float32) + + def backward(self, top, propagate_down, bottom): + """This layer does not propagate gradients.""" + pass + + def reshape(self, bottom, top): + """Reshaping happens during the call to forward.""" + pass + + +def _get_bbox_regression_labels(bbox_target_data, num_classes): + """Bounding-box regression targets (bbox_target_data) are stored in a + compact form N x (class, tx, ty, tw, th) + + This function expands those targets into the 4-of-4*K representation used + by the network (i.e. only one class has non-zero targets). + + Returns: + bbox_target (ndarray): N x 4K blob of regression targets + bbox_inside_weights (ndarray): N x 4K blob of loss weights + """ + + clss = bbox_target_data[:, 0] + bbox_targets = np.zeros((clss.size, 4 * num_classes), dtype=np.float32) + # print 'proposal_target_layer:', bbox_targets.shape + bbox_inside_weights = np.zeros(bbox_targets.shape, dtype=np.float32) + inds = np.where(clss > 0)[0] + if cfg.TRAIN.AGONISTIC: + for ind in inds: + cls = clss[ind] + start = 4 * (1 if cls > 0 else 0) + end = start + 4 + bbox_targets[ind, start:end] = bbox_target_data[ind, 1:] + bbox_inside_weights[ind, start:end] = cfg.TRAIN.BBOX_INSIDE_WEIGHTS + else: + for ind in inds: + cls = clss[ind] + start = 4 * cls + end = start + 4 + bbox_targets[ind, start:end] = bbox_target_data[ind, 1:] + bbox_inside_weights[ind, start:end] = cfg.TRAIN.BBOX_INSIDE_WEIGHTS + return bbox_targets, bbox_inside_weights + + +def _compute_targets(ex_rois, gt_rois, labels): + """Compute bounding-box regression targets for an image.""" + + assert ex_rois.shape[0] == gt_rois.shape[0] + assert ex_rois.shape[1] == 4 + assert gt_rois.shape[1] == 4 + + targets = bbox_transform(ex_rois, gt_rois) + if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED: + # Optionally normalize targets by a precomputed mean and stdev + targets = ((targets - np.array(cfg.TRAIN.BBOX_NORMALIZE_MEANS)) + / np.array(cfg.TRAIN.BBOX_NORMALIZE_STDS)) + return np.hstack( + (labels[:, np.newaxis], targets)).astype(np.float32, copy=False) + +def _sample_rois(all_rois, gt_boxes, fg_rois_per_image, rois_per_image, num_classes): + """Generate a random sample of RoIs comprising foreground and background + examples. 
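+
+    Added note: all_rois is (R, 5) with a leading batch index and gt_boxes is
+    (G, 5) with a trailing class label; the returned rois keep the
+    (0, x1, y1, x2, y2) layout and the labels of background samples are
+    clamped to 0.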
+ """ + # overlaps: (rois x gt_boxes) + overlaps = bbox_overlaps( + np.ascontiguousarray(all_rois[:, 1:5], dtype=np.float), + np.ascontiguousarray(gt_boxes[:, :4], dtype=np.float)) + gt_assignment = overlaps.argmax(axis=1) + max_overlaps = overlaps.max(axis=1) + labels = gt_boxes[gt_assignment, 4] + + # Select foreground RoIs as those with >= FG_THRESH overlap + fg_inds = np.where(max_overlaps >= cfg.TRAIN.FG_THRESH)[0] + # Guard against the case when an image has fewer than fg_rois_per_image + # foreground RoIs + fg_rois_per_this_image = min(fg_rois_per_image, fg_inds.size) + # Sample foreground regions without replacement + if fg_inds.size > 0: + fg_inds = npr.choice(fg_inds, size=fg_rois_per_this_image, replace=False) + + # Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI) + bg_inds = np.where((max_overlaps < cfg.TRAIN.BG_THRESH_HI) & + (max_overlaps >= cfg.TRAIN.BG_THRESH_LO))[0] + # Compute number of background RoIs to take from this image (guarding + # against there being fewer than desired) + bg_rois_per_this_image = rois_per_image - fg_rois_per_this_image + bg_rois_per_this_image = min(bg_rois_per_this_image, bg_inds.size) + # Sample background regions without replacement + if bg_inds.size > 0: + bg_inds = npr.choice(bg_inds, size=bg_rois_per_this_image, replace=False) + + # The indices that we're selecting (both fg and bg) + keep_inds = np.append(fg_inds, bg_inds) + # print 'proposal_target_layer:', keep_inds + + # Select sampled values from various arrays: + labels = labels[keep_inds] + # Clamp labels for the background RoIs to 0 + labels[fg_rois_per_this_image:] = 0 + rois = all_rois[keep_inds] + + # print 'proposal_target_layer:', rois + bbox_target_data = _compute_targets( + rois[:, 1:5], gt_boxes[gt_assignment[keep_inds], :4], labels) + + # print 'proposal_target_layer:', bbox_target_data + bbox_targets, bbox_inside_weights = \ + _get_bbox_regression_labels(bbox_target_data, num_classes) + + return labels, rois, bbox_targets, bbox_inside_weights diff --git a/lib/setup.py b/lib/setup.py new file mode 100644 index 0000000..0f4615f --- /dev/null +++ b/lib/setup.py @@ -0,0 +1,156 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import os +from os.path import join as pjoin +from setuptools import setup +from distutils.extension import Extension +from Cython.Distutils import build_ext +import subprocess +import numpy as np + +def find_in_path(name, path): + "Find a file in a search path" + # Adapted fom + # http://code.activestate.com/recipes/52224-find-a-file-given-a-search-path/ + for dir in path.split(os.pathsep): + binpath = pjoin(dir, name) + if os.path.exists(binpath): + return os.path.abspath(binpath) + return None + + +def locate_cuda(): + """Locate the CUDA environment on the system + + Returns a dict with keys 'home', 'nvcc', 'include', and 'lib64' + and values giving the absolute path to each directory. + + Starts by looking for the CUDAHOME env variable. If not found, everything + is based on finding 'nvcc' in the PATH. 
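+
+    Added example: with CUDAHOME=/usr/local/cuda this returns
+    {'home': '/usr/local/cuda', 'nvcc': '/usr/local/cuda/bin/nvcc',
+    'include': '/usr/local/cuda/include', 'lib64': '/usr/local/cuda/lib64'},
+    provided all four paths exist.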
+ """ + + # first check if the CUDAHOME env variable is in use + if 'CUDAHOME' in os.environ: + home = os.environ['CUDAHOME'] + nvcc = pjoin(home, 'bin', 'nvcc') + else: + # otherwise, search the PATH for NVCC + default_path = pjoin(os.sep, 'usr', 'local', 'cuda', 'bin') + nvcc = find_in_path('nvcc', os.environ['PATH'] + os.pathsep + default_path) + if nvcc is None: + raise EnvironmentError('The nvcc binary could not be ' + 'located in your $PATH. Either add it to your path, or set $CUDAHOME') + home = os.path.dirname(os.path.dirname(nvcc)) + + cudaconfig = {'home':home, 'nvcc':nvcc, + 'include': pjoin(home, 'include'), + 'lib64': pjoin(home, 'lib64')} + for k, v in cudaconfig.iteritems(): + if not os.path.exists(v): + raise EnvironmentError('The CUDA %s path could not be located in %s' % (k, v)) + + return cudaconfig +CUDA = locate_cuda() + + +# Obtain the numpy include directory. This logic works across numpy versions. +try: + numpy_include = np.get_include() +except AttributeError: + numpy_include = np.get_numpy_include() + +def customize_compiler_for_nvcc(self): + """inject deep into distutils to customize how the dispatch + to gcc/nvcc works. + + If you subclass UnixCCompiler, it's not trivial to get your subclass + injected in, and still have the right customizations (i.e. + distutils.sysconfig.customize_compiler) run on it. So instead of going + the OO route, I have this. Note, it's kind of like a weird functional + subclassing going on.""" + + # tell the compiler it can process .cu + self.src_extensions.append('.cu') + + # save references to the default compiler_so and _compile methods + default_compiler_so = self.compiler_so + super = self._compile + + # now redefine the _compile method. This gets executed for each + # object but distutils doesn't have the ability to change compilers + # based on source extension: we add it.
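+    # Added note: extra_postargs arrives here as the dict passed via
+    # extra_compile_args in the Extension definitions below
+    # ({'gcc': [...], 'nvcc': [...]}) rather than the flat list distutils
+    # normally uses, so _compile picks the sub-list matching the source type.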
+ def _compile(obj, src, ext, cc_args, extra_postargs, pp_opts): + if os.path.splitext(src)[1] == '.cu': + # use the cuda for .cu files + self.set_executable('compiler_so', CUDA['nvcc']) + # use only a subset of the extra_postargs, which are 1-1 translated + # from the extra_compile_args in the Extension class + postargs = extra_postargs['nvcc'] + else: + postargs = extra_postargs['gcc'] + + super(obj, src, ext, cc_args, postargs, pp_opts) + # reset the default compiler_so, which we might have changed for cuda + self.compiler_so = default_compiler_so + + # inject our redefined _compile method into the class + self._compile = _compile + + +# run the customize_compiler +class custom_build_ext(build_ext): + def build_extensions(self): + customize_compiler_for_nvcc(self.compiler) + build_ext.build_extensions(self) + + +ext_modules = [ + Extension( + "utils.cython_bbox", + ["utils/bbox.pyx"], + extra_compile_args={'gcc': ["-Wno-cpp", "-Wno-unused-function"]}, + include_dirs = [numpy_include] + ), + Extension( + "nms.cpu_nms", + ["nms/cpu_nms.pyx"], + extra_compile_args={'gcc': ["-Wno-cpp", "-Wno-unused-function"]}, + include_dirs = [numpy_include] + ), + Extension('nms.gpu_nms', + ['nms/nms_kernel.cu', 'nms/gpu_nms.pyx'], + library_dirs=[CUDA['lib64']], + libraries=['cudart'], + language='c++', + runtime_library_dirs=[CUDA['lib64']], + # this syntax is specific to this build system + # we're only going to use certain compiler args with nvcc and not with + # gcc the implementation of this trick is in customize_compiler() below + extra_compile_args={'gcc': ["-Wno-unused-function"], + 'nvcc': ['-arch=sm_35', + '--ptxas-options=-v', + '-c', + '--compiler-options', + "'-fPIC'"]}, + include_dirs = [numpy_include, CUDA['include']] + ), + Extension( + 'pycocotools._mask', + sources=['pycocotools/maskApi.c', 'pycocotools/_mask.pyx'], + include_dirs = [numpy_include, 'pycocotools'], + extra_compile_args={ + 'gcc': ['-Wno-cpp', '-Wno-unused-function', '-std=c99']}, + ), +] + +setup( + name='fast_rcnn', + ext_modules=ext_modules, + # inject our custom trigger + cmdclass={'build_ext': custom_build_ext}, +) diff --git a/lib/transform/__init__.py b/lib/transform/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lib/transform/torch_image_transform_layer.py b/lib/transform/torch_image_transform_layer.py new file mode 100644 index 0000000..9273b3a --- /dev/null +++ b/lib/transform/torch_image_transform_layer.py @@ -0,0 +1,64 @@ +# -------------------------------------------------------- +# Fast/er R-CNN +# Licensed under The MIT License [see LICENSE for details] +# -------------------------------------------------------- + +""" Transform images for compatibility with models trained with +https://github.com/facebook/fb.resnet.torch. 
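+
+Added note: forward() first adds back the BGR pixel means the data layer
+subtracted, converts BGR to RGB and rescales to [0, 1], then applies the
+torch-style per-channel means and standard deviations defined in setup().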
+ +Usage in model prototxt: + +layer { + name: 'data_xform' + type: 'Python' + bottom: 'data_caffe' + top: 'data' + python_param { + module: 'transform.torch_image_transform_layer' + layer: 'TorchImageTransformLayer' + } +} +""" + +import caffe +from fast_rcnn.config import cfg +import numpy as np + +class TorchImageTransformLayer(caffe.Layer): + def setup(self, bottom, top): + # (1, 3, 1, 1) shaped arrays + self.PIXEL_MEANS = \ + np.array([[[[0.48462227599918]], + [[0.45624044862054]], + [[0.40588363755159]]]]) + self.PIXEL_STDS = \ + np.array([[[[0.22889466674951]], + [[0.22446679341259]], + [[0.22495548344775]]]]) + # The default ("old") pixel means that were already subtracted + channel_swap = (0, 3, 1, 2) + self.OLD_PIXEL_MEANS = \ + cfg.PIXEL_MEANS[np.newaxis, :, :, :].transpose(channel_swap) + + top[0].reshape(*(bottom[0].shape)) + + def forward(self, bottom, top): + ims = bottom[0].data + # Invert the channel means that were already subtracted + ims += self.OLD_PIXEL_MEANS + # 1. Permute BGR to RGB and normalize to [0, 1] + ims = ims[:, [2, 1, 0], :, :] / 255.0 + # 2. Remove channel means + ims -= self.PIXEL_MEANS + # 3. Standardize channels + ims /= self.PIXEL_STDS + top[0].reshape(*(ims.shape)) + top[0].data[...] = ims + + def backward(self, top, propagate_down, bottom): + """This layer does not propagate gradients.""" + pass + + def reshape(self, bottom, top): + """Reshaping happens during the call to forward.""" + pass diff --git a/lib/utils/.gitignore b/lib/utils/.gitignore new file mode 100644 index 0000000..4b8a745 --- /dev/null +++ b/lib/utils/.gitignore @@ -0,0 +1,2 @@ +*.c +*.so diff --git a/lib/utils/__init__.py b/lib/utils/__init__.py new file mode 100644 index 0000000..7ba6a65 --- /dev/null +++ b/lib/utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- diff --git a/lib/utils/bbox.pyx b/lib/utils/bbox.pyx new file mode 100644 index 0000000..e14780d --- /dev/null +++ b/lib/utils/bbox.pyx @@ -0,0 +1,55 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Sergey Karayev +# -------------------------------------------------------- + +cimport cython +import numpy as np +cimport numpy as np + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + +def bbox_overlaps( + np.ndarray[DTYPE_t, ndim=2] boxes, + np.ndarray[DTYPE_t, ndim=2] query_boxes): + """ + Parameters + ---------- + boxes: (N, 4) ndarray of float + query_boxes: (K, 4) ndarray of float + Returns + ------- + overlaps: (N, K) ndarray of overlap between boxes and query_boxes + """ + cdef unsigned int N = boxes.shape[0] + cdef unsigned int K = query_boxes.shape[0] + cdef np.ndarray[DTYPE_t, ndim=2] overlaps = np.zeros((N, K), dtype=DTYPE) + cdef DTYPE_t iw, ih, box_area + cdef DTYPE_t ua + cdef unsigned int k, n + for k in range(K): + box_area = ( + (query_boxes[k, 2] - query_boxes[k, 0] + 1) * + (query_boxes[k, 3] - query_boxes[k, 1] + 1) + ) + for n in range(N): + iw = ( + min(boxes[n, 2], query_boxes[k, 2]) - + max(boxes[n, 0], query_boxes[k, 0]) + 1 + ) + if iw > 0: + ih = ( + min(boxes[n, 3], query_boxes[k, 3]) - + max(boxes[n, 1], query_boxes[k, 1]) + 1 + ) + if ih > 0: + ua = float( + (boxes[n, 2] - boxes[n, 0] + 1) * + (boxes[n, 3] - boxes[n, 1] + 1) + + 
box_area - iw * ih + ) + overlaps[n, k] = iw * ih / ua + return overlaps diff --git a/lib/utils/blob.py b/lib/utils/blob.py new file mode 100644 index 0000000..1c31642 --- /dev/null +++ b/lib/utils/blob.py @@ -0,0 +1,45 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Blob helper functions.""" + +import numpy as np +import cv2 + +def im_list_to_blob(ims): + """Convert a list of images into a network input. + + Assumes images are already prepared (means subtracted, BGR order, ...). + """ + max_shape = np.array([im.shape for im in ims]).max(axis=0) + num_images = len(ims) + blob = np.zeros((num_images, max_shape[0], max_shape[1], 3), + dtype=np.float32) + for i in xrange(num_images): + im = ims[i] + blob[i, 0:im.shape[0], 0:im.shape[1], :] = im + # Move channels (axis 3) to axis 1 + # Axis order will become: (batch elem, channel, height, width) + channel_swap = (0, 3, 1, 2) + blob = blob.transpose(channel_swap) + return blob + +def prep_im_for_blob(im, pixel_means, target_size, max_size): + """Mean subtract and scale an image for use in a blob.""" + im = im.astype(np.float32, copy=False) + im -= pixel_means + im_shape = im.shape + im_size_min = np.min(im_shape[0:2]) + im_size_max = np.max(im_shape[0:2]) + im_scale = float(target_size) / float(im_size_min) + # Prevent the biggest axis from being more than MAX_SIZE + if np.round(im_scale * im_size_max) > max_size: + im_scale = float(max_size) / float(im_size_max) + im = cv2.resize(im, None, None, fx=im_scale, fy=im_scale, + interpolation=cv2.INTER_LINEAR) + + return im, im_scale diff --git a/lib/utils/timer.py b/lib/utils/timer.py new file mode 100644 index 0000000..dacc942 --- /dev/null +++ b/lib/utils/timer.py @@ -0,0 +1,32 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +import time + +class Timer(object): + """A simple timer.""" + def __init__(self): + self.total_time = 0. + self.calls = 0 + self.start_time = 0. + self.diff = 0. + self.average_time = 0. 
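+
+    # Added usage sketch (illustrative, not part of the original file):
+    #   timer = Timer()
+    #   timer.tic()
+    #   do_work()                            # any workload being timed
+    #   per_call = timer.toc(average=False)  # seconds for this call
+    #   running_avg = timer.average_time     # mean over all tic/toc pairs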
+ + def tic(self): + # using time.time instead of time.clock because time time.clock + # does not normalize for multithreading + self.start_time = time.time() + + def toc(self, average=True): + self.diff = time.time() - self.start_time + self.total_time += self.diff + self.calls += 1 + self.average_time = self.total_time / self.calls + if average: + return self.average_time + else: + return self.diff diff --git a/models/coco/VGG16/fast_rcnn/solver.prototxt b/models/coco/VGG16/fast_rcnn/solver.prototxt new file mode 100644 index 0000000..e537e1b --- /dev/null +++ b/models/coco/VGG16/fast_rcnn/solver.prototxt @@ -0,0 +1,16 @@ +train_net: "models/coco/VGG16/fast_rcnn/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 200000 +display: 20 +average_loss: 100 +# iter_size: 1 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_fast_rcnn" +#debug_info: true diff --git a/models/coco/VGG16/fast_rcnn/test.prototxt b/models/coco/VGG16/fast_rcnn/test.prototxt new file mode 100644 index 0000000..5bc1e99 --- /dev/null +++ b/models/coco/VGG16/fast_rcnn/test.prototxt @@ -0,0 +1,499 @@ +name: "VGG_ILSVRC_16_layers" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "rois" +input_shape { + dim: 1 # to be changed on-the-fly to num ROIs + dim: 5 # [batch ind, x1, y1, x2, y2] zero-based indexing +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + decay_mult: 1 
+ } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: 
"cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/coco/VGG16/fast_rcnn/train.prototxt b/models/coco/VGG16/fast_rcnn/train.prototxt new file mode 100644 index 0000000..1bba398 --- /dev/null +++ b/models/coco/VGG16/fast_rcnn/train.prototxt @@ -0,0 +1,485 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 81" + } +} +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + } + 
param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: 
"SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/coco/VGG16/faster_rcnn_end2end/solver.prototxt b/models/coco/VGG16/faster_rcnn_end2end/solver.prototxt new file mode 100644 index 0000000..c8e57db --- /dev/null +++ b/models/coco/VGG16/faster_rcnn_end2end/solver.prototxt @@ -0,0 +1,15 @@ +train_net: "models/coco/VGG16/faster_rcnn_end2end/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 350000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_faster_rcnn" +iter_size: 2 diff --git a/models/coco/VGG16/faster_rcnn_end2end/test.prototxt b/models/coco/VGG16/faster_rcnn_end2end/test.prototxt new file mode 100644 index 0000000..a700a52 --- /dev/null +++ b/models/coco/VGG16/faster_rcnn_end2end/test.prototxt @@ -0,0 +1,590 @@ +name: "VGG_ILSVRC_16_layers" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: 
"relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 24 # 2(bg/fg) * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: 
"rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 48 # 4 * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 24 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16 \n'scales': !!python/tuple [4, 8, 16, 32]" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/coco/VGG16/faster_rcnn_end2end/train.prototxt b/models/coco/VGG16/faster_rcnn_end2end/train.prototxt new file mode 100644 index 0000000..d36c92d --- /dev/null +++ b/models/coco/VGG16/faster_rcnn_end2end/train.prototxt @@ -0,0 +1,642 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 81" + } +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + 
lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: 
"Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 24 # 2(bg/fg) * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 48 # 4 * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16 \n'scales': !!python/tuple [4, 8, 16, 32]" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 24 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16 \n'scales': !!python/tuple [4, 8, 16, 32]" + } +} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 
'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 81" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/coco/VGG_CNN_M_1024/fast_rcnn/solver.prototxt b/models/coco/VGG_CNN_M_1024/fast_rcnn/solver.prototxt new file mode 100644 index 0000000..af9aa44 --- /dev/null +++ b/models/coco/VGG_CNN_M_1024/fast_rcnn/solver.prototxt @@ -0,0 +1,15 @@ +train_net: "models/coco/VGG_CNN_M_1024/fast_rcnn/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 200000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_fast_rcnn" +#debug_info: true diff --git a/models/coco/VGG_CNN_M_1024/fast_rcnn/test.prototxt b/models/coco/VGG_CNN_M_1024/fast_rcnn/test.prototxt new file mode 100644 index 0000000..733a759 --- /dev/null +++ b/models/coco/VGG_CNN_M_1024/fast_rcnn/test.prototxt @@ -0,0 +1,299 @@ +name: "VGG_CNN_M_1024" +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} +input: "rois" +input_shape { + dim: 1 # to be changed on-the-fly to num ROIs + dim: 5 # [batch ind, x1, y1, x2, y2] zero-based indexing +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + 
bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/coco/VGG_CNN_M_1024/fast_rcnn/train.prototxt b/models/coco/VGG_CNN_M_1024/fast_rcnn/train.prototxt new file mode 100644 index 0000000..2f87439 --- /dev/null +++ b/models/coco/VGG_CNN_M_1024/fast_rcnn/train.prototxt @@ -0,0 +1,292 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 
'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 81" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: 
"labels" + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt b/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt new file mode 100644 index 0000000..c398a08 --- /dev/null +++ b/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt @@ -0,0 +1,14 @@ +train_net: "models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 350000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_faster_rcnn" diff --git a/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt b/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt new file mode 100644 index 0000000..ddc633e --- /dev/null +++ b/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt @@ -0,0 +1,432 @@ +name: "VGG_CNN_M_1024" +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN 
============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +#layer { +# name: "rpn_conv/3x3" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/3x3" +# param { lr_mult: 1.0 decay_mult: 1.0 } +# param { lr_mult: 2.0 decay_mult: 0 } +# convolution_param { +# num_output: 192 +# kernel_size: 3 pad: 1 stride: 1 +# weight_filler { type: "gaussian" std: 0.01 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn_conv/5x5" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/5x5" +# param { lr_mult: 1.0 decay_mult: 1.0 } +# param { lr_mult: 2.0 decay_mult: 0 } +# convolution_param { +# num_output: 64 +# kernel_size: 5 pad: 2 stride: 1 +# weight_filler { type: "gaussian" std: 0.0036 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn/output" +# type: "Concat" +# bottom: "rpn_conv/3x3" +# bottom: "rpn_conv/5x5" +# top: "rpn/output" +#} +#layer { +# name: "rpn_relu/output" +# type: "ReLU" +# bottom: "rpn/output" +# top: "rpn/output" +#} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 24 # 2(bg/fg) * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 48 # 4 * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 24 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16 \n'scales': !!python/tuple [4, 8, 16, 32]" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 
1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt b/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt new file mode 100644 index 0000000..a15e7b3 --- /dev/null +++ b/models/coco/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt @@ -0,0 +1,453 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 81" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: 
"Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 24 # 2(bg/fg) * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 48 # 4 * 12(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16 \n'scales': !!python/tuple [4, 8, 16, 32]" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 24 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16 \n'scales': !!python/tuple [4, 8, 16, 32]" + } +} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 81" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + 
type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 81 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 324 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/test.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/test.prototxt new file mode 100644 index 0000000..076dd02 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/test.prototxt @@ -0,0 +1,7186 @@ +name: "ResNet101" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "conv1" + bottom: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + 
type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2a" + bottom: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2b" + bottom: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2a" + bottom: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" 
+} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2b" + bottom: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2a" + bottom: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: 
"res2c_branch2b" + bottom: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2a" + bottom: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2b" + bottom: 
"res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b1_branch2a" + name: "res3b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "bn3b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "scale3b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2a" + bottom: "res3b1_branch2a" + name: "res3b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2b" + name: "res3b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "bn3b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "scale3b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2b" + bottom: "res3b1_branch2b" + name: "res3b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2c" + name: "res3b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "bn3b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "scale3b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b1_branch2c" + top: "res3b1" + name: "res3b1" + type: "Eltwise" +} + +layer { + bottom: "res3b1" + top: "res3b1" + name: "res3b1_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1" + top: "res3b2_branch2a" + name: "res3b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "bn3b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "scale3b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2a" + bottom: "res3b2_branch2a" + name: "res3b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2b" + name: "res3b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "bn3b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "scale3b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2b" + bottom: "res3b2_branch2b" + name: "res3b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2c" + name: "res3b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "bn3b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "scale3b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1" + bottom: "res3b2_branch2c" + top: "res3b2" + name: "res3b2" + type: "Eltwise" +} + +layer { + bottom: "res3b2" + top: "res3b2" + name: "res3b2_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2" + top: "res3b3_branch2a" + name: "res3b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "bn3b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "scale3b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2a" + bottom: "res3b3_branch2a" + name: "res3b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2b" + name: "res3b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "bn3b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "scale3b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2b" + bottom: "res3b3_branch2b" + name: "res3b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2c" + name: "res3b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "bn3b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "scale3b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2" + bottom: "res3b3_branch2c" + top: "res3b3" + name: "res3b3" + type: "Eltwise" +} + +layer { + bottom: "res3b3" + top: "res3b3" + name: "res3b3_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } 
+ param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2a" + bottom: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2b" + bottom: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b1_branch2a" + name: "res4b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "bn4b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "scale4b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2a" + bottom: "res4b1_branch2a" + name: "res4b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2b" + name: "res4b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "bn4b1_branch2b" + type: "BatchNorm" + batch_norm_param { + 
use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "scale4b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2b" + bottom: "res4b1_branch2b" + name: "res4b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2c" + name: "res4b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "bn4b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "scale4b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b1_branch2c" + top: "res4b1" + name: "res4b1" + type: "Eltwise" +} + +layer { + bottom: "res4b1" + top: "res4b1" + name: "res4b1_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1" + top: "res4b2_branch2a" + name: "res4b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "bn4b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "scale4b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2a" + bottom: "res4b2_branch2a" + name: "res4b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2b" + name: "res4b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "bn4b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "scale4b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2b" + bottom: "res4b2_branch2b" + name: "res4b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2c" + name: "res4b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 
+ } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "bn4b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "scale4b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1" + bottom: "res4b2_branch2c" + top: "res4b2" + name: "res4b2" + type: "Eltwise" +} + +layer { + bottom: "res4b2" + top: "res4b2" + name: "res4b2_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2" + top: "res4b3_branch2a" + name: "res4b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "bn4b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "scale4b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2a" + bottom: "res4b3_branch2a" + name: "res4b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2b" + name: "res4b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "bn4b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "scale4b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2b" + bottom: "res4b3_branch2b" + name: "res4b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2c" + name: "res4b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "bn4b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "scale4b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2" + bottom: "res4b3_branch2c" + top: "res4b3" + name: "res4b3" + type: "Eltwise" +} + +layer { + bottom: "res4b3" + top: "res4b3" + name: "res4b3_relu" + type: "ReLU" +} + +layer { + 
bottom: "res4b3" + top: "res4b4_branch2a" + name: "res4b4_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "bn4b4_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "scale4b4_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2a" + bottom: "res4b4_branch2a" + name: "res4b4_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2b" + name: "res4b4_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "bn4b4_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "scale4b4_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2b" + bottom: "res4b4_branch2b" + name: "res4b4_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2c" + name: "res4b4_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "bn4b4_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "scale4b4_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3" + bottom: "res4b4_branch2c" + top: "res4b4" + name: "res4b4" + type: "Eltwise" +} + +layer { + bottom: "res4b4" + top: "res4b4" + name: "res4b4_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4" + top: "res4b5_branch2a" + name: "res4b5_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "bn4b5_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "scale4b5_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } 
+} + +layer { + top: "res4b5_branch2a" + bottom: "res4b5_branch2a" + name: "res4b5_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2b" + name: "res4b5_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "bn4b5_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "scale4b5_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2b" + bottom: "res4b5_branch2b" + name: "res4b5_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2c" + name: "res4b5_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "bn4b5_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "scale4b5_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4" + bottom: "res4b5_branch2c" + top: "res4b5" + name: "res4b5" + type: "Eltwise" +} + +layer { + bottom: "res4b5" + top: "res4b5" + name: "res4b5_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5" + top: "res4b6_branch2a" + name: "res4b6_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "bn4b6_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "scale4b6_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2a" + bottom: "res4b6_branch2a" + name: "res4b6_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2b" + name: "res4b6_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "bn4b6_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "scale4b6_branch2b" + type: 
"Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2b" + bottom: "res4b6_branch2b" + name: "res4b6_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2c" + name: "res4b6_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "bn4b6_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "scale4b6_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5" + bottom: "res4b6_branch2c" + top: "res4b6" + name: "res4b6" + type: "Eltwise" +} + +layer { + bottom: "res4b6" + top: "res4b6" + name: "res4b6_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6" + top: "res4b7_branch2a" + name: "res4b7_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "bn4b7_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "scale4b7_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2a" + bottom: "res4b7_branch2a" + name: "res4b7_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2b" + name: "res4b7_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "bn4b7_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "scale4b7_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2b" + bottom: "res4b7_branch2b" + name: "res4b7_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2c" + name: "res4b7_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "bn4b7_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { 
+ lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "scale4b7_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6" + bottom: "res4b7_branch2c" + top: "res4b7" + name: "res4b7" + type: "Eltwise" +} + +layer { + bottom: "res4b7" + top: "res4b7" + name: "res4b7_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7" + top: "res4b8_branch2a" + name: "res4b8_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "bn4b8_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "scale4b8_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2a" + bottom: "res4b8_branch2a" + name: "res4b8_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2b" + name: "res4b8_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "bn4b8_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "scale4b8_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2b" + bottom: "res4b8_branch2b" + name: "res4b8_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2c" + name: "res4b8_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "bn4b8_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "scale4b8_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7" + bottom: "res4b8_branch2c" + top: "res4b8" + name: "res4b8" + type: "Eltwise" +} + +layer { + bottom: "res4b8" + top: "res4b8" + name: "res4b8_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8" + top: "res4b9_branch2a" + name: "res4b9_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: 
"res4b9_branch2a" + name: "bn4b9_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "scale4b9_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2a" + bottom: "res4b9_branch2a" + name: "res4b9_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2b" + name: "res4b9_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "bn4b9_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "scale4b9_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2b" + bottom: "res4b9_branch2b" + name: "res4b9_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2c" + name: "res4b9_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "bn4b9_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "scale4b9_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8" + bottom: "res4b9_branch2c" + top: "res4b9" + name: "res4b9" + type: "Eltwise" +} + +layer { + bottom: "res4b9" + top: "res4b9" + name: "res4b9_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9" + top: "res4b10_branch2a" + name: "res4b10_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "bn4b10_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "scale4b10_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2a" + bottom: "res4b10_branch2a" + name: "res4b10_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2b" + name: "res4b10_branch2b" + type: "Convolution" + convolution_param { + 
num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "bn4b10_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "scale4b10_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2b" + bottom: "res4b10_branch2b" + name: "res4b10_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2c" + name: "res4b10_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "bn4b10_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "scale4b10_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9" + bottom: "res4b10_branch2c" + top: "res4b10" + name: "res4b10" + type: "Eltwise" +} + +layer { + bottom: "res4b10" + top: "res4b10" + name: "res4b10_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10" + top: "res4b11_branch2a" + name: "res4b11_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "bn4b11_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "scale4b11_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2a" + bottom: "res4b11_branch2a" + name: "res4b11_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2b" + name: "res4b11_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "bn4b11_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "scale4b11_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2b" + bottom: "res4b11_branch2b" 
+ name: "res4b11_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2c" + name: "res4b11_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "bn4b11_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "scale4b11_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10" + bottom: "res4b11_branch2c" + top: "res4b11" + name: "res4b11" + type: "Eltwise" +} + +layer { + bottom: "res4b11" + top: "res4b11" + name: "res4b11_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11" + top: "res4b12_branch2a" + name: "res4b12_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "bn4b12_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "scale4b12_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2a" + bottom: "res4b12_branch2a" + name: "res4b12_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2b" + name: "res4b12_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "bn4b12_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "scale4b12_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2b" + bottom: "res4b12_branch2b" + name: "res4b12_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2c" + name: "res4b12_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "bn4b12_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "scale4b12_branch2c" + type: "Scale" + 
scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11" + bottom: "res4b12_branch2c" + top: "res4b12" + name: "res4b12" + type: "Eltwise" +} + +layer { + bottom: "res4b12" + top: "res4b12" + name: "res4b12_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12" + top: "res4b13_branch2a" + name: "res4b13_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "bn4b13_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "scale4b13_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2a" + bottom: "res4b13_branch2a" + name: "res4b13_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2b" + name: "res4b13_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "bn4b13_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "scale4b13_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2b" + bottom: "res4b13_branch2b" + name: "res4b13_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2c" + name: "res4b13_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "bn4b13_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "scale4b13_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12" + bottom: "res4b13_branch2c" + top: "res4b13" + name: "res4b13" + type: "Eltwise" +} + +layer { + bottom: "res4b13" + top: "res4b13" + name: "res4b13_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13" + top: "res4b14_branch2a" + name: "res4b14_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "bn4b14_branch2a" + type: "BatchNorm" + batch_norm_param { + 
use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "scale4b14_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2a" + bottom: "res4b14_branch2a" + name: "res4b14_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2b" + name: "res4b14_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "bn4b14_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "scale4b14_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2b" + bottom: "res4b14_branch2b" + name: "res4b14_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2c" + name: "res4b14_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "bn4b14_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "scale4b14_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13" + bottom: "res4b14_branch2c" + top: "res4b14" + name: "res4b14" + type: "Eltwise" +} + +layer { + bottom: "res4b14" + top: "res4b14" + name: "res4b14_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14" + top: "res4b15_branch2a" + name: "res4b15_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "bn4b15_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "scale4b15_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2a" + bottom: "res4b15_branch2a" + name: "res4b15_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2b" + name: "res4b15_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 
+ bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "bn4b15_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "scale4b15_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2b" + bottom: "res4b15_branch2b" + name: "res4b15_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2c" + name: "res4b15_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "bn4b15_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "scale4b15_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14" + bottom: "res4b15_branch2c" + top: "res4b15" + name: "res4b15" + type: "Eltwise" +} + +layer { + bottom: "res4b15" + top: "res4b15" + name: "res4b15_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15" + top: "res4b16_branch2a" + name: "res4b16_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "bn4b16_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "scale4b16_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2a" + bottom: "res4b16_branch2a" + name: "res4b16_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2b" + name: "res4b16_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "bn4b16_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "scale4b16_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2b" + bottom: "res4b16_branch2b" + name: "res4b16_branch2b_relu" + type: "ReLU" +} + 
+layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2c" + name: "res4b16_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "bn4b16_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "scale4b16_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15" + bottom: "res4b16_branch2c" + top: "res4b16" + name: "res4b16" + type: "Eltwise" +} + +layer { + bottom: "res4b16" + top: "res4b16" + name: "res4b16_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16" + top: "res4b17_branch2a" + name: "res4b17_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "bn4b17_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "scale4b17_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2a" + bottom: "res4b17_branch2a" + name: "res4b17_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2b" + name: "res4b17_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "bn4b17_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "scale4b17_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2b" + bottom: "res4b17_branch2b" + name: "res4b17_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2c" + name: "res4b17_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "bn4b17_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "scale4b17_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 
0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16" + bottom: "res4b17_branch2c" + top: "res4b17" + name: "res4b17" + type: "Eltwise" +} + +layer { + bottom: "res4b17" + top: "res4b17" + name: "res4b17_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17" + top: "res4b18_branch2a" + name: "res4b18_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "bn4b18_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "scale4b18_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2a" + bottom: "res4b18_branch2a" + name: "res4b18_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2b" + name: "res4b18_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "bn4b18_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "scale4b18_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2b" + bottom: "res4b18_branch2b" + name: "res4b18_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2c" + name: "res4b18_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "bn4b18_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "scale4b18_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17" + bottom: "res4b18_branch2c" + top: "res4b18" + name: "res4b18" + type: "Eltwise" +} + +layer { + bottom: "res4b18" + top: "res4b18" + name: "res4b18_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18" + top: "res4b19_branch2a" + name: "res4b19_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "bn4b19_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "scale4b19_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2a" + bottom: "res4b19_branch2a" + name: "res4b19_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2b" + name: "res4b19_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "bn4b19_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "scale4b19_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2b" + bottom: "res4b19_branch2b" + name: "res4b19_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2c" + name: "res4b19_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "bn4b19_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "scale4b19_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18" + bottom: "res4b19_branch2c" + top: "res4b19" + name: "res4b19" + type: "Eltwise" +} + +layer { + bottom: "res4b19" + top: "res4b19" + name: "res4b19_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19" + top: "res4b20_branch2a" + name: "res4b20_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "bn4b20_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "scale4b20_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2a" + bottom: "res4b20_branch2a" + name: "res4b20_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2b" + name: "res4b20_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + 
bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "bn4b20_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "scale4b20_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2b" + bottom: "res4b20_branch2b" + name: "res4b20_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2c" + name: "res4b20_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "bn4b20_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "scale4b20_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19" + bottom: "res4b20_branch2c" + top: "res4b20" + name: "res4b20" + type: "Eltwise" +} + +layer { + bottom: "res4b20" + top: "res4b20" + name: "res4b20_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20" + top: "res4b21_branch2a" + name: "res4b21_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "bn4b21_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "scale4b21_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2a" + bottom: "res4b21_branch2a" + name: "res4b21_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2b" + name: "res4b21_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "bn4b21_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "scale4b21_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2b" + bottom: "res4b21_branch2b" + name: "res4b21_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2c" + 
name: "res4b21_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "bn4b21_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "scale4b21_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20" + bottom: "res4b21_branch2c" + top: "res4b21" + name: "res4b21" + type: "Eltwise" +} + +layer { + bottom: "res4b21" + top: "res4b21" + name: "res4b21_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21" + top: "res4b22_branch2a" + name: "res4b22_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "bn4b22_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "scale4b22_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2a" + bottom: "res4b22_branch2a" + name: "res4b22_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2b" + name: "res4b22_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "bn4b22_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "scale4b22_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2b" + bottom: "res4b22_branch2b" + name: "res4b22_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2c" + name: "res4b22_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "bn4b22_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "scale4b22_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } +} + +layer { + bottom: "res4b21" + bottom: "res4b22_branch2c" + top: "res4b22" + name: "res4b22" + type: "Eltwise" +} + +layer { + bottom: "res4b22" + top: "res4b22" + name: "res4b22_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2a" + bottom: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2b" + bottom: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} 
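(Editor's note, not part of the patch.) In the res5 stage entered above, res5a_branch1 and res5a_branch2a keep stride 1 while the 3x3 convolutions (res5a_branch2b here, and res5b/res5c below) use dilation: 2, so the backbone's effective stride stays at 16 rather than 32; this is why the proposal layer later in this file passes 'feat_stride': 16 and the PSROIPooling layers use spatial_scale: 0.0625. A minimal Python sketch of that arithmetic, assuming only the standard ResNet-101 stage strides and the PASCAL VOC class count used in this prototxt:

# Editor's sketch (Python): reproduces the stride and head-depth arithmetic implied by this prototxt.
# conv1 and pool1 each downsample by 2, res3a and res4a by 2 again; the res5 stage here
# keeps stride 1 and uses dilation 2 in its 3x3 convolutions, so it adds no further downsampling.
stage_strides = [2, 2, 1, 2, 2, 1]      # conv1, pool1, res2, res3, res4, res5 (dilated)
feat_stride = 1
for s in stage_strides:
    feat_stride *= s
assert feat_stride == 16                # matches "'feat_stride': 16" in the proposal layer below
spatial_scale = 1.0 / feat_stride       # 0.0625, the PSROIPooling spatial_scale used later

# Class-aware R-FCN head depths that appear further down in this file:
num_classes, group_size = 21, 7         # PASCAL VOC (20 classes + background), 7x7 position-sensitive bins
assert num_classes * group_size ** 2 == 1029        # rfcn_cls num_output
assert 4 * num_classes * group_size ** 2 == 4116    # rfcn_bbox num_output (class-aware: 4 coords per class)
print(feat_stride, spatial_scale)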
+ +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2a" + bottom: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2b" + bottom: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: 
"res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2a" + bottom: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2b" + bottom: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 
dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 4116 #4*21*(7^2) (4*cls_num)*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 84 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred_pre" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#-----------------------output------------------------ +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob_pre" +} + +layer { + name: "cls_prob_reshape" + type: "Reshape" + bottom: "cls_prob_pre" + top: "cls_prob" + reshape_param { + shape { + dim: -1 + dim: 21 + } + } +} + +layer { + name: "bbox_pred_reshape" + type: "Reshape" + bottom: "bbox_pred_pre" + top: "bbox_pred" + reshape_param { + shape { + dim: -1 + dim: 84 + } + } +} + + diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/train_ohem.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/train_ohem.prototxt new file mode 100644 index 0000000..ae3ce44 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/class-aware/train_ohem.prototxt @@ -0,0 +1,7343 @@ +name: "ResNet-101" +layer { + name: 'input-data' + type: 'Python' + top:
'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "conv1" + bottom: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2a" + bottom: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 
0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2b" + bottom: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2a" + bottom: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2b" + bottom: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param 
{ + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2a" + bottom: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2b" + bottom: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2a" + bottom: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2b" + bottom: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b1_branch2a" + name: "res3b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "bn3b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "scale3b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2a" + bottom: "res3b1_branch2a" + name: "res3b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2b" + name: "res3b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "bn3b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "scale3b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2b" + bottom: "res3b1_branch2b" + name: "res3b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2c" + name: "res3b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "bn3b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "scale3b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b1_branch2c" + top: "res3b1" + name: "res3b1" + type: "Eltwise" +} + +layer { + bottom: "res3b1" + top: "res3b1" + name: "res3b1_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1" + top: "res3b2_branch2a" + name: "res3b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "bn3b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "scale3b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2a" + bottom: "res3b2_branch2a" + name: "res3b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2b" + name: "res3b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "bn3b2_branch2b" + type: "BatchNorm" + batch_norm_param { + 
use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "scale3b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2b" + bottom: "res3b2_branch2b" + name: "res3b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2c" + name: "res3b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "bn3b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "scale3b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1" + bottom: "res3b2_branch2c" + top: "res3b2" + name: "res3b2" + type: "Eltwise" +} + +layer { + bottom: "res3b2" + top: "res3b2" + name: "res3b2_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2" + top: "res3b3_branch2a" + name: "res3b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "bn3b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "scale3b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2a" + bottom: "res3b3_branch2a" + name: "res3b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2b" + name: "res3b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "bn3b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "scale3b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2b" + bottom: "res3b3_branch2b" + name: "res3b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2c" + name: "res3b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + 
} +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "bn3b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "scale3b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2" + bottom: "res3b3_branch2c" + top: "res3b3" + name: "res3b3" + type: "Eltwise" +} + +layer { + bottom: "res3b3" + top: "res3b3" + name: "res3b3_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2a" + bottom: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2b" + bottom: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + 
bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b1_branch2a" + name: "res4b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "bn4b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "scale4b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2a" + bottom: "res4b1_branch2a" + name: "res4b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2b" + name: "res4b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "bn4b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "scale4b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2b" + bottom: "res4b1_branch2b" + name: "res4b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2c" + name: "res4b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "bn4b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "scale4b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b1_branch2c" + top: "res4b1" + name: "res4b1" + type: "Eltwise" +} + +layer { + bottom: "res4b1" + top: "res4b1" + name: "res4b1_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1" + top: 
"res4b2_branch2a" + name: "res4b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "bn4b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "scale4b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2a" + bottom: "res4b2_branch2a" + name: "res4b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2b" + name: "res4b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "bn4b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "scale4b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2b" + bottom: "res4b2_branch2b" + name: "res4b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2c" + name: "res4b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "bn4b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "scale4b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1" + bottom: "res4b2_branch2c" + top: "res4b2" + name: "res4b2" + type: "Eltwise" +} + +layer { + bottom: "res4b2" + top: "res4b2" + name: "res4b2_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2" + top: "res4b3_branch2a" + name: "res4b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "bn4b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "scale4b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: 
"res4b3_branch2a" + bottom: "res4b3_branch2a" + name: "res4b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2b" + name: "res4b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "bn4b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "scale4b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2b" + bottom: "res4b3_branch2b" + name: "res4b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2c" + name: "res4b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "bn4b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "scale4b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2" + bottom: "res4b3_branch2c" + top: "res4b3" + name: "res4b3" + type: "Eltwise" +} + +layer { + bottom: "res4b3" + top: "res4b3" + name: "res4b3_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3" + top: "res4b4_branch2a" + name: "res4b4_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "bn4b4_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "scale4b4_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2a" + bottom: "res4b4_branch2a" + name: "res4b4_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2b" + name: "res4b4_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "bn4b4_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "scale4b4_branch2b" + type: "Scale" + scale_param 
{ + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2b" + bottom: "res4b4_branch2b" + name: "res4b4_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2c" + name: "res4b4_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "bn4b4_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "scale4b4_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3" + bottom: "res4b4_branch2c" + top: "res4b4" + name: "res4b4" + type: "Eltwise" +} + +layer { + bottom: "res4b4" + top: "res4b4" + name: "res4b4_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4" + top: "res4b5_branch2a" + name: "res4b5_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "bn4b5_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "scale4b5_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2a" + bottom: "res4b5_branch2a" + name: "res4b5_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2b" + name: "res4b5_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "bn4b5_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "scale4b5_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2b" + bottom: "res4b5_branch2b" + name: "res4b5_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2c" + name: "res4b5_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "bn4b5_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "scale4b5_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4" + bottom: "res4b5_branch2c" + top: "res4b5" + name: "res4b5" + type: "Eltwise" +} + +layer { + bottom: "res4b5" + top: "res4b5" + name: "res4b5_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5" + top: "res4b6_branch2a" + name: "res4b6_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "bn4b6_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "scale4b6_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2a" + bottom: "res4b6_branch2a" + name: "res4b6_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2b" + name: "res4b6_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "bn4b6_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "scale4b6_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2b" + bottom: "res4b6_branch2b" + name: "res4b6_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2c" + name: "res4b6_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "bn4b6_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "scale4b6_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5" + bottom: "res4b6_branch2c" + top: "res4b6" + name: "res4b6" + type: "Eltwise" +} + +layer { + bottom: "res4b6" + top: "res4b6" + name: "res4b6_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6" + top: "res4b7_branch2a" + name: "res4b7_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + 
name: "bn4b7_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "scale4b7_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2a" + bottom: "res4b7_branch2a" + name: "res4b7_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2b" + name: "res4b7_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "bn4b7_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "scale4b7_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2b" + bottom: "res4b7_branch2b" + name: "res4b7_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2c" + name: "res4b7_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "bn4b7_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "scale4b7_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6" + bottom: "res4b7_branch2c" + top: "res4b7" + name: "res4b7" + type: "Eltwise" +} + +layer { + bottom: "res4b7" + top: "res4b7" + name: "res4b7_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7" + top: "res4b8_branch2a" + name: "res4b8_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "bn4b8_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "scale4b8_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2a" + bottom: "res4b8_branch2a" + name: "res4b8_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2b" + name: "res4b8_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + 
pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "bn4b8_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "scale4b8_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2b" + bottom: "res4b8_branch2b" + name: "res4b8_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2c" + name: "res4b8_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "bn4b8_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "scale4b8_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7" + bottom: "res4b8_branch2c" + top: "res4b8" + name: "res4b8" + type: "Eltwise" +} + +layer { + bottom: "res4b8" + top: "res4b8" + name: "res4b8_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8" + top: "res4b9_branch2a" + name: "res4b9_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "bn4b9_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "scale4b9_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2a" + bottom: "res4b9_branch2a" + name: "res4b9_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2b" + name: "res4b9_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "bn4b9_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "scale4b9_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2b" + bottom: "res4b9_branch2b" + name: "res4b9_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: 
"res4b9_branch2b" + top: "res4b9_branch2c" + name: "res4b9_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "bn4b9_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "scale4b9_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8" + bottom: "res4b9_branch2c" + top: "res4b9" + name: "res4b9" + type: "Eltwise" +} + +layer { + bottom: "res4b9" + top: "res4b9" + name: "res4b9_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9" + top: "res4b10_branch2a" + name: "res4b10_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "bn4b10_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "scale4b10_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2a" + bottom: "res4b10_branch2a" + name: "res4b10_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2b" + name: "res4b10_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "bn4b10_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "scale4b10_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2b" + bottom: "res4b10_branch2b" + name: "res4b10_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2c" + name: "res4b10_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "bn4b10_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "scale4b10_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9" + bottom: "res4b10_branch2c" + top: "res4b10" + name: "res4b10" + type: "Eltwise" +} + +layer { + bottom: "res4b10" + top: "res4b10" + name: "res4b10_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10" + top: "res4b11_branch2a" + name: "res4b11_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "bn4b11_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "scale4b11_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2a" + bottom: "res4b11_branch2a" + name: "res4b11_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2b" + name: "res4b11_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "bn4b11_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "scale4b11_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2b" + bottom: "res4b11_branch2b" + name: "res4b11_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2c" + name: "res4b11_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "bn4b11_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "scale4b11_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10" + bottom: "res4b11_branch2c" + top: "res4b11" + name: "res4b11" + type: "Eltwise" +} + +layer { + bottom: "res4b11" + top: "res4b11" + name: "res4b11_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11" + top: "res4b12_branch2a" + name: "res4b12_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "bn4b12_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "scale4b12_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2a" + bottom: "res4b12_branch2a" + name: "res4b12_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2b" + name: "res4b12_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "bn4b12_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "scale4b12_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2b" + bottom: "res4b12_branch2b" + name: "res4b12_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2c" + name: "res4b12_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "bn4b12_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "scale4b12_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11" + bottom: "res4b12_branch2c" + top: "res4b12" + name: "res4b12" + type: "Eltwise" +} + +layer { + bottom: "res4b12" + top: "res4b12" + name: "res4b12_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12" + top: "res4b13_branch2a" + name: "res4b13_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "bn4b13_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "scale4b13_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2a" + bottom: "res4b13_branch2a" + name: "res4b13_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2b" + name: "res4b13_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2b" + 
top: "res4b13_branch2b" + name: "bn4b13_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "scale4b13_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2b" + bottom: "res4b13_branch2b" + name: "res4b13_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2c" + name: "res4b13_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "bn4b13_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "scale4b13_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12" + bottom: "res4b13_branch2c" + top: "res4b13" + name: "res4b13" + type: "Eltwise" +} + +layer { + bottom: "res4b13" + top: "res4b13" + name: "res4b13_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13" + top: "res4b14_branch2a" + name: "res4b14_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "bn4b14_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "scale4b14_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2a" + bottom: "res4b14_branch2a" + name: "res4b14_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2b" + name: "res4b14_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "bn4b14_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "scale4b14_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2b" + bottom: "res4b14_branch2b" + name: "res4b14_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2c" + name: "res4b14_branch2c" + 
type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "bn4b14_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "scale4b14_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13" + bottom: "res4b14_branch2c" + top: "res4b14" + name: "res4b14" + type: "Eltwise" +} + +layer { + bottom: "res4b14" + top: "res4b14" + name: "res4b14_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14" + top: "res4b15_branch2a" + name: "res4b15_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "bn4b15_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "scale4b15_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2a" + bottom: "res4b15_branch2a" + name: "res4b15_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2b" + name: "res4b15_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "bn4b15_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "scale4b15_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2b" + bottom: "res4b15_branch2b" + name: "res4b15_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2c" + name: "res4b15_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "bn4b15_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "scale4b15_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + 
bottom: "res4b14" + bottom: "res4b15_branch2c" + top: "res4b15" + name: "res4b15" + type: "Eltwise" +} + +layer { + bottom: "res4b15" + top: "res4b15" + name: "res4b15_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15" + top: "res4b16_branch2a" + name: "res4b16_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "bn4b16_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "scale4b16_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2a" + bottom: "res4b16_branch2a" + name: "res4b16_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2b" + name: "res4b16_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "bn4b16_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "scale4b16_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2b" + bottom: "res4b16_branch2b" + name: "res4b16_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2c" + name: "res4b16_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "bn4b16_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "scale4b16_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15" + bottom: "res4b16_branch2c" + top: "res4b16" + name: "res4b16" + type: "Eltwise" +} + +layer { + bottom: "res4b16" + top: "res4b16" + name: "res4b16_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16" + top: "res4b17_branch2a" + name: "res4b17_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "bn4b17_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 
0.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "scale4b17_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2a" + bottom: "res4b17_branch2a" + name: "res4b17_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2b" + name: "res4b17_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "bn4b17_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "scale4b17_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2b" + bottom: "res4b17_branch2b" + name: "res4b17_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2c" + name: "res4b17_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "bn4b17_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "scale4b17_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16" + bottom: "res4b17_branch2c" + top: "res4b17" + name: "res4b17" + type: "Eltwise" +} + +layer { + bottom: "res4b17" + top: "res4b17" + name: "res4b17_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17" + top: "res4b18_branch2a" + name: "res4b18_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "bn4b18_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "scale4b18_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2a" + bottom: "res4b18_branch2a" + name: "res4b18_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2b" + name: "res4b18_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "bn4b18_branch2b" + type: 
"BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "scale4b18_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2b" + bottom: "res4b18_branch2b" + name: "res4b18_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2c" + name: "res4b18_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "bn4b18_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "scale4b18_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17" + bottom: "res4b18_branch2c" + top: "res4b18" + name: "res4b18" + type: "Eltwise" +} + +layer { + bottom: "res4b18" + top: "res4b18" + name: "res4b18_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18" + top: "res4b19_branch2a" + name: "res4b19_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "bn4b19_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "scale4b19_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2a" + bottom: "res4b19_branch2a" + name: "res4b19_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2b" + name: "res4b19_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "bn4b19_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "scale4b19_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2b" + bottom: "res4b19_branch2b" + name: "res4b19_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2c" + name: "res4b19_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + 
kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "bn4b19_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "scale4b19_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18" + bottom: "res4b19_branch2c" + top: "res4b19" + name: "res4b19" + type: "Eltwise" +} + +layer { + bottom: "res4b19" + top: "res4b19" + name: "res4b19_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19" + top: "res4b20_branch2a" + name: "res4b20_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "bn4b20_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "scale4b20_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2a" + bottom: "res4b20_branch2a" + name: "res4b20_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2b" + name: "res4b20_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "bn4b20_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "scale4b20_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2b" + bottom: "res4b20_branch2b" + name: "res4b20_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2c" + name: "res4b20_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "bn4b20_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "scale4b20_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19" + bottom: "res4b20_branch2c" + top: "res4b20" + 
name: "res4b20" + type: "Eltwise" +} + +layer { + bottom: "res4b20" + top: "res4b20" + name: "res4b20_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20" + top: "res4b21_branch2a" + name: "res4b21_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "bn4b21_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "scale4b21_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2a" + bottom: "res4b21_branch2a" + name: "res4b21_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2b" + name: "res4b21_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "bn4b21_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "scale4b21_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2b" + bottom: "res4b21_branch2b" + name: "res4b21_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2c" + name: "res4b21_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "bn4b21_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "scale4b21_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20" + bottom: "res4b21_branch2c" + top: "res4b21" + name: "res4b21" + type: "Eltwise" +} + +layer { + bottom: "res4b21" + top: "res4b21" + name: "res4b21_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21" + top: "res4b22_branch2a" + name: "res4b22_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "bn4b22_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: 
"res4b22_branch2a" + name: "scale4b22_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2a" + bottom: "res4b22_branch2a" + name: "res4b22_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2b" + name: "res4b22_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "bn4b22_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "scale4b22_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2b" + bottom: "res4b22_branch2b" + name: "res4b22_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2c" + name: "res4b22_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "bn4b22_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "scale4b22_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21" + bottom: "res4b22_branch2c" + top: "res4b22" + name: "res4b22" + type: "Eltwise" +} + +layer { + bottom: "res4b22" + top: "res4b22" + name: "res4b22_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + 
bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2a" + bottom: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2b" + bottom: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2a" + bottom: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2b" + bottom: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2a" + bottom: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2b" + bottom: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: 
"bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 
'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 21" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 4116 #4*21*(7^2) 4*cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 84 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#--------------online hard example mining-------------- +layer { + name: "per_roi_loss_cls" + type: "SoftmaxWithLossOHEM" + bottom: "cls_score" + bottom: "labels" + top: "temp_loss_cls" + top: "temp_prob_cls" + top: "per_roi_loss_cls" + loss_weight: 0 + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + top: "temp_loss_bbox" + top: "per_roi_loss_bbox" + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss" + type: "Eltwise" + bottom: "per_roi_loss_cls" + bottom: "per_roi_loss_bbox" + top: "per_roi_loss" + propagate_down: false + propagate_down: false +} + 
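
The channel counts in the R-FCN head above follow directly from the position-sensitive score-map design: with 21 PASCAL VOC classes (background included) and group_size: 7, rfcn_cls emits 21 * 7^2 = 1029 maps and the class-aware rfcn_bbox emits 4 * 21 * 7^2 = 4116 maps, while spatial_scale: 0.0625 is simply 1/16, the backbone's feature stride. Each spatial bin of the PSROIPooling layers reads from its own dedicated group of channels, and the 7x7 average pooling that follows collapses the bins into one score per class (or per box coordinate). A toy NumPy sketch of that bin-to-channel mapping, assuming a single image and an RoI already expressed in score-map coordinates (this is only an illustration, not the repository's CUDA kernel):

    import numpy as np

    def ps_roi_pool(score_maps, roi, output_dim=21, group_size=7):
        """Toy position-sensitive RoI pooling on one image.

        score_maps: (output_dim * group_size**2, H, W) array, e.g. the rfcn_cls output.
        roi: (x1, y1, x2, y2) already scaled by spatial_scale (1/16 here).
        Returns an (output_dim, group_size, group_size) map; averaging it over the
        7x7 bins is what ave_cls_score_rois / ave_bbox_pred_rois do with a 7x7 AVE pool.
        """
        x1, y1, x2, y2 = roi
        bin_w = max(x2 - x1, 1) / group_size
        bin_h = max(y2 - y1, 1) / group_size
        pooled = np.zeros((output_dim, group_size, group_size), dtype=np.float32)
        for c in range(output_dim):
            for i in range(group_size):          # bin row
                for j in range(group_size):      # bin column
                    ch = (c * group_size + i) * group_size + j   # dedicated score map for this bin
                    y_lo = int(y1 + i * bin_h)
                    x_lo = int(x1 + j * bin_w)
                    ys = slice(y_lo, max(int(y1 + (i + 1) * bin_h), y_lo + 1))
                    xs = slice(x_lo, max(int(x1 + (j + 1) * bin_w), x_lo + 1))
                    pooled[c, i, j] = score_maps[ch][ys, xs].mean()
        return pooled

    # 21 * 7 * 7 = 1029 score maps, as in rfcn_cls above
    maps = np.random.randn(1029, 38, 63).astype(np.float32)
    cls_score = ps_roi_pool(maps, roi=(10, 8, 31, 26)).mean(axis=(1, 2))   # shape (21,)
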
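The annotator_detector layer that follows (type BoxAnnotatorOHEM) turns the zero-weighted per-RoI losses computed above into online hard example mining: the classification and bbox losses are summed per RoI, the roi_per_img: 128 highest-loss RoIs are kept, and every other RoI receives label -1 (the ignore_label of the final SoftmaxWithLoss) and zero bbox weights, which is also why loss_bbox below is normalized by the fixed constant 128 rather than by the number of valid labels. A rough NumPy sketch of the selection rule, assuming one image per minibatch (the real layer does this in its C++/CUDA implementation):

    import numpy as np

    def ohem_select(per_roi_loss, labels, bbox_inside_weights, roi_per_img=128, ignore_label=-1):
        """Keep the roi_per_img hardest RoIs; mask out the rest.

        per_roi_loss: (R,) summed cls + bbox loss per RoI (the per_roi_loss blob above).
        labels: (R,) class labels from the proposal target layer.
        bbox_inside_weights: (R, 4 * num_classes) regression weights.
        """
        keep = np.argsort(-per_roi_loss)[:roi_per_img]        # hardest RoIs first
        labels_ohem = np.full_like(labels, ignore_label)
        labels_ohem[keep] = labels[keep]                       # others are ignored by SoftmaxWithLoss
        bbox_weights_ohem = np.zeros_like(bbox_inside_weights)
        bbox_weights_ohem[keep] = bbox_inside_weights[keep]    # others get zero SmoothL1 weight
        return labels_ohem, bbox_weights_ohem
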
"BoxAnnotatorOHEM" + box_annotator_ohem_param { + roi_per_img: 128 + ignore_label: -1 + } + propagate_down: false + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "silence" + type: "Silence" + bottom: "bbox_outside_weights" + bottom: "temp_loss_cls" + bottom: "temp_prob_cls" + bottom: "temp_loss_bbox" +} + +#-----------------------output------------------------ +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels_ohem" + top: "loss_cls" + loss_weight: 1 + loss_param { + ignore_label: -1 + } + propagate_down: true + propagate_down: false +} + +layer { + name: "accuarcy" + type: "Accuracy" + bottom: "cls_score" + bottom: "labels_ohem" + top: "accuarcy" + #include: { phase: TEST } + accuracy_param { + ignore_label: -1 + } + propagate_down: false + propagate_down: false +} + +layer { + name: "loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_loss_weights_ohem" + top: "loss_bbox" + loss_weight: 1 + loss_param { + normalization: PRE_FIXED + pre_fixed_normalizer: 128 + } + propagate_down: true + propagate_down: false + propagate_down: false +} + diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/solver.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/solver.prototxt new file mode 100644 index 0000000..40e0173 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/solver.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 80000 +display: 20 + +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet101_rfcn" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem.prototxt new file mode 100644 index 0000000..5247534 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 80000 +display: 20 + +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet101_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem_continue.prototxt~ b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem_continue.prototxt~ new file mode 100644 index 0000000..e4b9335 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_ohem_continue.prototxt~ @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt" +base_lr: 0.001 +lr_policy: "multistep" +gamma: 0.1 +stepvalue: 10000 +display: 20 + +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet101_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup.prototxt new file mode 100644 index 0000000..39e5912 --- /dev/null +++ 
b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup.prototxt @@ -0,0 +1,14 @@ +train_net: "models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt" +lr_policy: "fixed" +base_lr: 0.0001 + +display: 20 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet101_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup_continue.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup_continue.prototxt new file mode 100644 index 0000000..06d470c --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/solver_warmup_continue.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 70000 + +display: 20 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet101_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/test_agonistic.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/test_agonistic.prototxt new file mode 100644 index 0000000..a2b0965 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/test_agonistic.prototxt @@ -0,0 +1,7186 @@ +name: "ResNet-101" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "conv1" + bottom: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param 
{ + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2a" + bottom: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2b" + bottom: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2a" + bottom: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" 
+ top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2b" + bottom: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2a" + bottom: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2b" + bottom: "res2c_branch2b" + 
name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2a" + bottom: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2b" + bottom: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: 
"ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b1_branch2a" + name: "res3b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "bn3b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "scale3b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2a" + bottom: "res3b1_branch2a" + name: "res3b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2b" + name: "res3b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "bn3b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "scale3b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2b" + bottom: "res3b1_branch2b" + name: "res3b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2c" + name: "res3b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "bn3b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "scale3b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 
+ decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b1_branch2c" + top: "res3b1" + name: "res3b1" + type: "Eltwise" +} + +layer { + bottom: "res3b1" + top: "res3b1" + name: "res3b1_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1" + top: "res3b2_branch2a" + name: "res3b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "bn3b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "scale3b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2a" + bottom: "res3b2_branch2a" + name: "res3b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2b" + name: "res3b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "bn3b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "scale3b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2b" + bottom: "res3b2_branch2b" + name: "res3b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2c" + name: "res3b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "bn3b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "scale3b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1" + bottom: "res3b2_branch2c" + top: "res3b2" + name: "res3b2" + type: "Eltwise" +} + +layer { + bottom: "res3b2" + top: "res3b2" + name: "res3b2_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2" + top: "res3b3_branch2a" + name: "res3b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "bn3b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + 
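
A note on the recurring pattern here: every BatchNorm layer in these prototxts, such as bn3b3_branch2a above, runs with use_global_stats: true and all lr_mult/decay_mult set to 0, so it only applies the stored ImageNet statistics, and the Scale layer that immediately follows (scale3b3_branch2a below) carries the per-channel gamma/beta, likewise frozen. At run time the pair reduces to a fixed per-channel affine transform; a minimal NumPy illustration, assuming the BN statistics and Scale blobs are available as plain arrays:

    import numpy as np

    def frozen_bn_scale(x, mean, var, gamma, beta, eps=1e-5):
        """What a BatchNorm(use_global_stats) + Scale(bias_term) pair computes per channel.

        x: (C, H, W) feature map; mean/var: stored BN statistics; gamma/beta: Scale blobs.
        (Caffe's BatchNorm also stores a moving-average factor that mean/var are divided by first.)
        """
        x_hat = (x - mean[:, None, None]) / np.sqrt(var[:, None, None] + eps)
        return gamma[:, None, None] * x_hat + beta[:, None, None]

    x = np.random.randn(64, 56, 56).astype(np.float32)
    y = frozen_bn_scale(x, np.zeros(64), np.ones(64), np.ones(64), np.zeros(64))
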
+layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "scale3b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2a" + bottom: "res3b3_branch2a" + name: "res3b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2b" + name: "res3b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "bn3b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "scale3b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2b" + bottom: "res3b3_branch2b" + name: "res3b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2c" + name: "res3b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "bn3b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "scale3b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2" + bottom: "res3b3_branch2c" + top: "res3b3" + name: "res3b3" + type: "Eltwise" +} + +layer { + bottom: "res3b3" + top: "res3b3" + name: "res3b3_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + 
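
The stride-2 res4a branches just defined are the last spatial downsampling in this backbone: conv1, pool1, res3a and res4a each halve the resolution, giving a total feature stride of 16, and the res5 stage keeps stride 1 by switching to dilation: 2 convolutions (as in the res5b/res5c branches of the training net earlier in this patch). That accumulated stride is where feat_stride: 16 in the RPN Python layers and spatial_scale: 0.0625 in the PSROIPooling layers come from. A one-line sanity check in Python:

    # Downsampling bookkeeping for this backbone (a sanity check, not repository code):
    # conv1 and pool1 each halve the resolution, as do the stride-2 res3a and res4a branches;
    # res5 uses stride 1 with dilation 2, so the total stride stays at 16.
    strides = {"conv1": 2, "pool1": 2, "res3a": 2, "res4a": 2, "res5a": 1}
    feat_stride = 1
    for s in strides.values():
        feat_stride *= s
    assert feat_stride == 16   # matches feat_stride: 16 and spatial_scale: 0.0625 (= 1/16)
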
+layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2a" + bottom: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2b" + bottom: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b1_branch2a" + name: "res4b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "bn4b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "scale4b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2a" + bottom: "res4b1_branch2a" + name: "res4b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2b" + name: "res4b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "bn4b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "scale4b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2b" + bottom: "res4b1_branch2b" + name: "res4b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2c" + name: "res4b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "bn4b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "scale4b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b1_branch2c" + top: "res4b1" + name: "res4b1" + type: "Eltwise" +} + +layer { + bottom: "res4b1" + top: "res4b1" + name: "res4b1_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1" + top: "res4b2_branch2a" + name: "res4b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "bn4b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "scale4b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2a" + bottom: "res4b2_branch2a" + name: "res4b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2b" + name: "res4b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "bn4b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "scale4b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2b" + bottom: "res4b2_branch2b" + name: "res4b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2c" + name: "res4b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: 
"res4b2_branch2c" + name: "bn4b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "scale4b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1" + bottom: "res4b2_branch2c" + top: "res4b2" + name: "res4b2" + type: "Eltwise" +} + +layer { + bottom: "res4b2" + top: "res4b2" + name: "res4b2_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2" + top: "res4b3_branch2a" + name: "res4b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "bn4b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "scale4b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2a" + bottom: "res4b3_branch2a" + name: "res4b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2b" + name: "res4b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "bn4b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "scale4b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2b" + bottom: "res4b3_branch2b" + name: "res4b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2c" + name: "res4b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "bn4b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "scale4b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2" + bottom: "res4b3_branch2c" + top: "res4b3" + name: "res4b3" + type: "Eltwise" +} + +layer { + bottom: "res4b3" + top: "res4b3" + name: "res4b3_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3" + top: "res4b4_branch2a" + name: 
"res4b4_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "bn4b4_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "scale4b4_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2a" + bottom: "res4b4_branch2a" + name: "res4b4_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2b" + name: "res4b4_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "bn4b4_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "scale4b4_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2b" + bottom: "res4b4_branch2b" + name: "res4b4_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2c" + name: "res4b4_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "bn4b4_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "scale4b4_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3" + bottom: "res4b4_branch2c" + top: "res4b4" + name: "res4b4" + type: "Eltwise" +} + +layer { + bottom: "res4b4" + top: "res4b4" + name: "res4b4_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4" + top: "res4b5_branch2a" + name: "res4b5_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "bn4b5_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "scale4b5_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2a" + bottom: 
"res4b5_branch2a" + name: "res4b5_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2b" + name: "res4b5_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "bn4b5_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "scale4b5_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2b" + bottom: "res4b5_branch2b" + name: "res4b5_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2c" + name: "res4b5_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "bn4b5_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "scale4b5_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4" + bottom: "res4b5_branch2c" + top: "res4b5" + name: "res4b5" + type: "Eltwise" +} + +layer { + bottom: "res4b5" + top: "res4b5" + name: "res4b5_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5" + top: "res4b6_branch2a" + name: "res4b6_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "bn4b6_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "scale4b6_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2a" + bottom: "res4b6_branch2a" + name: "res4b6_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2b" + name: "res4b6_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "bn4b6_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "scale4b6_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2b" + bottom: "res4b6_branch2b" + name: "res4b6_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2c" + name: "res4b6_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "bn4b6_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "scale4b6_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5" + bottom: "res4b6_branch2c" + top: "res4b6" + name: "res4b6" + type: "Eltwise" +} + +layer { + bottom: "res4b6" + top: "res4b6" + name: "res4b6_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6" + top: "res4b7_branch2a" + name: "res4b7_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "bn4b7_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "scale4b7_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2a" + bottom: "res4b7_branch2a" + name: "res4b7_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2b" + name: "res4b7_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "bn4b7_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "scale4b7_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2b" + bottom: "res4b7_branch2b" + name: "res4b7_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2c" + name: "res4b7_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "bn4b7_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer 
{ + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "scale4b7_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6" + bottom: "res4b7_branch2c" + top: "res4b7" + name: "res4b7" + type: "Eltwise" +} + +layer { + bottom: "res4b7" + top: "res4b7" + name: "res4b7_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7" + top: "res4b8_branch2a" + name: "res4b8_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "bn4b8_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "scale4b8_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2a" + bottom: "res4b8_branch2a" + name: "res4b8_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2b" + name: "res4b8_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "bn4b8_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "scale4b8_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2b" + bottom: "res4b8_branch2b" + name: "res4b8_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2c" + name: "res4b8_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "bn4b8_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "scale4b8_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7" + bottom: "res4b8_branch2c" + top: "res4b8" + name: "res4b8" + type: "Eltwise" +} + +layer { + bottom: "res4b8" + top: "res4b8" + name: "res4b8_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8" + top: "res4b9_branch2a" + name: "res4b9_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "bn4b9_branch2a" + type: 
"BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "scale4b9_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2a" + bottom: "res4b9_branch2a" + name: "res4b9_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2b" + name: "res4b9_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "bn4b9_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "scale4b9_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2b" + bottom: "res4b9_branch2b" + name: "res4b9_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2c" + name: "res4b9_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "bn4b9_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "scale4b9_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8" + bottom: "res4b9_branch2c" + top: "res4b9" + name: "res4b9" + type: "Eltwise" +} + +layer { + bottom: "res4b9" + top: "res4b9" + name: "res4b9_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9" + top: "res4b10_branch2a" + name: "res4b10_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "bn4b10_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "scale4b10_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2a" + bottom: "res4b10_branch2a" + name: "res4b10_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2b" + name: "res4b10_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 
+ bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "bn4b10_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "scale4b10_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2b" + bottom: "res4b10_branch2b" + name: "res4b10_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2c" + name: "res4b10_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "bn4b10_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "scale4b10_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9" + bottom: "res4b10_branch2c" + top: "res4b10" + name: "res4b10" + type: "Eltwise" +} + +layer { + bottom: "res4b10" + top: "res4b10" + name: "res4b10_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10" + top: "res4b11_branch2a" + name: "res4b11_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "bn4b11_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "scale4b11_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2a" + bottom: "res4b11_branch2a" + name: "res4b11_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2b" + name: "res4b11_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "bn4b11_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "scale4b11_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2b" + bottom: "res4b11_branch2b" + name: "res4b11_branch2b_relu" + type: "ReLU" +} + 
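+# Note: the res4b* units in this stage all follow the same ResNet-101 bottleneck
+# pattern (1x1/256 -> 3x3/256 with pad 1 -> 1x1/1024); every convolution is
+# followed by a frozen BatchNorm (use_global_stats: true, lr_mult: 0.0) and a
+# Scale layer, and each unit closes with an Eltwise shortcut sum plus ReLU.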
+layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2c" + name: "res4b11_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "bn4b11_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "scale4b11_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10" + bottom: "res4b11_branch2c" + top: "res4b11" + name: "res4b11" + type: "Eltwise" +} + +layer { + bottom: "res4b11" + top: "res4b11" + name: "res4b11_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11" + top: "res4b12_branch2a" + name: "res4b12_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "bn4b12_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "scale4b12_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2a" + bottom: "res4b12_branch2a" + name: "res4b12_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2b" + name: "res4b12_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "bn4b12_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "scale4b12_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2b" + bottom: "res4b12_branch2b" + name: "res4b12_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2c" + name: "res4b12_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "bn4b12_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "scale4b12_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 
0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11" + bottom: "res4b12_branch2c" + top: "res4b12" + name: "res4b12" + type: "Eltwise" +} + +layer { + bottom: "res4b12" + top: "res4b12" + name: "res4b12_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12" + top: "res4b13_branch2a" + name: "res4b13_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "bn4b13_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "scale4b13_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2a" + bottom: "res4b13_branch2a" + name: "res4b13_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2b" + name: "res4b13_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "bn4b13_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "scale4b13_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2b" + bottom: "res4b13_branch2b" + name: "res4b13_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2c" + name: "res4b13_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "bn4b13_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "scale4b13_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12" + bottom: "res4b13_branch2c" + top: "res4b13" + name: "res4b13" + type: "Eltwise" +} + +layer { + bottom: "res4b13" + top: "res4b13" + name: "res4b13_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13" + top: "res4b14_branch2a" + name: "res4b14_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "bn4b14_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "scale4b14_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2a" + bottom: "res4b14_branch2a" + name: "res4b14_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2b" + name: "res4b14_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "bn4b14_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "scale4b14_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2b" + bottom: "res4b14_branch2b" + name: "res4b14_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2c" + name: "res4b14_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "bn4b14_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "scale4b14_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13" + bottom: "res4b14_branch2c" + top: "res4b14" + name: "res4b14" + type: "Eltwise" +} + +layer { + bottom: "res4b14" + top: "res4b14" + name: "res4b14_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14" + top: "res4b15_branch2a" + name: "res4b15_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "bn4b15_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "scale4b15_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2a" + bottom: "res4b15_branch2a" + name: "res4b15_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2b" + name: "res4b15_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + 
bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "bn4b15_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "scale4b15_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2b" + bottom: "res4b15_branch2b" + name: "res4b15_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2c" + name: "res4b15_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "bn4b15_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "scale4b15_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14" + bottom: "res4b15_branch2c" + top: "res4b15" + name: "res4b15" + type: "Eltwise" +} + +layer { + bottom: "res4b15" + top: "res4b15" + name: "res4b15_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15" + top: "res4b16_branch2a" + name: "res4b16_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "bn4b16_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "scale4b16_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2a" + bottom: "res4b16_branch2a" + name: "res4b16_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2b" + name: "res4b16_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "bn4b16_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "scale4b16_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2b" + bottom: "res4b16_branch2b" + name: "res4b16_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2c" + 
name: "res4b16_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "bn4b16_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "scale4b16_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15" + bottom: "res4b16_branch2c" + top: "res4b16" + name: "res4b16" + type: "Eltwise" +} + +layer { + bottom: "res4b16" + top: "res4b16" + name: "res4b16_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16" + top: "res4b17_branch2a" + name: "res4b17_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "bn4b17_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "scale4b17_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2a" + bottom: "res4b17_branch2a" + name: "res4b17_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2b" + name: "res4b17_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "bn4b17_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "scale4b17_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2b" + bottom: "res4b17_branch2b" + name: "res4b17_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2c" + name: "res4b17_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "bn4b17_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "scale4b17_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } +} + +layer { + bottom: "res4b16" + bottom: "res4b17_branch2c" + top: "res4b17" + name: "res4b17" + type: "Eltwise" +} + +layer { + bottom: "res4b17" + top: "res4b17" + name: "res4b17_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17" + top: "res4b18_branch2a" + name: "res4b18_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "bn4b18_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "scale4b18_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2a" + bottom: "res4b18_branch2a" + name: "res4b18_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2b" + name: "res4b18_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "bn4b18_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "scale4b18_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2b" + bottom: "res4b18_branch2b" + name: "res4b18_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2c" + name: "res4b18_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "bn4b18_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "scale4b18_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17" + bottom: "res4b18_branch2c" + top: "res4b18" + name: "res4b18" + type: "Eltwise" +} + +layer { + bottom: "res4b18" + top: "res4b18" + name: "res4b18_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18" + top: "res4b19_branch2a" + name: "res4b19_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "bn4b19_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "scale4b19_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2a" + bottom: "res4b19_branch2a" + name: "res4b19_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2b" + name: "res4b19_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "bn4b19_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "scale4b19_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2b" + bottom: "res4b19_branch2b" + name: "res4b19_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2c" + name: "res4b19_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "bn4b19_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "scale4b19_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18" + bottom: "res4b19_branch2c" + top: "res4b19" + name: "res4b19" + type: "Eltwise" +} + +layer { + bottom: "res4b19" + top: "res4b19" + name: "res4b19_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19" + top: "res4b20_branch2a" + name: "res4b20_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "bn4b20_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "scale4b20_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2a" + bottom: "res4b20_branch2a" + name: "res4b20_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2b" + name: "res4b20_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: 
"bn4b20_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "scale4b20_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2b" + bottom: "res4b20_branch2b" + name: "res4b20_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2c" + name: "res4b20_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "bn4b20_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "scale4b20_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19" + bottom: "res4b20_branch2c" + top: "res4b20" + name: "res4b20" + type: "Eltwise" +} + +layer { + bottom: "res4b20" + top: "res4b20" + name: "res4b20_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20" + top: "res4b21_branch2a" + name: "res4b21_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "bn4b21_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "scale4b21_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2a" + bottom: "res4b21_branch2a" + name: "res4b21_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2b" + name: "res4b21_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "bn4b21_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "scale4b21_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2b" + bottom: "res4b21_branch2b" + name: "res4b21_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2c" + name: "res4b21_branch2c" + type: "Convolution" + 
convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "bn4b21_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "scale4b21_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20" + bottom: "res4b21_branch2c" + top: "res4b21" + name: "res4b21" + type: "Eltwise" +} + +layer { + bottom: "res4b21" + top: "res4b21" + name: "res4b21_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21" + top: "res4b22_branch2a" + name: "res4b22_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "bn4b22_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "scale4b22_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2a" + bottom: "res4b22_branch2a" + name: "res4b22_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2b" + name: "res4b22_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "bn4b22_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "scale4b22_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2b" + bottom: "res4b22_branch2b" + name: "res4b22_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2c" + name: "res4b22_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "bn4b22_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "scale4b22_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21" + bottom: 
"res4b22_branch2c" + top: "res4b22" + name: "res4b22" + type: "Eltwise" +} + +layer { + bottom: "res4b22" + top: "res4b22" + name: "res4b22_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2a" + bottom: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2b" + bottom: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: 
"res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2a" + bottom: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2b" + bottom: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + 
scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2a" + bottom: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2b" + bottom: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal 
============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 392 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 8 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred_pre" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#-----------------------output------------------------ +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob_pre" +} + +layer { + name: "cls_prob_reshape" + type: "Reshape" + bottom: "cls_prob_pre" + top: "cls_prob" + reshape_param { + shape { + dim: -1 + dim: 21 + } + } +} + +layer { + name: "bbox_pred_reshape" + type: "Reshape" + bottom: "bbox_pred_pre" + top: "bbox_pred" + reshape_param { + shape { + dim: -1 + dim: 8 + } + } +} + + diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic.prototxt new file mode 100644 index 0000000..4580c66 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic.prototxt @@ -0,0 +1,7274 @@ +name: "ResNet-101" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 
'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "conv1" + bottom: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2a" + bottom: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2b" + 
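+# Note: in this training prototxt the conv1 and res2* convolutions keep
+# lr_mult: 0.0, so the first two ResNet stages stay frozen and only res3 and
+# later layers are fine-tuned; BatchNorm statistics remain fixed throughout
+# via use_global_stats: true.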
bottom: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2a" + bottom: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2b" + bottom: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2a" + bottom: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2b" + bottom: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: 
"res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2a" + bottom: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2b" + bottom: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b1_branch2a" + name: "res3b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "bn3b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2a" + 
top: "res3b1_branch2a" + name: "scale3b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2a" + bottom: "res3b1_branch2a" + name: "res3b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2b" + name: "res3b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "bn3b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "scale3b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2b" + bottom: "res3b1_branch2b" + name: "res3b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2c" + name: "res3b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "bn3b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "scale3b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b1_branch2c" + top: "res3b1" + name: "res3b1" + type: "Eltwise" +} + +layer { + bottom: "res3b1" + top: "res3b1" + name: "res3b1_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1" + top: "res3b2_branch2a" + name: "res3b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "bn3b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "scale3b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2a" + bottom: "res3b2_branch2a" + name: "res3b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2b" + name: "res3b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "bn3b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "scale3b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2b" + bottom: "res3b2_branch2b" + name: "res3b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2c" + name: "res3b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "bn3b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "scale3b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1" + bottom: "res3b2_branch2c" + top: "res3b2" + name: "res3b2" + type: "Eltwise" +} + +layer { + bottom: "res3b2" + top: "res3b2" + name: "res3b2_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2" + top: "res3b3_branch2a" + name: "res3b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "bn3b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "scale3b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2a" + bottom: "res3b3_branch2a" + name: "res3b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2b" + name: "res3b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "bn3b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "scale3b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2b" + bottom: "res3b3_branch2b" + name: "res3b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2c" + name: "res3b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + 
name: "bn3b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "scale3b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2" + bottom: "res3b3_branch2c" + top: "res3b3" + name: "res3b3" + type: "Eltwise" +} + +layer { + bottom: "res3b3" + top: "res3b3" + name: "res3b3_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2a" + bottom: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2b" + bottom: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + 
type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b1_branch2a" + name: "res4b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "bn4b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "scale4b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2a" + bottom: "res4b1_branch2a" + name: "res4b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2b" + name: "res4b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "bn4b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "scale4b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2b" + bottom: "res4b1_branch2b" + name: "res4b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2c" + name: "res4b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "bn4b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "scale4b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b1_branch2c" + top: "res4b1" + name: "res4b1" + type: "Eltwise" +} + +layer { + bottom: "res4b1" + top: "res4b1" + name: "res4b1_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1" + top: "res4b2_branch2a" + name: "res4b2_branch2a" + type: "Convolution" + 
convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "bn4b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "scale4b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2a" + bottom: "res4b2_branch2a" + name: "res4b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2b" + name: "res4b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "bn4b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "scale4b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2b" + bottom: "res4b2_branch2b" + name: "res4b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2c" + name: "res4b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "bn4b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "scale4b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1" + bottom: "res4b2_branch2c" + top: "res4b2" + name: "res4b2" + type: "Eltwise" +} + +layer { + bottom: "res4b2" + top: "res4b2" + name: "res4b2_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2" + top: "res4b3_branch2a" + name: "res4b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "bn4b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "scale4b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2a" + bottom: "res4b3_branch2a" + name: 
"res4b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2b" + name: "res4b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "bn4b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "scale4b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2b" + bottom: "res4b3_branch2b" + name: "res4b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2c" + name: "res4b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "bn4b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "scale4b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2" + bottom: "res4b3_branch2c" + top: "res4b3" + name: "res4b3" + type: "Eltwise" +} + +layer { + bottom: "res4b3" + top: "res4b3" + name: "res4b3_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3" + top: "res4b4_branch2a" + name: "res4b4_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "bn4b4_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "scale4b4_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2a" + bottom: "res4b4_branch2a" + name: "res4b4_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2b" + name: "res4b4_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "bn4b4_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "scale4b4_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2b" + bottom: "res4b4_branch2b" + name: "res4b4_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2c" + name: "res4b4_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "bn4b4_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "scale4b4_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3" + bottom: "res4b4_branch2c" + top: "res4b4" + name: "res4b4" + type: "Eltwise" +} + +layer { + bottom: "res4b4" + top: "res4b4" + name: "res4b4_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4" + top: "res4b5_branch2a" + name: "res4b5_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "bn4b5_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "scale4b5_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2a" + bottom: "res4b5_branch2a" + name: "res4b5_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2b" + name: "res4b5_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "bn4b5_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "scale4b5_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2b" + bottom: "res4b5_branch2b" + name: "res4b5_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2c" + name: "res4b5_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "bn4b5_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: 
"res4b5_branch2c" + top: "res4b5_branch2c" + name: "scale4b5_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4" + bottom: "res4b5_branch2c" + top: "res4b5" + name: "res4b5" + type: "Eltwise" +} + +layer { + bottom: "res4b5" + top: "res4b5" + name: "res4b5_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5" + top: "res4b6_branch2a" + name: "res4b6_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "bn4b6_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "scale4b6_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2a" + bottom: "res4b6_branch2a" + name: "res4b6_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2b" + name: "res4b6_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "bn4b6_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "scale4b6_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2b" + bottom: "res4b6_branch2b" + name: "res4b6_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2c" + name: "res4b6_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "bn4b6_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "scale4b6_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5" + bottom: "res4b6_branch2c" + top: "res4b6" + name: "res4b6" + type: "Eltwise" +} + +layer { + bottom: "res4b6" + top: "res4b6" + name: "res4b6_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6" + top: "res4b7_branch2a" + name: "res4b7_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "bn4b7_branch2a" + type: "BatchNorm" 
+ batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "scale4b7_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2a" + bottom: "res4b7_branch2a" + name: "res4b7_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2b" + name: "res4b7_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "bn4b7_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "scale4b7_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2b" + bottom: "res4b7_branch2b" + name: "res4b7_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2c" + name: "res4b7_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "bn4b7_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "scale4b7_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6" + bottom: "res4b7_branch2c" + top: "res4b7" + name: "res4b7" + type: "Eltwise" +} + +layer { + bottom: "res4b7" + top: "res4b7" + name: "res4b7_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7" + top: "res4b8_branch2a" + name: "res4b8_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "bn4b8_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "scale4b8_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2a" + bottom: "res4b8_branch2a" + name: "res4b8_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2b" + name: "res4b8_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + 
param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "bn4b8_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "scale4b8_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2b" + bottom: "res4b8_branch2b" + name: "res4b8_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2c" + name: "res4b8_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "bn4b8_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "scale4b8_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7" + bottom: "res4b8_branch2c" + top: "res4b8" + name: "res4b8" + type: "Eltwise" +} + +layer { + bottom: "res4b8" + top: "res4b8" + name: "res4b8_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8" + top: "res4b9_branch2a" + name: "res4b9_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "bn4b9_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "scale4b9_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2a" + bottom: "res4b9_branch2a" + name: "res4b9_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2b" + name: "res4b9_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "bn4b9_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "scale4b9_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2b" + bottom: "res4b9_branch2b" + name: "res4b9_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2c" + name: 
"res4b9_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "bn4b9_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "scale4b9_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8" + bottom: "res4b9_branch2c" + top: "res4b9" + name: "res4b9" + type: "Eltwise" +} + +layer { + bottom: "res4b9" + top: "res4b9" + name: "res4b9_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9" + top: "res4b10_branch2a" + name: "res4b10_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "bn4b10_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "scale4b10_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2a" + bottom: "res4b10_branch2a" + name: "res4b10_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2b" + name: "res4b10_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "bn4b10_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "scale4b10_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2b" + bottom: "res4b10_branch2b" + name: "res4b10_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2c" + name: "res4b10_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "bn4b10_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "scale4b10_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + 
bottom: "res4b9" + bottom: "res4b10_branch2c" + top: "res4b10" + name: "res4b10" + type: "Eltwise" +} + +layer { + bottom: "res4b10" + top: "res4b10" + name: "res4b10_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10" + top: "res4b11_branch2a" + name: "res4b11_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "bn4b11_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "scale4b11_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2a" + bottom: "res4b11_branch2a" + name: "res4b11_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2b" + name: "res4b11_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "bn4b11_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "scale4b11_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2b" + bottom: "res4b11_branch2b" + name: "res4b11_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2c" + name: "res4b11_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "bn4b11_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "scale4b11_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10" + bottom: "res4b11_branch2c" + top: "res4b11" + name: "res4b11" + type: "Eltwise" +} + +layer { + bottom: "res4b11" + top: "res4b11" + name: "res4b11_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11" + top: "res4b12_branch2a" + name: "res4b12_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "bn4b12_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 
0.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "scale4b12_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2a" + bottom: "res4b12_branch2a" + name: "res4b12_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2b" + name: "res4b12_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "bn4b12_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "scale4b12_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2b" + bottom: "res4b12_branch2b" + name: "res4b12_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2c" + name: "res4b12_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "bn4b12_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "scale4b12_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11" + bottom: "res4b12_branch2c" + top: "res4b12" + name: "res4b12" + type: "Eltwise" +} + +layer { + bottom: "res4b12" + top: "res4b12" + name: "res4b12_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12" + top: "res4b13_branch2a" + name: "res4b13_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "bn4b13_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "scale4b13_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2a" + bottom: "res4b13_branch2a" + name: "res4b13_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2b" + name: "res4b13_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "bn4b13_branch2b" + type: 
"BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "scale4b13_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2b" + bottom: "res4b13_branch2b" + name: "res4b13_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2c" + name: "res4b13_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "bn4b13_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "scale4b13_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12" + bottom: "res4b13_branch2c" + top: "res4b13" + name: "res4b13" + type: "Eltwise" +} + +layer { + bottom: "res4b13" + top: "res4b13" + name: "res4b13_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13" + top: "res4b14_branch2a" + name: "res4b14_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "bn4b14_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "scale4b14_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2a" + bottom: "res4b14_branch2a" + name: "res4b14_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2b" + name: "res4b14_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "bn4b14_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "scale4b14_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2b" + bottom: "res4b14_branch2b" + name: "res4b14_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2c" + name: "res4b14_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + 
kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "bn4b14_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "scale4b14_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13" + bottom: "res4b14_branch2c" + top: "res4b14" + name: "res4b14" + type: "Eltwise" +} + +layer { + bottom: "res4b14" + top: "res4b14" + name: "res4b14_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14" + top: "res4b15_branch2a" + name: "res4b15_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "bn4b15_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "scale4b15_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2a" + bottom: "res4b15_branch2a" + name: "res4b15_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2b" + name: "res4b15_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "bn4b15_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "scale4b15_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2b" + bottom: "res4b15_branch2b" + name: "res4b15_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2c" + name: "res4b15_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "bn4b15_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "scale4b15_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14" + bottom: "res4b15_branch2c" + top: "res4b15" + 
name: "res4b15" + type: "Eltwise" +} + +layer { + bottom: "res4b15" + top: "res4b15" + name: "res4b15_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15" + top: "res4b16_branch2a" + name: "res4b16_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "bn4b16_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "scale4b16_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2a" + bottom: "res4b16_branch2a" + name: "res4b16_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2b" + name: "res4b16_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "bn4b16_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "scale4b16_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2b" + bottom: "res4b16_branch2b" + name: "res4b16_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2c" + name: "res4b16_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "bn4b16_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "scale4b16_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15" + bottom: "res4b16_branch2c" + top: "res4b16" + name: "res4b16" + type: "Eltwise" +} + +layer { + bottom: "res4b16" + top: "res4b16" + name: "res4b16_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16" + top: "res4b17_branch2a" + name: "res4b17_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "bn4b17_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: 
"res4b17_branch2a" + name: "scale4b17_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2a" + bottom: "res4b17_branch2a" + name: "res4b17_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2b" + name: "res4b17_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "bn4b17_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "scale4b17_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2b" + bottom: "res4b17_branch2b" + name: "res4b17_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2c" + name: "res4b17_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "bn4b17_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "scale4b17_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16" + bottom: "res4b17_branch2c" + top: "res4b17" + name: "res4b17" + type: "Eltwise" +} + +layer { + bottom: "res4b17" + top: "res4b17" + name: "res4b17_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17" + top: "res4b18_branch2a" + name: "res4b18_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "bn4b18_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "scale4b18_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2a" + bottom: "res4b18_branch2a" + name: "res4b18_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2b" + name: "res4b18_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "bn4b18_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } 
+ param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "scale4b18_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2b" + bottom: "res4b18_branch2b" + name: "res4b18_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2c" + name: "res4b18_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "bn4b18_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "scale4b18_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17" + bottom: "res4b18_branch2c" + top: "res4b18" + name: "res4b18" + type: "Eltwise" +} + +layer { + bottom: "res4b18" + top: "res4b18" + name: "res4b18_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18" + top: "res4b19_branch2a" + name: "res4b19_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "bn4b19_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "scale4b19_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2a" + bottom: "res4b19_branch2a" + name: "res4b19_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2b" + name: "res4b19_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "bn4b19_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "scale4b19_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2b" + bottom: "res4b19_branch2b" + name: "res4b19_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2c" + name: "res4b19_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + 
param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "bn4b19_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "scale4b19_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18" + bottom: "res4b19_branch2c" + top: "res4b19" + name: "res4b19" + type: "Eltwise" +} + +layer { + bottom: "res4b19" + top: "res4b19" + name: "res4b19_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19" + top: "res4b20_branch2a" + name: "res4b20_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "bn4b20_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "scale4b20_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2a" + bottom: "res4b20_branch2a" + name: "res4b20_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2b" + name: "res4b20_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "bn4b20_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "scale4b20_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2b" + bottom: "res4b20_branch2b" + name: "res4b20_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2c" + name: "res4b20_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "bn4b20_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "scale4b20_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19" + bottom: "res4b20_branch2c" + top: "res4b20" + name: "res4b20" + type: "Eltwise" +} + +layer { + bottom: 
"res4b20" + top: "res4b20" + name: "res4b20_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20" + top: "res4b21_branch2a" + name: "res4b21_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "bn4b21_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "scale4b21_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2a" + bottom: "res4b21_branch2a" + name: "res4b21_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2b" + name: "res4b21_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "bn4b21_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "scale4b21_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2b" + bottom: "res4b21_branch2b" + name: "res4b21_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2c" + name: "res4b21_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "bn4b21_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "scale4b21_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20" + bottom: "res4b21_branch2c" + top: "res4b21" + name: "res4b21" + type: "Eltwise" +} + +layer { + bottom: "res4b21" + top: "res4b21" + name: "res4b21_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21" + top: "res4b22_branch2a" + name: "res4b22_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "bn4b22_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "scale4b22_branch2a" + type: "Scale" + 
scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2a" + bottom: "res4b22_branch2a" + name: "res4b22_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2b" + name: "res4b22_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "bn4b22_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "scale4b22_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2b" + bottom: "res4b22_branch2b" + name: "res4b22_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2c" + name: "res4b22_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "bn4b22_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "scale4b22_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21" + bottom: "res4b22_branch2c" + top: "res4b22" + name: "res4b22" + type: "Eltwise" +} + +layer { + bottom: "res4b22" + top: "res4b22" + name: "res4b22_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: 
"scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2a" + bottom: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2b" + bottom: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2a" + bottom: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + 
} + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2b" + bottom: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2a" + bottom: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2b" + bottom: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: 
true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' 
+# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 2" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 392 #8*(7^2) 4(coords)*2(agnostic cls)*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 8 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#-----------------------output------------------------ +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 + propagate_down: true + propagate_down: false +} + +layer { + name: "accuracy" + type: "Accuracy" + bottom: "cls_score" + bottom: "labels" + top: "accuracy" + #include: { phase: TEST } + propagate_down: false + propagate_down: false +} + +layer { + name: "loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: 'bbox_inside_weights' + top: "loss_bbox" + loss_weight: 1 + loss_param { + normalization: PRE_FIXED + pre_fixed_normalizer: 128 + } + propagate_down: true + propagate_down: false + propagate_down: false +} + +layer { + name: "silence" + type: "Silence" + bottom: "bbox_outside_weights" +} + diff --git a/models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt b/models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt new file mode 100644 index 0000000..2c43883 --- /dev/null +++ b/models/pascal_voc/ResNet-101/rfcn_end2end/train_agonistic_ohem.prototxt
@@ -0,0 +1,7344 @@ +name: "ResNet-101" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "conv1" + bottom: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2a" + bottom: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param 
{ + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2a_branch2b" + bottom: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2a" + bottom: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2b_branch2b" + bottom: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: 
"res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2a" + bottom: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res2c_branch2b" + bottom: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2a" + bottom: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3a_branch2b" + bottom: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b1_branch2a" + name: "res3b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "bn3b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2a" + name: "scale3b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2a" + bottom: "res3b1_branch2a" + name: "res3b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2a" + top: "res3b1_branch2b" + name: "res3b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "bn3b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2b" + name: "scale3b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b1_branch2b" + bottom: "res3b1_branch2b" + name: "res3b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1_branch2b" + top: "res3b1_branch2c" + name: "res3b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "bn3b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1_branch2c" + top: "res3b1_branch2c" + name: "scale3b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b1_branch2c" + top: "res3b1" + name: "res3b1" + type: "Eltwise" +} + +layer { + bottom: "res3b1" + top: "res3b1" + name: "res3b1_relu" + type: "ReLU" +} + +layer { + bottom: "res3b1" + top: "res3b2_branch2a" + name: "res3b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "bn3b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2a" + name: "scale3b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2a" + bottom: "res3b2_branch2a" + name: "res3b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2a" + top: "res3b2_branch2b" + name: "res3b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: 
"res3b2_branch2b" + name: "bn3b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2b" + name: "scale3b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b2_branch2b" + bottom: "res3b2_branch2b" + name: "res3b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2_branch2b" + top: "res3b2_branch2c" + name: "res3b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "bn3b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2_branch2c" + top: "res3b2_branch2c" + name: "scale3b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b1" + bottom: "res3b2_branch2c" + top: "res3b2" + name: "res3b2" + type: "Eltwise" +} + +layer { + bottom: "res3b2" + top: "res3b2" + name: "res3b2_relu" + type: "ReLU" +} + +layer { + bottom: "res3b2" + top: "res3b3_branch2a" + name: "res3b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "bn3b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2a" + name: "scale3b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2a" + bottom: "res3b3_branch2a" + name: "res3b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2a" + top: "res3b3_branch2b" + name: "res3b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "bn3b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2b" + name: "scale3b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res3b3_branch2b" + bottom: "res3b3_branch2b" + name: "res3b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3_branch2b" + top: "res3b3_branch2c" + name: "res3b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + 
kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "bn3b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3_branch2c" + top: "res3b3_branch2c" + name: "scale3b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b2" + bottom: "res3b3_branch2c" + top: "res3b3" + name: "res3b3" + type: "Eltwise" +} + +layer { + bottom: "res3b3" + top: "res3b3" + name: "res3b3_relu" + type: "ReLU" +} + +layer { + bottom: "res3b3" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b3" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2a" + bottom: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4a_branch2b" + bottom: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 
+ stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b1_branch2a" + name: "res4b1_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "bn4b1_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2a" + name: "scale4b1_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2a" + bottom: "res4b1_branch2a" + name: "res4b1_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2a" + top: "res4b1_branch2b" + name: "res4b1_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "bn4b1_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2b" + name: "scale4b1_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b1_branch2b" + bottom: "res4b1_branch2b" + name: "res4b1_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1_branch2b" + top: "res4b1_branch2c" + name: "res4b1_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "bn4b1_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1_branch2c" + top: "res4b1_branch2c" + name: "scale4b1_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b1_branch2c" + top: "res4b1" + name: "res4b1" + type: "Eltwise" +} + +layer { + bottom: "res4b1" + top: "res4b1" + 
name: "res4b1_relu" + type: "ReLU" +} + +layer { + bottom: "res4b1" + top: "res4b2_branch2a" + name: "res4b2_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "bn4b2_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2a" + name: "scale4b2_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2a" + bottom: "res4b2_branch2a" + name: "res4b2_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2a" + top: "res4b2_branch2b" + name: "res4b2_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "bn4b2_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2b" + name: "scale4b2_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b2_branch2b" + bottom: "res4b2_branch2b" + name: "res4b2_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2_branch2b" + top: "res4b2_branch2c" + name: "res4b2_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "bn4b2_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2_branch2c" + top: "res4b2_branch2c" + name: "scale4b2_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b1" + bottom: "res4b2_branch2c" + top: "res4b2" + name: "res4b2" + type: "Eltwise" +} + +layer { + bottom: "res4b2" + top: "res4b2" + name: "res4b2_relu" + type: "ReLU" +} + +layer { + bottom: "res4b2" + top: "res4b3_branch2a" + name: "res4b3_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "bn4b3_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2a" + name: "scale4b3_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2a" + bottom: "res4b3_branch2a" + name: "res4b3_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2a" + top: "res4b3_branch2b" + name: "res4b3_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "bn4b3_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2b" + name: "scale4b3_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b3_branch2b" + bottom: "res4b3_branch2b" + name: "res4b3_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3_branch2b" + top: "res4b3_branch2c" + name: "res4b3_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "bn4b3_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3_branch2c" + top: "res4b3_branch2c" + name: "scale4b3_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b2" + bottom: "res4b3_branch2c" + top: "res4b3" + name: "res4b3" + type: "Eltwise" +} + +layer { + bottom: "res4b3" + top: "res4b3" + name: "res4b3_relu" + type: "ReLU" +} + +layer { + bottom: "res4b3" + top: "res4b4_branch2a" + name: "res4b4_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "bn4b4_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2a" + name: "scale4b4_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2a" + bottom: "res4b4_branch2a" + name: "res4b4_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2a" + top: "res4b4_branch2b" + name: "res4b4_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2b" + name: "bn4b4_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2b" + top: 
"res4b4_branch2b" + name: "scale4b4_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b4_branch2b" + bottom: "res4b4_branch2b" + name: "res4b4_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4_branch2b" + top: "res4b4_branch2c" + name: "res4b4_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "bn4b4_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4_branch2c" + top: "res4b4_branch2c" + name: "scale4b4_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b3" + bottom: "res4b4_branch2c" + top: "res4b4" + name: "res4b4" + type: "Eltwise" +} + +layer { + bottom: "res4b4" + top: "res4b4" + name: "res4b4_relu" + type: "ReLU" +} + +layer { + bottom: "res4b4" + top: "res4b5_branch2a" + name: "res4b5_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "bn4b5_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2a" + name: "scale4b5_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2a" + bottom: "res4b5_branch2a" + name: "res4b5_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2a" + top: "res4b5_branch2b" + name: "res4b5_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "bn4b5_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2b" + name: "scale4b5_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b5_branch2b" + bottom: "res4b5_branch2b" + name: "res4b5_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5_branch2b" + top: "res4b5_branch2c" + name: "res4b5_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "bn4b5_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5_branch2c" + top: "res4b5_branch2c" + name: "scale4b5_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b4" + bottom: "res4b5_branch2c" + top: "res4b5" + name: "res4b5" + type: "Eltwise" +} + +layer { + bottom: "res4b5" + top: "res4b5" + name: "res4b5_relu" + type: "ReLU" +} + +layer { + bottom: "res4b5" + top: "res4b6_branch2a" + name: "res4b6_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "bn4b6_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2a" + name: "scale4b6_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2a" + bottom: "res4b6_branch2a" + name: "res4b6_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2a" + top: "res4b6_branch2b" + name: "res4b6_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "bn4b6_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2b" + name: "scale4b6_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b6_branch2b" + bottom: "res4b6_branch2b" + name: "res4b6_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6_branch2b" + top: "res4b6_branch2c" + name: "res4b6_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "bn4b6_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6_branch2c" + top: "res4b6_branch2c" + name: "scale4b6_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b5" + bottom: "res4b6_branch2c" + top: "res4b6" + name: "res4b6" + type: "Eltwise" +} + +layer { + bottom: "res4b6" + top: "res4b6" + name: "res4b6_relu" + type: "ReLU" +} + +layer { + bottom: "res4b6" + top: "res4b7_branch2a" + name: "res4b7_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 
+ } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "bn4b7_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2a" + name: "scale4b7_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2a" + bottom: "res4b7_branch2a" + name: "res4b7_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2a" + top: "res4b7_branch2b" + name: "res4b7_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "bn4b7_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2b" + name: "scale4b7_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b7_branch2b" + bottom: "res4b7_branch2b" + name: "res4b7_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7_branch2b" + top: "res4b7_branch2c" + name: "res4b7_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "bn4b7_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7_branch2c" + top: "res4b7_branch2c" + name: "scale4b7_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b6" + bottom: "res4b7_branch2c" + top: "res4b7" + name: "res4b7" + type: "Eltwise" +} + +layer { + bottom: "res4b7" + top: "res4b7" + name: "res4b7_relu" + type: "ReLU" +} + +layer { + bottom: "res4b7" + top: "res4b8_branch2a" + name: "res4b8_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "bn4b8_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2a" + name: "scale4b8_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2a" + bottom: "res4b8_branch2a" + name: "res4b8_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2a" + top: "res4b8_branch2b" + name: "res4b8_branch2b" + type: 
"Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "bn4b8_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2b" + name: "scale4b8_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b8_branch2b" + bottom: "res4b8_branch2b" + name: "res4b8_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8_branch2b" + top: "res4b8_branch2c" + name: "res4b8_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "bn4b8_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8_branch2c" + top: "res4b8_branch2c" + name: "scale4b8_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b7" + bottom: "res4b8_branch2c" + top: "res4b8" + name: "res4b8" + type: "Eltwise" +} + +layer { + bottom: "res4b8" + top: "res4b8" + name: "res4b8_relu" + type: "ReLU" +} + +layer { + bottom: "res4b8" + top: "res4b9_branch2a" + name: "res4b9_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "bn4b9_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2a" + name: "scale4b9_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2a" + bottom: "res4b9_branch2a" + name: "res4b9_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2a" + top: "res4b9_branch2b" + name: "res4b9_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "bn4b9_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2b" + name: "scale4b9_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b9_branch2b" + bottom: "res4b9_branch2b" + name: 
"res4b9_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9_branch2b" + top: "res4b9_branch2c" + name: "res4b9_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "bn4b9_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9_branch2c" + top: "res4b9_branch2c" + name: "scale4b9_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b8" + bottom: "res4b9_branch2c" + top: "res4b9" + name: "res4b9" + type: "Eltwise" +} + +layer { + bottom: "res4b9" + top: "res4b9" + name: "res4b9_relu" + type: "ReLU" +} + +layer { + bottom: "res4b9" + top: "res4b10_branch2a" + name: "res4b10_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "bn4b10_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2a" + name: "scale4b10_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2a" + bottom: "res4b10_branch2a" + name: "res4b10_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2a" + top: "res4b10_branch2b" + name: "res4b10_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "bn4b10_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2b" + name: "scale4b10_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b10_branch2b" + bottom: "res4b10_branch2b" + name: "res4b10_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10_branch2b" + top: "res4b10_branch2c" + name: "res4b10_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "bn4b10_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10_branch2c" + top: "res4b10_branch2c" + name: "scale4b10_branch2c" + type: "Scale" + scale_param { + bias_term: true + 
} + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b9" + bottom: "res4b10_branch2c" + top: "res4b10" + name: "res4b10" + type: "Eltwise" +} + +layer { + bottom: "res4b10" + top: "res4b10" + name: "res4b10_relu" + type: "ReLU" +} + +layer { + bottom: "res4b10" + top: "res4b11_branch2a" + name: "res4b11_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "bn4b11_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2a" + name: "scale4b11_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2a" + bottom: "res4b11_branch2a" + name: "res4b11_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2a" + top: "res4b11_branch2b" + name: "res4b11_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "bn4b11_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2b" + name: "scale4b11_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b11_branch2b" + bottom: "res4b11_branch2b" + name: "res4b11_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11_branch2b" + top: "res4b11_branch2c" + name: "res4b11_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "bn4b11_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11_branch2c" + top: "res4b11_branch2c" + name: "scale4b11_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b10" + bottom: "res4b11_branch2c" + top: "res4b11" + name: "res4b11" + type: "Eltwise" +} + +layer { + bottom: "res4b11" + top: "res4b11" + name: "res4b11_relu" + type: "ReLU" +} + +layer { + bottom: "res4b11" + top: "res4b12_branch2a" + name: "res4b12_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "bn4b12_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 
0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2a" + name: "scale4b12_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2a" + bottom: "res4b12_branch2a" + name: "res4b12_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2a" + top: "res4b12_branch2b" + name: "res4b12_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "bn4b12_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2b" + name: "scale4b12_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b12_branch2b" + bottom: "res4b12_branch2b" + name: "res4b12_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12_branch2b" + top: "res4b12_branch2c" + name: "res4b12_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "bn4b12_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12_branch2c" + top: "res4b12_branch2c" + name: "scale4b12_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b11" + bottom: "res4b12_branch2c" + top: "res4b12" + name: "res4b12" + type: "Eltwise" +} + +layer { + bottom: "res4b12" + top: "res4b12" + name: "res4b12_relu" + type: "ReLU" +} + +layer { + bottom: "res4b12" + top: "res4b13_branch2a" + name: "res4b13_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "bn4b13_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2a" + name: "scale4b13_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2a" + bottom: "res4b13_branch2a" + name: "res4b13_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2a" + top: "res4b13_branch2b" + name: "res4b13_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 
+ } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "bn4b13_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2b" + name: "scale4b13_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b13_branch2b" + bottom: "res4b13_branch2b" + name: "res4b13_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13_branch2b" + top: "res4b13_branch2c" + name: "res4b13_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "bn4b13_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13_branch2c" + top: "res4b13_branch2c" + name: "scale4b13_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b12" + bottom: "res4b13_branch2c" + top: "res4b13" + name: "res4b13" + type: "Eltwise" +} + +layer { + bottom: "res4b13" + top: "res4b13" + name: "res4b13_relu" + type: "ReLU" +} + +layer { + bottom: "res4b13" + top: "res4b14_branch2a" + name: "res4b14_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "bn4b14_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2a" + name: "scale4b14_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2a" + bottom: "res4b14_branch2a" + name: "res4b14_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2a" + top: "res4b14_branch2b" + name: "res4b14_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "bn4b14_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2b" + top: "res4b14_branch2b" + name: "scale4b14_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b14_branch2b" + bottom: "res4b14_branch2b" + name: "res4b14_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14_branch2b" + top: 
"res4b14_branch2c" + name: "res4b14_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "bn4b14_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14_branch2c" + top: "res4b14_branch2c" + name: "scale4b14_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b13" + bottom: "res4b14_branch2c" + top: "res4b14" + name: "res4b14" + type: "Eltwise" +} + +layer { + bottom: "res4b14" + top: "res4b14" + name: "res4b14_relu" + type: "ReLU" +} + +layer { + bottom: "res4b14" + top: "res4b15_branch2a" + name: "res4b15_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "bn4b15_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2a" + name: "scale4b15_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2a" + bottom: "res4b15_branch2a" + name: "res4b15_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2a" + top: "res4b15_branch2b" + name: "res4b15_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "bn4b15_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2b" + name: "scale4b15_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b15_branch2b" + bottom: "res4b15_branch2b" + name: "res4b15_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15_branch2b" + top: "res4b15_branch2c" + name: "res4b15_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "bn4b15_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15_branch2c" + top: "res4b15_branch2c" + name: "scale4b15_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 
0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b14" + bottom: "res4b15_branch2c" + top: "res4b15" + name: "res4b15" + type: "Eltwise" +} + +layer { + bottom: "res4b15" + top: "res4b15" + name: "res4b15_relu" + type: "ReLU" +} + +layer { + bottom: "res4b15" + top: "res4b16_branch2a" + name: "res4b16_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "bn4b16_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2a" + name: "scale4b16_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2a" + bottom: "res4b16_branch2a" + name: "res4b16_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2a" + top: "res4b16_branch2b" + name: "res4b16_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "bn4b16_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2b" + name: "scale4b16_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b16_branch2b" + bottom: "res4b16_branch2b" + name: "res4b16_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16_branch2b" + top: "res4b16_branch2c" + name: "res4b16_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "bn4b16_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16_branch2c" + top: "res4b16_branch2c" + name: "scale4b16_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b15" + bottom: "res4b16_branch2c" + top: "res4b16" + name: "res4b16" + type: "Eltwise" +} + +layer { + bottom: "res4b16" + top: "res4b16" + name: "res4b16_relu" + type: "ReLU" +} + +layer { + bottom: "res4b16" + top: "res4b17_branch2a" + name: "res4b17_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "bn4b17_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 
+ } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2a" + name: "scale4b17_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2a" + bottom: "res4b17_branch2a" + name: "res4b17_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2a" + top: "res4b17_branch2b" + name: "res4b17_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "bn4b17_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2b" + name: "scale4b17_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b17_branch2b" + bottom: "res4b17_branch2b" + name: "res4b17_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17_branch2b" + top: "res4b17_branch2c" + name: "res4b17_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "bn4b17_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17_branch2c" + top: "res4b17_branch2c" + name: "scale4b17_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b16" + bottom: "res4b17_branch2c" + top: "res4b17" + name: "res4b17" + type: "Eltwise" +} + +layer { + bottom: "res4b17" + top: "res4b17" + name: "res4b17_relu" + type: "ReLU" +} + +layer { + bottom: "res4b17" + top: "res4b18_branch2a" + name: "res4b18_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "bn4b18_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2a" + name: "scale4b18_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2a" + bottom: "res4b18_branch2a" + name: "res4b18_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2a" + top: "res4b18_branch2b" + name: "res4b18_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: 
"res4b18_branch2b" + name: "bn4b18_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2b" + name: "scale4b18_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b18_branch2b" + bottom: "res4b18_branch2b" + name: "res4b18_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18_branch2b" + top: "res4b18_branch2c" + name: "res4b18_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "bn4b18_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18_branch2c" + top: "res4b18_branch2c" + name: "scale4b18_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b17" + bottom: "res4b18_branch2c" + top: "res4b18" + name: "res4b18" + type: "Eltwise" +} + +layer { + bottom: "res4b18" + top: "res4b18" + name: "res4b18_relu" + type: "ReLU" +} + +layer { + bottom: "res4b18" + top: "res4b19_branch2a" + name: "res4b19_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "bn4b19_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2a" + name: "scale4b19_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2a" + bottom: "res4b19_branch2a" + name: "res4b19_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2a" + top: "res4b19_branch2b" + name: "res4b19_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "bn4b19_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2b" + name: "scale4b19_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b19_branch2b" + bottom: "res4b19_branch2b" + name: "res4b19_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19_branch2b" + top: "res4b19_branch2c" + name: "res4b19_branch2c" + type: 
"Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "bn4b19_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b19_branch2c" + top: "res4b19_branch2c" + name: "scale4b19_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b18" + bottom: "res4b19_branch2c" + top: "res4b19" + name: "res4b19" + type: "Eltwise" +} + +layer { + bottom: "res4b19" + top: "res4b19" + name: "res4b19_relu" + type: "ReLU" +} + +layer { + bottom: "res4b19" + top: "res4b20_branch2a" + name: "res4b20_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "bn4b20_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2a" + name: "scale4b20_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2a" + bottom: "res4b20_branch2a" + name: "res4b20_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2a" + top: "res4b20_branch2b" + name: "res4b20_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "bn4b20_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2b" + name: "scale4b20_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b20_branch2b" + bottom: "res4b20_branch2b" + name: "res4b20_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20_branch2b" + top: "res4b20_branch2c" + name: "res4b20_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "bn4b20_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20_branch2c" + top: "res4b20_branch2c" + name: "scale4b20_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: 
"res4b19" + bottom: "res4b20_branch2c" + top: "res4b20" + name: "res4b20" + type: "Eltwise" +} + +layer { + bottom: "res4b20" + top: "res4b20" + name: "res4b20_relu" + type: "ReLU" +} + +layer { + bottom: "res4b20" + top: "res4b21_branch2a" + name: "res4b21_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "bn4b21_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2a" + name: "scale4b21_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2a" + bottom: "res4b21_branch2a" + name: "res4b21_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2a" + top: "res4b21_branch2b" + name: "res4b21_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "bn4b21_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2b" + name: "scale4b21_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b21_branch2b" + bottom: "res4b21_branch2b" + name: "res4b21_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21_branch2b" + top: "res4b21_branch2c" + name: "res4b21_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "bn4b21_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21_branch2c" + top: "res4b21_branch2c" + name: "scale4b21_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b20" + bottom: "res4b21_branch2c" + top: "res4b21" + name: "res4b21" + type: "Eltwise" +} + +layer { + bottom: "res4b21" + top: "res4b21" + name: "res4b21_relu" + type: "ReLU" +} + +layer { + bottom: "res4b21" + top: "res4b22_branch2a" + name: "res4b22_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "bn4b22_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } 
+} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2a" + name: "scale4b22_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2a" + bottom: "res4b22_branch2a" + name: "res4b22_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2a" + top: "res4b22_branch2b" + name: "res4b22_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "bn4b22_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2b" + name: "scale4b22_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res4b22_branch2b" + bottom: "res4b22_branch2b" + name: "res4b22_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22_branch2b" + top: "res4b22_branch2c" + name: "res4b22_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "bn4b22_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22_branch2c" + top: "res4b22_branch2c" + name: "scale4b22_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b21" + bottom: "res4b22_branch2c" + top: "res4b22" + name: "res4b22" + type: "Eltwise" +} + +layer { + bottom: "res4b22" + top: "res4b22" + name: "res4b22_relu" + type: "ReLU" +} + +layer { + bottom: "res4b22" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b22" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2a" + bottom: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5a_branch2b" + bottom: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2a" + bottom: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + 
use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5b_branch2b" + bottom: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2a" + bottom: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + top: "res5c_branch2b" + bottom: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: 
"res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# 
type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 2" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 392 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 8 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#--------------online hard example mining-------------- +layer { + name: "per_roi_loss_cls" + type: "SoftmaxWithLossOHEM" + bottom: "cls_score" + bottom: "labels" + top: "temp_loss_cls" + top: "temp_prob_cls" + top: "per_roi_loss_cls" + loss_weight: 0 + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + top: "temp_loss_bbox" + top: "per_roi_loss_bbox" + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss" + type: "Eltwise" + bottom: "per_roi_loss_cls" + bottom: "per_roi_loss_bbox" + top: "per_roi_loss" + propagate_down: false + propagate_down: false +} + +layer { + bottom: "rois" + bottom: "per_roi_loss" + bottom: "labels" + bottom: "bbox_inside_weights" + top: "labels_ohem" + top: "bbox_loss_weights_ohem" + name: 
"annotator_detector" + type: "BoxAnnotatorOHEM" + box_annotator_ohem_param { + roi_per_img: 128 + ignore_label: -1 + } + propagate_down: false + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "silence" + type: "Silence" + bottom: "bbox_outside_weights" + bottom: "temp_loss_cls" + bottom: "temp_prob_cls" + bottom: "temp_loss_bbox" +} + +#-----------------------output------------------------ +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels_ohem" + top: "loss_cls" + loss_weight: 1 + loss_param { + ignore_label: -1 + } + propagate_down: true + propagate_down: false +} + +layer { + name: "accuarcy" + type: "Accuracy" + bottom: "cls_score" + bottom: "labels_ohem" + top: "accuarcy" + #include: { phase: TEST } + accuracy_param { + ignore_label: -1 + } + propagate_down: false + propagate_down: false +} + +layer { + name: "loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_loss_weights_ohem" + top: "loss_bbox" + loss_weight: 1 + loss_param { + normalization: PRE_FIXED + pre_fixed_normalizer: 128 + } + propagate_down: true + propagate_down: false + propagate_down: false +} + diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/test.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/test.prototxt new file mode 100644 index 0000000..83b223c --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/test.prototxt @@ -0,0 +1,3787 @@ +name: "ResNet50" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + } + param { + lr_mult: 0.0 + } + param { + lr_mult: 0.0 + } + +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + 
bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: 
"Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + 
bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: 
"res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b_branch2a" + name: "res3b_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "bn3b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "scale3b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "res3b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2b" + name: "res3b_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "bn3b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "scale3b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "res3b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2c" + name: "res3b_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "bn3b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "scale3b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b_branch2c" + 
top: "res3b" + name: "res3b" + type: "Eltwise" +} + +layer { + bottom: "res3b" + top: "res3b" + name: "res3b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b" + top: "res3c_branch2a" + name: "res3c_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "bn3c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "scale3c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "res3c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2b" + name: "res3c_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "bn3c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "scale3c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "res3c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2c" + name: "res3c_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "bn3c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "scale3c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b" + bottom: "res3c_branch2c" + top: "res3c" + name: "res3c" + type: "Eltwise" +} + +layer { + bottom: "res3c" + top: "res3c" + name: "res3c_relu" + type: "ReLU" +} + +layer { + bottom: "res3c" + top: "res3d_branch2a" + name: "res3d_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "bn3d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "scale3d_branch2a" + type: "Scale" + scale_param { + bias_term: true + 
} + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "res3d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2b" + name: "res3d_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "bn3d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "scale3d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "res3d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2c" + name: "res3d_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "bn3d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "scale3d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c" + bottom: "res3d_branch2c" + top: "res3d" + name: "res3d" + type: "Eltwise" +} + +layer { + bottom: "res3d" + top: "res3d" + name: "res3d_relu" + type: "ReLU" +} + +layer { + bottom: "res3d" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 
0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b_branch2a" + name: "res4b_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "bn4b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "scale4b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "res4b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2b" + name: "res4b_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "bn4b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: 
"scale4b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "res4b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2c" + name: "res4b_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "bn4b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "scale4b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b_branch2c" + top: "res4b" + name: "res4b" + type: "Eltwise" +} + +layer { + bottom: "res4b" + top: "res4b" + name: "res4b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b" + top: "res4c_branch2a" + name: "res4c_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "bn4c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "scale4c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "res4c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2b" + name: "res4c_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "bn4c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "scale4c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "res4c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2c" + name: "res4c_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "bn4c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "scale4c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b" + bottom: "res4c_branch2c" + top: "res4c" + name: "res4c" + type: "Eltwise" +} + +layer { + bottom: "res4c" + top: "res4c" + name: "res4c_relu" + type: "ReLU" +} + +layer { + bottom: "res4c" + top: "res4d_branch2a" + name: "res4d_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "bn4d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "scale4d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "res4d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2b" + name: "res4d_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "bn4d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "scale4d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "res4d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2c" + name: "res4d_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "bn4d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "scale4d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c" + bottom: "res4d_branch2c" + top: "res4d" + name: "res4d" + type: "Eltwise" +} + +layer { + bottom: "res4d" + top: "res4d" + name: "res4d_relu" + type: "ReLU" +} + +layer { + bottom: "res4d" + top: "res4e_branch2a" + name: "res4e_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "bn4e_branch2a" + type: "BatchNorm" + 
batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "scale4e_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "res4e_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2b" + name: "res4e_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "bn4e_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "scale4e_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "res4e_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2c" + name: "res4e_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "bn4e_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "scale4e_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d" + bottom: "res4e_branch2c" + top: "res4e" + name: "res4e" + type: "Eltwise" +} + +layer { + bottom: "res4e" + top: "res4e" + name: "res4e_relu" + type: "ReLU" +} + +layer { + bottom: "res4e" + top: "res4f_branch2a" + name: "res4f_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "bn4f_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "scale4f_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "res4f_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2b" + name: "res4f_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: 
"res4f_branch2b" + top: "res4f_branch2b" + name: "bn4f_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "scale4f_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "res4f_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2c" + name: "res4f_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "bn4f_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "scale4f_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e" + bottom: "res4f_branch2c" + top: "res4f" + name: "res4f" + type: "Eltwise" +} + +layer { + bottom: "res4f" + top: "res4f" + name: "res4f_relu" + type: "ReLU" +} + +layer { + bottom: "res4f" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: 
"res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + 
stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: 
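The res5 blocks above trade the usual stride-2 downsampling for stride-1 convolutions with dilation: 2 / pad: 2, so the conv5 feature map keeps the conv4 resolution and the overall feature stride appears to stay at 16; this matches feat_stride: 16 in the RPN below and spatial_scale: 0.0625 in PSROIPooling. A minimal sketch of the output-size arithmetic; conv_out_size is a hypothetical helper, not code from this repository:

def conv_out_size(in_size, kernel, pad, stride, dilation=1):
    # Caffe's convolution output-size formula, with dilation folded in.
    effective_kernel = dilation * (kernel - 1) + 1
    return (in_size + 2 * pad - effective_kernel) // stride + 1

# 3x3 conv, dilation=2, pad=2, stride=1 (res5*_branch2b) preserves spatial size,
# just like the plain 3x3 / pad=1 / stride=1 convs used in the earlier stages:
assert conv_out_size(38, kernel=3, pad=2, stride=1, dilation=2) == 38
assert conv_out_size(38, kernel=3, pad=1, stride=1) == 38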
"ReLU" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 4116 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: 
"ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 84 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred_pre" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#-----------------------output------------------------ +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob_pre" +} + +layer { + name: "cls_prob_reshape" + type: "Reshape" + bottom: "cls_prob_pre" + top: "cls_prob" + reshape_param { + shape { + dim: -1 + dim: 21 + } + } +} + +layer { + name: "bbox_pred_reshape" + type: "Reshape" + bottom: "bbox_pred_pre" + top: "bbox_pred" + reshape_param { + shape { + dim: -1 + dim: 84 + } + } +} + + diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/train_ohem.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/train_ohem.prototxt new file mode 100644 index 0000000..f709103 --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/class-aware/train_ohem.prototxt @@ -0,0 +1,3946 @@ +name: "ResNet-50" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + } + param { + lr_mult: 0.0 + } + param { + lr_mult: 0.0 + } + +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + 
bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 
3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: 
"res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + 
convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b_branch2a" + name: "res3b_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "bn3b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "scale3b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "res3b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2b" + name: "res3b_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "bn3b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "scale3b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "res3b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2c" + name: "res3b_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "bn3b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "scale3b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b_branch2c" + top: "res3b" + name: "res3b" + type: "Eltwise" +} + +layer { + 
bottom: "res3b" + top: "res3b" + name: "res3b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b" + top: "res3c_branch2a" + name: "res3c_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "bn3c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "scale3c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "res3c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2b" + name: "res3c_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "bn3c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "scale3c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "res3c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2c" + name: "res3c_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "bn3c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "scale3c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b" + bottom: "res3c_branch2c" + top: "res3c" + name: "res3c" + type: "Eltwise" +} + +layer { + bottom: "res3c" + top: "res3c" + name: "res3c_relu" + type: "ReLU" +} + +layer { + bottom: "res3c" + top: "res3d_branch2a" + name: "res3d_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "bn3d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "scale3d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + 
lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "res3d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2b" + name: "res3d_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "bn3d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "scale3d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "res3d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2c" + name: "res3d_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "bn3d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "scale3d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c" + bottom: "res3d_branch2c" + top: "res3d" + name: "res3d" + type: "Eltwise" +} + +layer { + bottom: "res3d" + top: "res3d" + name: "res3d_relu" + type: "ReLU" +} + +layer { + bottom: "res3d" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + 
+layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b_branch2a" + name: "res4b_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "bn4b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "scale4b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "res4b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2b" + name: "res4b_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "bn4b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "scale4b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "res4b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2c" + name: "res4b_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "bn4b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "scale4b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b_branch2c" + top: "res4b" + name: "res4b" + type: "Eltwise" +} + +layer { + bottom: "res4b" + top: "res4b" + name: "res4b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b" + top: "res4c_branch2a" + name: "res4c_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "bn4c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "scale4c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "res4c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2b" + name: "res4c_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "bn4c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "scale4c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "res4c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2c" + name: "res4c_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "bn4c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: 
"res4c_branch2c" + name: "scale4c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b" + bottom: "res4c_branch2c" + top: "res4c" + name: "res4c" + type: "Eltwise" +} + +layer { + bottom: "res4c" + top: "res4c" + name: "res4c_relu" + type: "ReLU" +} + +layer { + bottom: "res4c" + top: "res4d_branch2a" + name: "res4d_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "bn4d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "scale4d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "res4d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2b" + name: "res4d_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "bn4d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "scale4d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "res4d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2c" + name: "res4d_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "bn4d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "scale4d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c" + bottom: "res4d_branch2c" + top: "res4d" + name: "res4d" + type: "Eltwise" +} + +layer { + bottom: "res4d" + top: "res4d" + name: "res4d_relu" + type: "ReLU" +} + +layer { + bottom: "res4d" + top: "res4e_branch2a" + name: "res4e_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "bn4e_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "scale4e_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "res4e_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2b" + name: "res4e_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "bn4e_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "scale4e_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "res4e_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2c" + name: "res4e_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "bn4e_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "scale4e_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d" + bottom: "res4e_branch2c" + top: "res4e" + name: "res4e" + type: "Eltwise" +} + +layer { + bottom: "res4e" + top: "res4e" + name: "res4e_relu" + type: "ReLU" +} + +layer { + bottom: "res4e" + top: "res4f_branch2a" + name: "res4f_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "bn4f_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "scale4f_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "res4f_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2b" + name: "res4f_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "bn4f_branch2b" + type: 
"BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "scale4f_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "res4f_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2c" + name: "res4f_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "bn4f_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "scale4f_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e" + bottom: "res4f_branch2c" + top: "res4f" + name: "res4f" + type: "Eltwise" +} + +layer { + bottom: "res4f" + top: "res4f" + name: "res4f_relu" + type: "ReLU" +} + +layer { + bottom: "res4f" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + 
batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } 
+} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: 
"rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 21" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + 
bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 4116 #4*21*(7^2) 4*cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 84 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#--------------online hard example mining-------------- +layer { + name: "per_roi_loss_cls" + type: "SoftmaxWithLossOHEM" + bottom: "cls_score" + bottom: "labels" + top: "temp_loss_cls" + top: "temp_prob_cls" + top: "per_roi_loss_cls" + loss_weight: 0 + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + top: "temp_loss_bbox" + top: "per_roi_loss_bbox" + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss" + type: "Eltwise" + bottom: "per_roi_loss_cls" + bottom: "per_roi_loss_bbox" + top: "per_roi_loss" + propagate_down: false + propagate_down: false +} + +layer { + bottom: "rois" + bottom: "per_roi_loss" + bottom: "labels" + bottom: "bbox_inside_weights" + top: "labels_ohem" + top: "bbox_loss_weights_ohem" + name: "annotator_detector" + type: "BoxAnnotatorOHEM" + box_annotator_ohem_param { + roi_per_img: 128 + ignore_label: -1 + } + propagate_down: false + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "silence" + type: "Silence" + bottom: "bbox_outside_weights" + bottom: "temp_loss_cls" + bottom: "temp_prob_cls" + bottom: "temp_loss_bbox" +} + +#-----------------------output------------------------ +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels_ohem" + top: "loss_cls" + loss_weight: 1 + loss_param { + ignore_label: -1 + } + propagate_down: true + propagate_down: false +} + +layer { + name: "accuracy" + type: "Accuracy" + bottom: "cls_score" + bottom: "labels_ohem" + top: 
"accuarcy" + #include: { phase: TEST } + accuracy_param { + ignore_label: -1 + } + propagate_down: false + propagate_down: false +} + +layer { + name: "loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_loss_weights_ohem" + top: "loss_bbox" + loss_weight: 1 + loss_param { + normalization: PRE_FIXED + pre_fixed_normalizer: 128 + } + propagate_down: true + propagate_down: false + propagate_down: false +} + diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/solver.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/solver.prototxt new file mode 100644 index 0000000..873a4bc --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/solver.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 80000 +display: 20 + +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet50_rfcn" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/solver_ohem.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/solver_ohem.prototxt new file mode 100644 index 0000000..c8d196b --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/solver_ohem.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 80000 +display: 20 + +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet50_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup.prototxt new file mode 100644 index 0000000..42274a9 --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup.prototxt @@ -0,0 +1,14 @@ +train_net: "models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt" +lr_policy: "fixed" +base_lr: 0.0001 + +display: 20 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet50_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup_continue.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup_continue.prototxt new file mode 100644 index 0000000..07255ca --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/solver_warmup_continue.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 70000 + +display: 20 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "resnet50_rfcn_ohem" +iter_size: 2 +# debug_info: true diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/test_agonistic.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/test_agonistic.prototxt new file mode 100644 index 0000000..898e8eb --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/test_agonistic.prototxt @@ -0,0 +1,3787 
@@ +name: "ResNet50" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + } + param { + lr_mult: 0.0 + } + param { + lr_mult: 0.0 + } + +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: 
"res2a_branch2b" + top: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + 
decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + 
bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b_branch2a" + name: "res3b_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "bn3b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + 
top: "res3b_branch2a" + name: "scale3b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "res3b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2b" + name: "res3b_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "bn3b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "scale3b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "res3b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2c" + name: "res3b_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "bn3b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "scale3b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b_branch2c" + top: "res3b" + name: "res3b" + type: "Eltwise" +} + +layer { + bottom: "res3b" + top: "res3b" + name: "res3b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b" + top: "res3c_branch2a" + name: "res3c_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "bn3c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "scale3c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "res3c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2b" + name: "res3c_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "bn3c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "scale3c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "res3c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2c" + name: "res3c_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "bn3c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "scale3c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b" + bottom: "res3c_branch2c" + top: "res3c" + name: "res3c" + type: "Eltwise" +} + +layer { + bottom: "res3c" + top: "res3c" + name: "res3c_relu" + type: "ReLU" +} + +layer { + bottom: "res3c" + top: "res3d_branch2a" + name: "res3d_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "bn3d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "scale3d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "res3d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2b" + name: "res3d_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "bn3d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "scale3d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "res3d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2c" + name: "res3d_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "bn3d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { 
+ lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "scale3d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c" + bottom: "res3d_branch2c" + top: "res3d" + name: "res3d" + type: "Eltwise" +} + +layer { + bottom: "res3d" + top: "res3d" + name: "res3d_relu" + type: "ReLU" +} + +layer { + bottom: "res3d" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b_branch2a" + name: "res4b_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "bn4b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "scale4b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "res4b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2b" + name: "res4b_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "bn4b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "scale4b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "res4b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2c" + name: "res4b_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "bn4b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "scale4b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b_branch2c" + top: "res4b" + name: "res4b" + type: "Eltwise" +} + +layer { + bottom: "res4b" + top: "res4b" + name: "res4b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b" + top: "res4c_branch2a" + name: "res4c_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2a" 
+ top: "res4c_branch2a" + name: "bn4c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "scale4c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "res4c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2b" + name: "res4c_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "bn4c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "scale4c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "res4c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2c" + name: "res4c_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "bn4c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "scale4c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b" + bottom: "res4c_branch2c" + top: "res4c" + name: "res4c" + type: "Eltwise" +} + +layer { + bottom: "res4c" + top: "res4c" + name: "res4c_relu" + type: "ReLU" +} + +layer { + bottom: "res4c" + top: "res4d_branch2a" + name: "res4d_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "bn4d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "scale4d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "res4d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2b" + name: "res4d_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + 
bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "bn4d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "scale4d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "res4d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2c" + name: "res4d_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "bn4d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "scale4d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c" + bottom: "res4d_branch2c" + top: "res4d" + name: "res4d" + type: "Eltwise" +} + +layer { + bottom: "res4d" + top: "res4d" + name: "res4d_relu" + type: "ReLU" +} + +layer { + bottom: "res4d" + top: "res4e_branch2a" + name: "res4e_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "bn4e_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "scale4e_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "res4e_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2b" + name: "res4e_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "bn4e_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "scale4e_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "res4e_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2c" + name: "res4e_branch2c" + type: 
"Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "bn4e_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "scale4e_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d" + bottom: "res4e_branch2c" + top: "res4e" + name: "res4e" + type: "Eltwise" +} + +layer { + bottom: "res4e" + top: "res4e" + name: "res4e_relu" + type: "ReLU" +} + +layer { + bottom: "res4e" + top: "res4f_branch2a" + name: "res4f_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "bn4f_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "scale4f_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "res4f_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2b" + name: "res4f_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "bn4f_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "scale4f_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "res4f_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2c" + name: "res4f_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "bn4f_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "scale4f_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e" + bottom: "res4f_branch2c" + top: "res4f" + name: "res4f" + type: "Eltwise" +} + +layer 
{ + bottom: "res4f" + top: "res4f" + name: "res4f_relu" + type: "ReLU" +} + +layer { + bottom: "res4f" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: 
"res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: 
"rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 392 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 8 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred_pre" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#-----------------------output------------------------ +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob_pre" +} + +layer { + name: "cls_prob_reshape" + type: "Reshape" + bottom: "cls_prob_pre" + top: "cls_prob" + reshape_param { + shape { + dim: -1 + dim: 21 + } + } +} + +layer { + name: "bbox_pred_reshape" + type: "Reshape" + bottom: "bbox_pred_pre" + top: "bbox_pred" + reshape_param { + shape { + dim: -1 + dim: 8 + } + } +} + + diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic.prototxt new file mode 100644 index 0000000..13687cc --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic.prototxt @@ -0,0 +1,3877 @@ +name: "ResNet-50" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +# 
------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + } + param { + lr_mult: 0.0 + } + param { + lr_mult: 0.0 + } + +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { 
+ bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: "res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" 
+ bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "res2c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { 
+ bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b_branch2a" + name: "res3b_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "bn3b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "scale3b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "res3b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2b" + name: "res3b_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "bn3b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "scale3b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "res3b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2c" + name: "res3b_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "bn3b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "scale3b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + bottom: "res3b_branch2c" + top: "res3b" + name: "res3b" + type: "Eltwise" +} + +layer { + bottom: "res3b" + top: "res3b" + name: "res3b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b" + top: "res3c_branch2a" + name: "res3c_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "bn3c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "scale3c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "res3c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2b" + name: "res3c_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "bn3c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: 
"res3c_branch2b" + name: "scale3c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "res3c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2c" + name: "res3c_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "bn3c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "scale3c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b" + bottom: "res3c_branch2c" + top: "res3c" + name: "res3c" + type: "Eltwise" +} + +layer { + bottom: "res3c" + top: "res3c" + name: "res3c_relu" + type: "ReLU" +} + +layer { + bottom: "res3c" + top: "res3d_branch2a" + name: "res3d_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "bn3d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "scale3d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "res3d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2b" + name: "res3d_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "bn3d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "scale3d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "res3d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2c" + name: "res3d_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "bn3d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { 
+ lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "scale3d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c" + bottom: "res3d_branch2c" + top: "res3d" + name: "res3d" + type: "Eltwise" +} + +layer { + bottom: "res3d" + top: "res3d" + name: "res3d_relu" + type: "ReLU" +} + +layer { + bottom: "res3d" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} 
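+# Note on the fine-tuning scheme visible in the params of this trunk: conv1 and every res2*
+# convolution are frozen (lr_mult: 0), while res3a and all later convolutions are trainable
+# (lr_mult: 1.0). Every BatchNorm layer runs with use_global_stats: true and zero
+# lr_mult/decay_mult, and its paired Scale layer is frozen the same way, so normalization
+# statistics and affine terms keep their initial (typically ImageNet-pretrained) values;
+# only the res3-res5 weights and the newly added RPN and R-FCN heads (lr_mult: 1.0 / 2.0)
+# receive gradient updates.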
+ +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b_branch2a" + name: "res4b_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "bn4b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "scale4b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "res4b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2b" + name: "res4b_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "bn4b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "scale4b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "res4b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2c" + name: "res4b_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "bn4b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "scale4b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b_branch2c" + top: "res4b" + name: "res4b" + type: "Eltwise" +} + +layer { + bottom: "res4b" + top: "res4b" + name: "res4b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b" + top: "res4c_branch2a" + name: "res4c_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "bn4c_branch2a" + type: "BatchNorm" + batch_norm_param { + 
use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "scale4c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "res4c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2b" + name: "res4c_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "bn4c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "scale4c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "res4c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2c" + name: "res4c_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "bn4c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "scale4c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b" + bottom: "res4c_branch2c" + top: "res4c" + name: "res4c" + type: "Eltwise" +} + +layer { + bottom: "res4c" + top: "res4c" + name: "res4c_relu" + type: "ReLU" +} + +layer { + bottom: "res4c" + top: "res4d_branch2a" + name: "res4d_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "bn4d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "scale4d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "res4d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2b" + name: "res4d_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2b" + 
top: "res4d_branch2b" + name: "bn4d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "scale4d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "res4d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2c" + name: "res4d_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "bn4d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "scale4d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c" + bottom: "res4d_branch2c" + top: "res4d" + name: "res4d" + type: "Eltwise" +} + +layer { + bottom: "res4d" + top: "res4d" + name: "res4d_relu" + type: "ReLU" +} + +layer { + bottom: "res4d" + top: "res4e_branch2a" + name: "res4e_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "bn4e_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "scale4e_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "res4e_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2b" + name: "res4e_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "bn4e_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "scale4e_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "res4e_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2c" + name: "res4e_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + 
bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "bn4e_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "scale4e_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d" + bottom: "res4e_branch2c" + top: "res4e" + name: "res4e" + type: "Eltwise" +} + +layer { + bottom: "res4e" + top: "res4e" + name: "res4e_relu" + type: "ReLU" +} + +layer { + bottom: "res4e" + top: "res4f_branch2a" + name: "res4f_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "bn4f_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "scale4f_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "res4f_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2b" + name: "res4f_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "bn4f_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "scale4f_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "res4f_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2c" + name: "res4f_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "bn4f_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "scale4f_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e" + bottom: "res4f_branch2c" + top: "res4f" + name: "res4f" + type: "Eltwise" +} + +layer { + bottom: "res4f" + top: "res4f" + name: "res4f_relu" + type: "ReLU" +} + +layer { + bottom: 
"res4f" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: 
"res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: 
"res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: "res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + 
layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 2" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 392 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + 
spatial_scale: 0.0625 + output_dim: 8 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#-----------------------output------------------------ +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 + propagate_down: true + propagate_down: false +} + +layer { + name: "accuarcy" + type: "Accuracy" + bottom: "cls_score" + bottom: "labels" + top: "accuarcy" + #include: { phase: TEST } + propagate_down: false + propagate_down: false +} + +layer { + name: "loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: 'bbox_inside_weights' + top: "loss_bbox" + loss_weight: 1 + loss_param { + normalization: PRE_FIXED + pre_fixed_normalizer: 128 + } + propagate_down: true + propagate_down: false + propagate_down: false +} + +layer { + name: "silence" + type: "Silence" + bottom: "bbox_outside_weights" +} + diff --git a/models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt b/models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt new file mode 100644 index 0000000..7981ac8 --- /dev/null +++ b/models/pascal_voc/ResNet-50/rfcn_end2end/train_agonistic_ohem.prototxt @@ -0,0 +1,3946 @@ +name: "ResNet-50" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +# ------------------------ conv1 ----------------------------- +layer { + bottom: "data" + top: "conv1" + name: "conv1" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 7 + pad: 3 + stride: 2 + } + param { + lr_mult: 0.0 + } + param { + lr_mult: 0.0 + } + +} + +layer { + bottom: "conv1" + top: "conv1" + name: "bn_conv1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "scale_conv1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "conv1" + top: "conv1" + name: "conv1_relu" + type: "ReLU" +} + +layer { + bottom: "conv1" + top: "pool1" + name: "pool1" + type: "Pooling" + pooling_param { + kernel_size: 3 + stride: 2 + pool: MAX + } +} + +layer { + bottom: "pool1" + top: "res2a_branch1" + name: "res2a_branch1" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "bn2a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + top: "res2a_branch1" + name: "scale2a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "pool1" + top: "res2a_branch2a" + name: "res2a_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + 
stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "bn2a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "scale2a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2a" + name: "res2a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2a" + top: "res2a_branch2b" + name: "res2a_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "bn2a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "scale2a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2b" + name: "res2a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2a_branch2b" + top: "res2a_branch2c" + name: "res2a_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "bn2a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch2c" + top: "res2a_branch2c" + name: "scale2a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a_branch1" + bottom: "res2a_branch2c" + top: "res2a" + name: "res2a" + type: "Eltwise" +} + +layer { + bottom: "res2a" + top: "res2a" + name: "res2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2a" + top: "res2b_branch2a" + name: "res2b_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "bn2b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "scale2b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2a" + name: "res2b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2a" + top: "res2b_branch2b" + name: 
"res2b_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "bn2b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "scale2b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2b" + name: "res2b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b_branch2b" + top: "res2b_branch2c" + name: "res2b_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "bn2b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b_branch2c" + top: "res2b_branch2c" + name: "scale2b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2a" + bottom: "res2b_branch2c" + top: "res2b" + name: "res2b" + type: "Eltwise" +} + +layer { + bottom: "res2b" + top: "res2b" + name: "res2b_relu" + type: "ReLU" +} + +layer { + bottom: "res2b" + top: "res2c_branch2a" + name: "res2c_branch2a" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "bn2c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "scale2c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2a" + name: "res2c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res2c_branch2a" + top: "res2c_branch2b" + name: "res2c_branch2b" + type: "Convolution" + convolution_param { + num_output: 64 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "bn2c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "scale2c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2b" + name: "res2c_branch2b_relu" + 
type: "ReLU" +} + +layer { + bottom: "res2c_branch2b" + top: "res2c_branch2c" + name: "res2c_branch2c" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "bn2c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c_branch2c" + top: "res2c_branch2c" + name: "scale2c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2b" + bottom: "res2c_branch2c" + top: "res2c" + name: "res2c" + type: "Eltwise" +} + +layer { + bottom: "res2c" + top: "res2c" + name: "res2c_relu" + type: "ReLU" +} + +layer { + bottom: "res2c" + top: "res3a_branch1" + name: "res3a_branch1" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "bn3a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + top: "res3a_branch1" + name: "scale3a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res2c" + top: "res3a_branch2a" + name: "res3a_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "bn3a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "scale3a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2a" + name: "res3a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a_branch2a" + top: "res3a_branch2b" + name: "res3a_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "bn3a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "scale3a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2b" + top: "res3a_branch2b" + name: "res3a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: 
"res3a_branch2b" + top: "res3a_branch2c" + name: "res3a_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "bn3a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch2c" + top: "res3a_branch2c" + name: "scale3a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a_branch1" + bottom: "res3a_branch2c" + top: "res3a" + name: "res3a" + type: "Eltwise" +} + +layer { + bottom: "res3a" + top: "res3a" + name: "res3a_relu" + type: "ReLU" +} + +layer { + bottom: "res3a" + top: "res3b_branch2a" + name: "res3b_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "bn3b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "scale3b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2a" + name: "res3b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2a" + top: "res3b_branch2b" + name: "res3b_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "bn3b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "scale3b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2b" + name: "res3b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b_branch2b" + top: "res3b_branch2c" + name: "res3b_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "bn3b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b_branch2c" + top: "res3b_branch2c" + name: "scale3b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3a" + 
bottom: "res3b_branch2c" + top: "res3b" + name: "res3b" + type: "Eltwise" +} + +layer { + bottom: "res3b" + top: "res3b" + name: "res3b_relu" + type: "ReLU" +} + +layer { + bottom: "res3b" + top: "res3c_branch2a" + name: "res3c_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "bn3c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "scale3c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2a" + name: "res3c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2a" + top: "res3c_branch2b" + name: "res3c_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "bn3c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "scale3c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2b" + name: "res3c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3c_branch2b" + top: "res3c_branch2c" + name: "res3c_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "bn3c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c_branch2c" + top: "res3c_branch2c" + name: "scale3c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3b" + bottom: "res3c_branch2c" + top: "res3c" + name: "res3c" + type: "Eltwise" +} + +layer { + bottom: "res3c" + top: "res3c" + name: "res3c_relu" + type: "ReLU" +} + +layer { + bottom: "res3c" + top: "res3d_branch2a" + name: "res3d_branch2a" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "bn3d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "scale3d_branch2a" + type: "Scale" + 
scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2a" + name: "res3d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2a" + top: "res3d_branch2b" + name: "res3d_branch2b" + type: "Convolution" + convolution_param { + num_output: 128 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "bn3d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "scale3d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2b" + name: "res3d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res3d_branch2b" + top: "res3d_branch2c" + name: "res3d_branch2c" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "bn3d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d_branch2c" + top: "res3d_branch2c" + name: "scale3d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3c" + bottom: "res3d_branch2c" + top: "res3d" + name: "res3d" + type: "Eltwise" +} + +layer { + bottom: "res3d" + top: "res3d" + name: "res3d_relu" + type: "ReLU" +} + +layer { + bottom: "res3d" + top: "res4a_branch1" + name: "res4a_branch1" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "bn4a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + top: "res4a_branch1" + name: "scale4a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res3d" + top: "res4a_branch2a" + name: "res4a_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 2 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "bn4a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "scale4a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + 
param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2a" + name: "res4a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2a" + top: "res4a_branch2b" + name: "res4a_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "bn4a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "scale4a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2b" + name: "res4a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4a_branch2b" + top: "res4a_branch2c" + name: "res4a_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "bn4a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch2c" + top: "res4a_branch2c" + name: "scale4a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a_branch1" + bottom: "res4a_branch2c" + top: "res4a" + name: "res4a" + type: "Eltwise" +} + +layer { + bottom: "res4a" + top: "res4a" + name: "res4a_relu" + type: "ReLU" +} + +layer { + bottom: "res4a" + top: "res4b_branch2a" + name: "res4b_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "bn4b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "scale4b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2a" + name: "res4b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2a" + top: "res4b_branch2b" + name: "res4b_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "bn4b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: 
"res4b_branch2b" + name: "scale4b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2b" + name: "res4b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b_branch2b" + top: "res4b_branch2c" + name: "res4b_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "bn4b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b_branch2c" + top: "res4b_branch2c" + name: "scale4b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4a" + bottom: "res4b_branch2c" + top: "res4b" + name: "res4b" + type: "Eltwise" +} + +layer { + bottom: "res4b" + top: "res4b" + name: "res4b_relu" + type: "ReLU" +} + +layer { + bottom: "res4b" + top: "res4c_branch2a" + name: "res4c_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "bn4c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "scale4c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2a" + name: "res4c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2a" + top: "res4c_branch2b" + name: "res4c_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "bn4c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "scale4c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2b" + name: "res4c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4c_branch2b" + top: "res4c_branch2c" + name: "res4c_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "bn4c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param 
{ + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c_branch2c" + top: "res4c_branch2c" + name: "scale4c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4b" + bottom: "res4c_branch2c" + top: "res4c" + name: "res4c" + type: "Eltwise" +} + +layer { + bottom: "res4c" + top: "res4c" + name: "res4c_relu" + type: "ReLU" +} + +layer { + bottom: "res4c" + top: "res4d_branch2a" + name: "res4d_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "bn4d_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "scale4d_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2a" + name: "res4d_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2a" + top: "res4d_branch2b" + name: "res4d_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "bn4d_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "scale4d_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2b" + name: "res4d_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4d_branch2b" + top: "res4d_branch2c" + name: "res4d_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "bn4d_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d_branch2c" + top: "res4d_branch2c" + name: "scale4d_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4c" + bottom: "res4d_branch2c" + top: "res4d" + name: "res4d" + type: "Eltwise" +} + +layer { + bottom: "res4d" + top: "res4d" + name: "res4d_relu" + type: "ReLU" +} + +layer { + bottom: "res4d" + top: "res4e_branch2a" + name: "res4e_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "bn4e_branch2a" + type: 
"BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "scale4e_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2a" + name: "res4e_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2a" + top: "res4e_branch2b" + name: "res4e_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "bn4e_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "scale4e_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2b" + name: "res4e_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4e_branch2b" + top: "res4e_branch2c" + name: "res4e_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "bn4e_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e_branch2c" + top: "res4e_branch2c" + name: "scale4e_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4d" + bottom: "res4e_branch2c" + top: "res4e" + name: "res4e" + type: "Eltwise" +} + +layer { + bottom: "res4e" + top: "res4e" + name: "res4e_relu" + type: "ReLU" +} + +layer { + bottom: "res4e" + top: "res4f_branch2a" + name: "res4f_branch2a" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "bn4f_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "scale4f_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2a" + name: "res4f_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2a" + top: "res4f_branch2b" + name: "res4f_branch2b" + type: "Convolution" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + 
+layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "bn4f_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "scale4f_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2b" + name: "res4f_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res4f_branch2b" + top: "res4f_branch2c" + name: "res4f_branch2c" + type: "Convolution" + convolution_param { + num_output: 1024 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "bn4f_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f_branch2c" + top: "res4f_branch2c" + name: "scale4f_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4e" + bottom: "res4f_branch2c" + top: "res4f" + name: "res4f" + type: "Eltwise" +} + +layer { + bottom: "res4f" + top: "res4f" + name: "res4f_relu" + type: "ReLU" +} + +layer { + bottom: "res4f" + top: "res5a_branch1" + name: "res5a_branch1" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "bn5a_branch1" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + top: "res5a_branch1" + name: "scale5a_branch1" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res4f" + top: "res5a_branch2a" + name: "res5a_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "bn5a_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "scale5a_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2a" + name: "res5a_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2a" + top: "res5a_branch2b" + name: "res5a_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: 
"res5a_branch2b" + top: "res5a_branch2b" + name: "bn5a_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "scale5a_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2b" + name: "res5a_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5a_branch2b" + top: "res5a_branch2c" + name: "res5a_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "bn5a_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch2c" + top: "res5a_branch2c" + name: "scale5a_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a_branch1" + bottom: "res5a_branch2c" + top: "res5a" + name: "res5a" + type: "Eltwise" +} + +layer { + bottom: "res5a" + top: "res5a" + name: "res5a_relu" + type: "ReLU" +} + +layer { + bottom: "res5a" + top: "res5b_branch2a" + name: "res5b_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "bn5b_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "scale5b_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2a" + name: "res5b_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2a" + top: "res5b_branch2b" + name: "res5b_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "bn5b_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "scale5b_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2b" + name: "res5b_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b_branch2b" + top: "res5b_branch2c" + name: "res5b_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + 
kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "bn5b_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b_branch2c" + top: "res5b_branch2c" + name: "scale5b_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5a" + bottom: "res5b_branch2c" + top: "res5b" + name: "res5b" + type: "Eltwise" +} + +layer { + bottom: "res5b" + top: "res5b" + name: "res5b_relu" + type: "ReLU" +} + +layer { + bottom: "res5b" + top: "res5c_branch2a" + name: "res5c_branch2a" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "bn5c_branch2a" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "scale5c_branch2a" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2a" + name: "res5c_branch2a_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2a" + top: "res5c_branch2b" + name: "res5c_branch2b" + type: "Convolution" + convolution_param { + num_output: 512 + kernel_size: 3 + dilation: 2 + pad: 2 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "bn5c_branch2b" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "scale5c_branch2b" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2b" + name: "res5c_branch2b_relu" + type: "ReLU" +} + +layer { + bottom: "res5c_branch2b" + top: "res5c_branch2c" + name: "res5c_branch2c" + type: "Convolution" + convolution_param { + num_output: 2048 + kernel_size: 1 + pad: 0 + stride: 1 + bias_term: false + } + param { + lr_mult: 1.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "bn5c_branch2c" + type: "BatchNorm" + batch_norm_param { + use_global_stats: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5c_branch2c" + top: "res5c_branch2c" + name: "scale5c_branch2c" + type: "Scale" + scale_param { + bias_term: true + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } + param { + lr_mult: 0.0 + decay_mult: 0.0 + } +} + +layer { + bottom: "res5b" + bottom: "res5c_branch2c" + top: "res5c" + name: "res5c" + type: "Eltwise" +} + +layer { + bottom: "res5c" + top: "res5c" + name: 
"res5c_relu" + type: "ReLU" +} + + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "res5c" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 2" + } +} + +#----------------------new conv layer------------------ +layer { + bottom: "res5c" + top: "conv_new_1" + name: "conv_new_1" + type: "Convolution" + convolution_param { + num_output: 1024 + 
kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +layer { + bottom: "conv_new_1" + top: "conv_new_1" + name: "conv_new_1_relu" + type: "ReLU" +} + +layer { + bottom: "conv_new_1" + top: "rfcn_cls" + name: "rfcn_cls" + type: "Convolution" + convolution_param { + num_output: 1029 #21*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} +layer { + bottom: "conv_new_1" + top: "rfcn_bbox" + name: "rfcn_bbox" + type: "Convolution" + convolution_param { + num_output: 392 #8*(7^2) cls_num*(score_maps_size^2) + kernel_size: 1 + pad: 0 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } + param { + lr_mult: 1.0 + } + param { + lr_mult: 2.0 + } +} + +#--------------position sensitive RoI pooling-------------- +layer { + bottom: "rfcn_cls" + bottom: "rois" + top: "psroipooled_cls_rois" + name: "psroipooled_cls_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 21 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_cls_rois" + top: "cls_score" + name: "ave_cls_score_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +layer { + bottom: "rfcn_bbox" + bottom: "rois" + top: "psroipooled_loc_rois" + name: "psroipooled_loc_rois" + type: "PSROIPooling" + psroi_pooling_param { + spatial_scale: 0.0625 + output_dim: 8 + group_size: 7 + } +} + +layer { + bottom: "psroipooled_loc_rois" + top: "bbox_pred" + name: "ave_bbox_pred_rois" + type: "Pooling" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 7 + } +} + + +#--------------online hard example mining-------------- +layer { + name: "per_roi_loss_cls" + type: "SoftmaxWithLossOHEM" + bottom: "cls_score" + bottom: "labels" + top: "temp_loss_cls" + top: "temp_prob_cls" + top: "per_roi_loss_cls" + loss_weight: 0 + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + top: "temp_loss_bbox" + top: "per_roi_loss_bbox" + loss_weight: 0 + loss_weight: 0 + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "per_roi_loss" + type: "Eltwise" + bottom: "per_roi_loss_cls" + bottom: "per_roi_loss_bbox" + top: "per_roi_loss" + propagate_down: false + propagate_down: false +} + +layer { + bottom: "rois" + bottom: "per_roi_loss" + bottom: "labels" + bottom: "bbox_inside_weights" + top: "labels_ohem" + top: "bbox_loss_weights_ohem" + name: "annotator_detector" + type: "BoxAnnotatorOHEM" + box_annotator_ohem_param { + roi_per_img: 128 + ignore_label: -1 + } + propagate_down: false + propagate_down: false + propagate_down: false + propagate_down: false +} + +layer { + name: "silence" + type: "Silence" + bottom: "bbox_outside_weights" + bottom: "temp_loss_cls" + bottom: "temp_prob_cls" + bottom: "temp_loss_bbox" +} + +#-----------------------output------------------------ +layer { + name: "loss" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels_ohem" + top: "loss_cls" + loss_weight: 1 + loss_param { + ignore_label: -1 + } + propagate_down: true + propagate_down: false +} + +layer { + name: 
"accuarcy" + type: "Accuracy" + bottom: "cls_score" + bottom: "labels_ohem" + top: "accuarcy" + #include: { phase: TEST } + accuracy_param { + ignore_label: -1 + } + propagate_down: false + propagate_down: false +} + +layer { + name: "loss_bbox" + type: "SmoothL1LossOHEM" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_loss_weights_ohem" + top: "loss_bbox" + loss_weight: 1 + loss_param { + normalization: PRE_FIXED + pre_fixed_normalizer: 128 + } + propagate_down: true + propagate_down: false + propagate_down: false +} + diff --git a/models/pascal_voc/VGG16/fast_rcnn/solver.prototxt b/models/pascal_voc/VGG16/fast_rcnn/solver.prototxt new file mode 100644 index 0000000..9449ab1 --- /dev/null +++ b/models/pascal_voc/VGG16/fast_rcnn/solver.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG16/fast_rcnn/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +# iter_size: 1 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_fast_rcnn" +#debug_info: true diff --git a/models/pascal_voc/VGG16/fast_rcnn/test.prototxt b/models/pascal_voc/VGG16/fast_rcnn/test.prototxt new file mode 100644 index 0000000..fec2369 --- /dev/null +++ b/models/pascal_voc/VGG16/fast_rcnn/test.prototxt @@ -0,0 +1,517 @@ +name: "VGG_ILSVRC_16_layers" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "rois" +input_shape { + dim: 1 # to be changed on-the-fly to num ROIs + dim: 5 # [batch ind, x1, y1, x2, y2] zero-based indexing +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + 
type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { 
+ name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/pascal_voc/VGG16/fast_rcnn/train.prototxt b/models/pascal_voc/VGG16/fast_rcnn/train.prototxt new file mode 100644 index 0000000..2e7958f --- /dev/null +++ b/models/pascal_voc/VGG16/fast_rcnn/train.prototxt @@ -0,0 +1,503 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: 
"conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + 
dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/faster_rcnn_test.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/faster_rcnn_test.pt new file mode 100644 index 0000000..c264bfd --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/faster_rcnn_test.pt @@ -0,0 +1,409 @@ +name: "VGG_ILSVRC_16_layers" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + convolution_param { + num_output: 64 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + convolution_param { + num_output: 64 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + convolution_param { + num_output: 128 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + convolution_param { + num_output: 128 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + convolution_param { + num_output: 256 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + convolution_param { + num_output: 256 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + convolution_param { + num_output: 256 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv4_1" + type: 
"Convolution" + bottom: "pool3" + top: "conv4_1" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + inner_product_param { + num_output: 4096 + } +} +layer 
{ + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + inner_product_param { + num_output: 21 + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + inner_product_param { + num_output: 84 + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/rpn_test.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/rpn_test.pt new file mode 100644 index 0000000..0e7b1da --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/rpn_test.pt @@ -0,0 +1,341 @@ +name: "VGG_ILSVRC_16_layers" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + convolution_param { + num_output: 64 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + convolution_param { + num_output: 64 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + convolution_param { + num_output: 128 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + convolution_param { + num_output: 128 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + convolution_param { + num_output: 256 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + convolution_param { + num_output: 256 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + convolution_param { + num_output: 256 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + convolution_param { + num_output: 512 + pad: 1 
kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + top: 'scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt new file mode 100644 index 0000000..82df70a --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_fast_rcnn" diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt new file mode 100644 index 0000000..628dc85 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt @@ -0,0 +1,542 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'data' + type: 'Python' + 
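
The stage-1 Fast R-CNN solver above uses Caffe's plain "step" policy; the total iteration count (40k, per the 30k40k file name) is supplied by the training script rather than stored in the solver, and snapshot: 0 because lib/fast_rcnn/train.py writes its own snapshots so that, among other things, it can un-normalize the bbox-regression weights before saving. The resulting learning-rate schedule, as a small sketch:

def step_lr(it, base_lr=0.001, gamma=0.1, stepsize=30000):
    # Caffe "step" policy: lr = base_lr * gamma ^ floor(iter / stepsize)
    return base_lr * gamma ** (it // stepsize)

print(step_lr(0), step_lr(29999), step_lr(30000))   # 0.001 0.001 0.0001 over the 40k run
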
top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { 
+ num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} + +#========= RPN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + 
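
From rpn_conv/3x3 onward, this stage-1 Fast R-CNN net carries "dummy" RPN layers that are frozen (lr_mult: 0) and routed into Silence layers below: training never updates them, but keeping them in the net means the snapshot written by this stage still contains the rpn_* parameters that the alternating-optimization script expects when it seeds the next stage. A minimal sketch that inspects a snapshot for those parameters (the .caffemodel file name here is hypothetical):

from caffe.proto import caffe_pb2

weights = caffe_pb2.NetParameter()
with open('vgg16_fast_rcnn_stage1_iter_40000.caffemodel', 'rb') as f:   # hypothetical snapshot name
    weights.ParseFromString(f.read())
# The frozen RPN layers are saved along with everything else
print([l.name for l in weights.layer if l.name.startswith('rpn')])
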
+layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "silence_rpn_cls_score" + type: "Silence" + bottom: "rpn_cls_score" +} +layer { + name: "silence_rpn_bbox_pred" + type: "Silence" + bottom: "rpn_bbox_pred" +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt new file mode 100644 index 0000000..a6e29f0 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 60000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_rpn" diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_train.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_train.pt new file mode 100644 index 0000000..e5f3c50 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage1_rpn_train.pt @@ -0,0 +1,525 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + 
bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: 
"rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RCNN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "dummy_roi_pool_conv5" + type: "DummyData" + top: "dummy_roi_pool_conv5" + dummy_data_param { + shape { dim: 1 dim: 25088 } + data_filler { type: "constant" value: 0 } + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "dummy_roi_pool_conv5" + top: "fc6" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "silence_fc7" + type: "Silence" + bottom: "fc7" +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt new file mode 100644 index 0000000..861536c --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_fast_rcnn" diff --git 
a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt new file mode 100644 index 0000000..e38bb2b --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt @@ -0,0 +1,490 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer 
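
In this stage-2 Fast R-CNN network every convolutional layer, here and below through conv5_3, is frozen (lr_mult: 0, decay_mult: 0); only the fully connected head is updated, so the shared conv features stay exactly the ones the stage-2 RPN was trained against, which is what lets the alternating optimization end with a single shared backbone. A small sketch that lists the layers this prototxt actually updates by reading its lr_mult fields (assumes pycaffe's protobuf bindings are importable):

from caffe.proto import caffe_pb2
from google.protobuf import text_format

net = caffe_pb2.NetParameter()
with open('models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt') as f:
    text_format.Merge(f.read(), net)
trainable = [l.name for l in net.layer if l.param and any(p.lr_mult > 0 for p in l.param)]
print(trainable)   # expected: ['fc6', 'fc7', 'cls_score', 'bbox_pred']
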
{ + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} + +#========= RPN 
============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "silence_rpn_cls_score" + type: "Silence" + bottom: "rpn_cls_score" +} +layer { + name: "silence_rpn_bbox_pred" + type: "Silence" + bottom: "rpn_bbox_pred" +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt new file mode 100644 index 0000000..7199df8 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 60000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_rpn" diff --git a/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_train.pt b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_train.pt new file mode 100644 index 0000000..6d82857 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_alt_opt/stage2_rpn_train.pt @@ -0,0 +1,465 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { lr_mult: 0 decay_mult: 0 } 
+ param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { 
lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RCNN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "dummy_roi_pool_conv5" + type: "DummyData" + top: "dummy_roi_pool_conv5" + dummy_data_param { + shape { dim: 1 dim: 25088 } + data_filler { type: "constant" value: 0 } + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "dummy_roi_pool_conv5" + top: "fc6" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "silence_fc7" + type: "Silence" + bottom: "fc7" +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_end2end/solver.prototxt b/models/pascal_voc/VGG16/faster_rcnn_end2end/solver.prototxt new file mode 100644 index 
0000000..7547cc8 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_end2end/solver.prototxt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG16/faster_rcnn_end2end/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 50000 +display: 20 +average_loss: 100 +# iter_size: 1 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg16_faster_rcnn" +iter_size: 2 diff --git a/models/pascal_voc/VGG16/faster_rcnn_end2end/test.prototxt b/models/pascal_voc/VGG16/faster_rcnn_end2end/test.prototxt new file mode 100644 index 0000000..4a93820 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_end2end/test.prototxt @@ -0,0 +1,608 @@ +name: "VGG_ILSVRC_16_layers" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + 
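
Two notes on the end-to-end files starting above: the solver accumulates gradients over two forward/backward passes per weight update (iter_size: 2, one image per pass in this configuration), and the 1x3x224x224 / 1x3 input_shape declarations in test.prototxt are only placeholders; at test time the blobs are reshaped to the actual preprocessed image, and im_info carries (height, width, scale) for the proposal layer. A minimal driving sketch in the spirit of lib/fast_rcnn/test.py (net construction and image preprocessing omitted):

import numpy as np

def forward_image(net, im_blob, im_scale):
    # im_blob: (1, 3, H, W) mean-subtracted, resized image; im_scale: the resize factor
    im_info = np.array([[im_blob.shape[2], im_blob.shape[3], im_scale]], dtype=np.float32)
    net.blobs['data'].reshape(*im_blob.shape)
    net.blobs['im_info'].reshape(*im_info.shape)
    out = net.forward(data=im_blob.astype(np.float32, copy=False), im_info=im_info)
    rois = net.blobs['rois'].data.copy()   # proposals emitted by the proposal layer
    return rois, out['cls_prob'], out['bbox_pred']
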
name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + 
type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/pascal_voc/VGG16/faster_rcnn_end2end/train.prototxt b/models/pascal_voc/VGG16/faster_rcnn_end2end/train.prototxt new file mode 100644 index 0000000..ebadb49 --- /dev/null +++ b/models/pascal_voc/VGG16/faster_rcnn_end2end/train.prototxt @@ -0,0 +1,673 @@ +name: "VGG_ILSVRC_16_layers" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +layer { + name: "conv1_1" + type: "Convolution" + bottom: "data" + top: "conv1_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_1" + type: "ReLU" + bottom: "conv1_1" + top: "conv1_1" +} +layer { + name: "conv1_2" + type: "Convolution" + bottom: "conv1_1" + top: "conv1_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu1_2" + type: "ReLU" + 
bottom: "conv1_2" + top: "conv1_2" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv1_2" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv2_1" + type: "Convolution" + bottom: "pool1" + top: "conv2_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_1" + type: "ReLU" + bottom: "conv2_1" + top: "conv2_1" +} +layer { + name: "conv2_2" + type: "Convolution" + bottom: "conv2_1" + top: "conv2_2" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu2_2" + type: "ReLU" + bottom: "conv2_2" + top: "conv2_2" +} +layer { + name: "pool2" + type: "Pooling" + bottom: "conv2_2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv3_1" + type: "Convolution" + bottom: "pool2" + top: "conv3_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_1" + type: "ReLU" + bottom: "conv3_1" + top: "conv3_1" +} +layer { + name: "conv3_2" + type: "Convolution" + bottom: "conv3_1" + top: "conv3_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_2" + type: "ReLU" + bottom: "conv3_2" + top: "conv3_2" +} +layer { + name: "conv3_3" + type: "Convolution" + bottom: "conv3_2" + top: "conv3_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3_3" + type: "ReLU" + bottom: "conv3_3" + top: "conv3_3" +} +layer { + name: "pool3" + type: "Pooling" + bottom: "conv3_3" + top: "pool3" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv4_1" + type: "Convolution" + bottom: "pool3" + top: "conv4_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_1" + type: "ReLU" + bottom: "conv4_1" + top: "conv4_1" +} +layer { + name: "conv4_2" + type: "Convolution" + bottom: "conv4_1" + top: "conv4_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_2" + type: "ReLU" + bottom: "conv4_2" + top: "conv4_2" +} +layer { + name: "conv4_3" + type: "Convolution" + bottom: "conv4_2" + top: "conv4_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4_3" + type: "ReLU" + bottom: "conv4_3" + top: "conv4_3" +} +layer { + name: "pool4" + type: "Pooling" + bottom: "conv4_3" + top: "pool4" + pooling_param { + pool: MAX + kernel_size: 2 + stride: 2 + } +} +layer { + name: "conv5_1" + type: "Convolution" + bottom: "pool4" + top: "conv5_1" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_1" + type: "ReLU" + bottom: "conv5_1" + top: "conv5_1" +} +layer { + name: "conv5_2" + type: "Convolution" + bottom: "conv5_1" + top: "conv5_2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: 
"relu5_2" + type: "ReLU" + bottom: "conv5_2" + top: "conv5_2" +} +layer { + name: "conv5_3" + type: "Convolution" + bottom: "conv5_2" + top: "conv5_3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5_3" + type: "ReLU" + bottom: "conv5_3" + top: "conv5_3" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5_3" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 512 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 
'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 21" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5_3" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 7 + pooled_h: 7 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/solver.prototxt b/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/solver.prototxt new file mode 100644 index 0000000..eaa94d9 --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/solver.prototxt @@ -0,0 +1,15 @@ +train_net: "models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_fast_rcnn" +#debug_info: true diff --git a/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/test.prototxt b/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/test.prototxt new file mode 100644 index 0000000..baeac36 --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/test.prototxt @@ -0,0 +1,317 @@ +name: "VGG_CNN_M_1024" +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} +input: "rois" +input_shape { + dim: 1 # to be changed on-the-fly to num ROIs + dim: 5 # [batch ind, x1, y1, x2, y2] zero-based indexing +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + 
bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + 
type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/train.prototxt b/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/train.prototxt new file mode 100644 index 0000000..d702367 --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/fast_rcnn/train.prototxt @@ -0,0 +1,310 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param 
{ + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/faster_rcnn_test.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/faster_rcnn_test.pt new file mode 100644 index 0000000..954b276 --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/faster_rcnn_test.pt @@ -0,0 +1,289 @@ +name: "VGG_CNN_M_1024" +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + 
convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + inner_product_param { + num_output: 21 + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + inner_product_param { + num_output: 84 + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/rpn_test.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/rpn_test.pt new file mode 100644 index 0000000..fdf373f --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/rpn_test.pt @@ -0,0 +1,221 @@ +name: "VGG_CNN_M_1024" +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: 
"Convolution" + bottom: "pool2" + top: "conv3" + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + top: 'scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt new file mode 100644 index 0000000..8444a3e --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_fast_rcnn" diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt new file mode 100644 index 0000000..1bc1534 --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt @@ -0,0 +1,337 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + 
name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 256 + pad: 1 kernel_size: 5 stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" 
+ bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} + +#========= RPN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "silence_rpn_cls_score" + type: "Silence" + bottom: "rpn_cls_score" +} +layer { + name: "silence_rpn_bbox_pred" + type: "Silence" + bottom: "rpn_bbox_pred" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt new file mode 100644 index 0000000..6bea5fc --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 60000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_rpn" diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_train.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_train.pt new file mode 100644 index 0000000..29fdfe0 --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage1_rpn_train.pt @@ -0,0 +1,286 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + 
top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 256 + pad: 1 kernel_size: 5 stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 1 } + param { lr_mult: 2 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 
'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RCNN ============ + +layer { + name: "dummy_roi_pool_conv5" + type: "DummyData" + top: "dummy_roi_pool_conv5" + dummy_data_param { + shape { dim: 1 dim: 18432 } + data_filler { type: "gaussian" std: 0.01 } + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "dummy_roi_pool_conv5" + top: "fc6" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "silence_fc7" + type: "Silence" + bottom: "fc7" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt new file mode 100644 index 0000000..a45a6ee --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_fast_rcnn" diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt new file mode 100644 index 0000000..4825b1b --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt @@ -0,0 +1,337 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 kernel_size: 5 stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 
decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1 } + param { lr_mult: 2 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} + +#========= RPN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 
} + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "silence_rpn_cls_score" + type: "Silence" + bottom: "rpn_cls_score" +} +layer { + name: "silence_rpn_bbox_pred" + type: "Silence" + bottom: "rpn_bbox_pred" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt new file mode 100644 index 0000000..85f4f0c --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 60000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_rpn" diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_train.pt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_train.pt new file mode 100644 index 0000000..d27e76b --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_alt_opt/stage2_rpn_train.pt @@ -0,0 +1,286 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + pad: 1 kernel_size: 5 stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} 
+layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 512 + pad: 1 kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RCNN ============ + +layer { + name: "dummy_roi_pool_conv5" + type: "DummyData" + top: "dummy_roi_pool_conv5" + dummy_data_param { + shape { dim: 1 dim: 18432 } + data_filler { type: "gaussian" std: 0.01 } + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "dummy_roi_pool_conv5" + top: "fc6" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "silence_fc7" + type: "Silence" + bottom: "fc7" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt new file mode 100644 index 0000000..8134d4e --- /dev/null +++ 
b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/solver.prototxt @@ -0,0 +1,14 @@ +train_net: "models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt" +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 50000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "vgg_cnn_m_1024_faster_rcnn" diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt new file mode 100644 index 0000000..c8bc90a --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/test.prototxt @@ -0,0 +1,450 @@ +name: "VGG_CNN_M_1024" +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: 
"rpn/output" + top: "rpn/output" +} + +#layer { +# name: "rpn_conv/3x3" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/3x3" +# param { lr_mult: 1.0 decay_mult: 1.0 } +# param { lr_mult: 2.0 decay_mult: 0 } +# convolution_param { +# num_output: 192 +# kernel_size: 3 pad: 1 stride: 1 +# weight_filler { type: "gaussian" std: 0.01 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn_conv/5x5" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/5x5" +# param { lr_mult: 1.0 decay_mult: 1.0 } +# param { lr_mult: 2.0 decay_mult: 0 } +# convolution_param { +# num_output: 64 +# kernel_size: 5 pad: 2 stride: 1 +# weight_filler { type: "gaussian" std: 0.0036 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn/output" +# type: "Concat" +# bottom: "rpn_conv/3x3" +# bottom: "rpn_conv/5x5" +# top: "rpn/output" +#} +#layer { +# name: "rpn_relu/output" +# type: "ReLU" +# bottom: "rpn/output" +# top: "rpn/output" +#} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 decay_mult: 1.0 } + param { lr_mult: 2.0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: 
"InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" +} diff --git a/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt new file mode 100644 index 0000000..81a4d3e --- /dev/null +++ b/models/pascal_voc/VGG_CNN_M_1024/faster_rcnn_end2end/train.prototxt @@ -0,0 +1,484 @@ +name: "VGG_CNN_M_1024" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + convolution_param { + num_output: 96 + kernel_size: 7 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 5 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 5 + alpha: 0.0005 + beta: 0.75 + k: 2 + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + convolution_param { + num_output: 512 + pad: 1 + kernel_size: 3 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" 
std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} + +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} + +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} + +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} + +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} + +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} + +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 21" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "pool5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "pool5" + top: "fc6" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: 
"Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 1024 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { + lr_mult: 1 + } + param { + lr_mult: 2 + } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "loss_cls" + loss_weight: 1 +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "loss_bbox" + loss_weight: 1 +} diff --git a/models/pascal_voc/ZF/fast_rcnn/solver.prototxt b/models/pascal_voc/ZF/fast_rcnn/solver.prototxt new file mode 100644 index 0000000..3b346cc --- /dev/null +++ b/models/pascal_voc/ZF/fast_rcnn/solver.prototxt @@ -0,0 +1,18 @@ +train_net: "models/pascal_voc/ZF/fast_rcnn/train.prototxt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "zf_fast_rcnn" +#debug_info: true +#iter_size: 2 diff --git a/models/pascal_voc/ZF/fast_rcnn/test.prototxt b/models/pascal_voc/ZF/fast_rcnn/test.prototxt new file mode 100644 index 0000000..18d5cd8 --- /dev/null +++ b/models/pascal_voc/ZF/fast_rcnn/test.prototxt @@ -0,0 +1,251 @@ +name: "ZF" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "rois" +input_shape { + dim: 1 # to be changed on-the-fly to num ROIs + dim: 5 # [batch ind, x1, y1, x2, y2] zero-based indexing +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: 
CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + inner_product_param { + num_output: 21 + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + inner_product_param { + num_output: 84 + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" + loss_param { + ignore_label: -1 + normalize: true + } +} diff --git a/models/pascal_voc/ZF/fast_rcnn/train.prototxt b/models/pascal_voc/ZF/fast_rcnn/train.prototxt new file mode 100644 index 0000000..921d8e3 --- /dev/null +++ b/models/pascal_voc/ZF/fast_rcnn/train.prototxt @@ -0,0 +1,300 @@ +name: "ZF" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + 
convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "bbox_loss" + loss_weight: 1 +} diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/faster_rcnn_test.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/faster_rcnn_test.pt new file mode 100644 
index 0000000..b24aae4 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/faster_rcnn_test.pt @@ -0,0 +1,327 @@ +name: "ZF" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + + +layer { + name: "rpn_conv1" + type: "Convolution" + bottom: "conv5" + top: "rpn_conv1" + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + } +} +layer { + name: "rpn_relu1" + type: "ReLU" + bottom: "rpn_conv1" + top: "rpn_conv1" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_cls_score" + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 
'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + inner_product_param { + num_output: 21 + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + inner_product_param { + num_output: 84 + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" + loss_param { + ignore_label: -1 + normalize: true + } +} diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/rpn_test.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/rpn_test.pt new file mode 100644 index 0000000..204f08f --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/rpn_test.pt @@ -0,0 +1,233 @@ +name: "ZF" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +# ------------------------ layer 1 ----------------------------- +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} + +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + 
type: "Convolution" + bottom: "conv4" + top: "conv5" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#-----------------------layer +------------------------- + +layer { + name: "rpn_conv1" + type: "Convolution" + bottom: "conv5" + top: "rpn_conv1" + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + } +} +layer { + name: "rpn_relu1" + type: "ReLU" + bottom: "rpn_conv1" + top: "rpn_conv1" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_cls_score" + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#-----------------------output------------------------ +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + top: 'scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt new file mode 100644 index 0000000..0180e7c --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_solver30k40k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "zf_fast_rcnn" diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt new file mode 100644 index 0000000..3d98184 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_fast_rcnn_train.pt @@ -0,0 +1,362 @@ +name: "ZF" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { 
+ name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: 
"bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "bbox_loss" + loss_weight: 1 +} + +#========= RPN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "rpn_conv1" + type: "Convolution" + bottom: "conv5" + top: "rpn_conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu1" + type: "ReLU" + bottom: "rpn_conv1" + top: "rpn_conv1" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_cls_score" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_bbox_pred" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "silence_rpn_cls_score" + type: "Silence" + bottom: "rpn_cls_score" +} +layer { + name: "silence_rpn_bbox_pred" + type: "Silence" + bottom: "rpn_bbox_pred" +} diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt new file mode 100644 index 0000000..23a7c6a --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_solver60k80k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 60000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "zf_rpn" diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_train.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_train.pt new file mode 100644 index 0000000..adf8605 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage1_rpn_train.pt @@ -0,0 +1,312 @@ +name: "ZF" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + 
convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv1" + type: "Convolution" + bottom: "conv5" + top: "rpn_conv1" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu1" + type: "ReLU" + bottom: "rpn_conv1" + top: "rpn_conv1" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: "rpn_bbox_inside_weights" + bottom: "rpn_bbox_outside_weights" + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + 
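The rpn_loss_bbox layer above applies a sigma-parameterized smooth-L1 penalty (sigma: 3.0) to the anchor box deltas, gated by the rpn_bbox_inside_weights / rpn_bbox_outside_weights blobs that the rpn-data (AnchorTargetLayer) Python layer produces. As a rough reference only, here is a minimal NumPy sketch of that penalty; the exact reduction and normalization are handled inside the Caffe SmoothL1Loss layer itself, so treat this as an approximation of the computation rather than the layer's implementation.

    import numpy as np

    def smooth_l1(x, sigma=3.0):
        # Sigma-parameterized smooth-L1: quadratic for |x| < 1/sigma^2, linear elsewhere.
        s2 = sigma ** 2
        ax = np.abs(x)
        return np.where(ax < 1.0 / s2, 0.5 * s2 * x ** 2, ax - 0.5 / s2)

    def rpn_bbox_loss(bbox_pred, bbox_targets, inside_w, outside_w, sigma=3.0):
        # All four blobs share rpn_bbox_pred's (N, 36, H, W) shape (4 deltas x 9 anchors).
        # inside_w zeroes out anchors not labeled positive; outside_w carries the
        # per-anchor normalization weight.
        diff = inside_w * (bbox_pred - bbox_targets)
        return float(np.sum(outside_w * smooth_l1(diff, sigma)))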
+#========= RCNN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "dummy_roi_pool_conv5" + type: "DummyData" + top: "dummy_roi_pool_conv5" + dummy_data_param { + shape { dim: 1 dim: 9216 } + data_filler { type: "gaussian" std: 0.01 } + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "dummy_roi_pool_conv5" + top: "fc6" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "silence_fc7" + type: "Silence" + bottom: "fc7" +} diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt new file mode 100644 index 0000000..a666def --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_solver30k40k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 30000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "zf_fast_rcnn" diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt new file mode 100644 index 0000000..262ed65 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_fast_rcnn_train.pt @@ -0,0 +1,362 @@ +name: "ZF" +layer { + name: 'data' + type: 'Python' + top: 'data' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + 
bottom: "pool2" + top: "conv3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: "bbox_inside_weights" + bottom: "bbox_outside_weights" + top: "bbox_loss" + loss_weight: 1 +} + +#========= RPN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "rpn_conv1" + type: "Convolution" + bottom: "conv5" + top: "rpn_conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu1" + type: "ReLU" + bottom: "rpn_conv1" + top: "rpn_conv1" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_cls_score" + 
param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_bbox_pred" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "silence_rpn_cls_score" + type: "Silence" + bottom: "rpn_cls_score" +} +layer { + name: "silence_rpn_bbox_pred" + type: "Silence" + bottom: "rpn_bbox_pred" +} diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt new file mode 100644 index 0000000..15d3da7 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_solver60k80k.pt @@ -0,0 +1,16 @@ +train_net: "models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_train.pt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 60000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "zf_rpn" diff --git a/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_train.pt b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_train.pt new file mode 100644 index 0000000..336b05b --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_alt_opt/stage2_rpn_train.pt @@ -0,0 +1,312 @@ +name: "ZF" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 
1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv1" + type: "Convolution" + bottom: "conv5" + top: "rpn_conv1" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu1" + type: "ReLU" + bottom: "rpn_conv1" + top: "rpn_conv1" +} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn_conv1" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: "rpn_bbox_inside_weights" + bottom: "rpn_bbox_outside_weights" + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RCNN ============ +# Dummy layers so that initial parameters are saved into the output net + +layer { + name: "dummy_roi_pool_conv5" + type: "DummyData" + top: "dummy_roi_pool_conv5" + dummy_data_param { + shape { dim: 1 dim: 9216 } + data_filler { type: "gaussian" std: 0.01 } + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "dummy_roi_pool_conv5" + top: "fc6" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + 
top: "fc7" + param { lr_mult: 0 decay_mult: 0 } + param { lr_mult: 0 decay_mult: 0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "silence_fc7" + type: "Silence" + bottom: "fc7" +} diff --git a/models/pascal_voc/ZF/faster_rcnn_end2end/solver.prototxt b/models/pascal_voc/ZF/faster_rcnn_end2end/solver.prototxt new file mode 100644 index 0000000..246697a --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_end2end/solver.prototxt @@ -0,0 +1,25 @@ +train_net: "models/pascal_voc/ZF/faster_rcnn_end2end/train.prototxt" + +base_lr: 0.001 +lr_policy: "step" +gamma: 0.1 +stepsize: 50000 +display: 20 +average_loss: 100 +momentum: 0.9 +weight_decay: 0.0005 + +#base_lr: 0.001 +#lr_policy: "exp" +#gamma: 0.999539589 # (0.00001/0.001)^(1/10000) +#display: 1 +#average_loss: 100 +#momentum: 0.9 +#weight_decay: 0.0005 + +# We disable standard caffe solver snapshotting and implement our own snapshot +# function +snapshot: 0 +# We still use the snapshot prefix, though +snapshot_prefix: "zf_faster_rcnn" +iter_size: 2 diff --git a/models/pascal_voc/ZF/faster_rcnn_end2end/test.prototxt b/models/pascal_voc/ZF/faster_rcnn_end2end/test.prototxt new file mode 100644 index 0000000..6d88dc3 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_end2end/test.prototxt @@ -0,0 +1,373 @@ +name: "ZF" + +input: "data" +input_shape { + dim: 1 + dim: 3 + dim: 224 + dim: 224 +} + +input: "im_info" +input_shape { + dim: 1 + dim: 3 +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 
+ weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} +#layer { +# name: "rpn_conv/3x3" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/3x3" +# param { lr_mult: 1.0 decay_mult: 1.0 } +# param { lr_mult: 2.0 decay_mult: 0 } +# convolution_param { +# num_output: 192 +# kernel_size: 3 pad: 1 stride: 1 +# weight_filler { type: "gaussian" std: 0.01 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn_conv/5x5" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/5x5" +# param { lr_mult: 1.0 decay_mult: 1.0 } +# param { lr_mult: 2.0 decay_mult: 0 } +# convolution_param { +# num_output: 64 +# kernel_size: 5 pad: 2 stride: 1 +# weight_filler { type: "gaussian" std: 0.0036 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn/output" +# type: "Concat" +# bottom: "rpn_conv/3x3" +# bottom: "rpn_conv/5x5" +# top: "rpn/output" +#} +#layer { +# name: "rpn_relu/output" +# type: "ReLU" +# bottom: "rpn/output" +# top: "rpn/output" +#} +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rois' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + inner_product_param { + num_output: 21 + } +} +layer { + name: 
"bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + inner_product_param { + num_output: 84 + } +} +layer { + name: "cls_prob" + type: "Softmax" + bottom: "cls_score" + top: "cls_prob" + loss_param { + ignore_label: -1 + normalize: true + } +} diff --git a/models/pascal_voc/ZF/faster_rcnn_end2end/train.prototxt b/models/pascal_voc/ZF/faster_rcnn_end2end/train.prototxt new file mode 100644 index 0000000..c044fd5 --- /dev/null +++ b/models/pascal_voc/ZF/faster_rcnn_end2end/train.prototxt @@ -0,0 +1,497 @@ +name: "ZF" +layer { + name: 'input-data' + type: 'Python' + top: 'data' + top: 'im_info' + top: 'gt_boxes' + python_param { + module: 'roi_data_layer.layer' + layer: 'RoIDataLayer' + param_str: "'num_classes': 21" + } +} + +#========= conv1-conv5 ============ + +layer { + name: "conv1" + type: "Convolution" + bottom: "data" + top: "conv1" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 96 + kernel_size: 7 + pad: 3 + stride: 2 + } +} +layer { + name: "relu1" + type: "ReLU" + bottom: "conv1" + top: "conv1" +} +layer { + name: "norm1" + type: "LRN" + bottom: "conv1" + top: "norm1" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool1" + type: "Pooling" + bottom: "norm1" + top: "pool1" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv2" + type: "Convolution" + bottom: "pool1" + top: "conv2" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 5 + pad: 2 + stride: 2 + } +} +layer { + name: "relu2" + type: "ReLU" + bottom: "conv2" + top: "conv2" +} +layer { + name: "norm2" + type: "LRN" + bottom: "conv2" + top: "norm2" + lrn_param { + local_size: 3 + alpha: 0.00005 + beta: 0.75 + norm_region: WITHIN_CHANNEL + engine: CAFFE + } +} +layer { + name: "pool2" + type: "Pooling" + bottom: "norm2" + top: "pool2" + pooling_param { + kernel_size: 3 + stride: 2 + pad: 1 + pool: MAX + } +} +layer { + name: "conv3" + type: "Convolution" + bottom: "pool2" + top: "conv3" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu3" + type: "ReLU" + bottom: "conv3" + top: "conv3" +} +layer { + name: "conv4" + type: "Convolution" + bottom: "conv3" + top: "conv4" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 384 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu4" + type: "ReLU" + bottom: "conv4" + top: "conv4" +} +layer { + name: "conv5" + type: "Convolution" + bottom: "conv4" + top: "conv5" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 + pad: 1 + stride: 1 + } +} +layer { + name: "relu5" + type: "ReLU" + bottom: "conv5" + top: "conv5" +} + +#========= RPN ============ + +layer { + name: "rpn_conv/3x3" + type: "Convolution" + bottom: "conv5" + top: "rpn/output" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 256 + kernel_size: 3 pad: 1 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_relu/3x3" + type: "ReLU" + bottom: "rpn/output" + top: "rpn/output" +} + +#layer { +# name: "rpn_conv/3x3" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/3x3" +# param { lr_mult: 1.0 } +# param { lr_mult: 2.0 } +# convolution_param { +# num_output: 192 +# 
kernel_size: 3 pad: 1 stride: 1 +# weight_filler { type: "gaussian" std: 0.01 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn_conv/5x5" +# type: "Convolution" +# bottom: "conv5" +# top: "rpn_conv/5x5" +# param { lr_mult: 1.0 } +# param { lr_mult: 2.0 } +# convolution_param { +# num_output: 64 +# kernel_size: 5 pad: 2 stride: 1 +# weight_filler { type: "gaussian" std: 0.0036 } +# bias_filler { type: "constant" value: 0 } +# } +#} +#layer { +# name: "rpn/output" +# type: "Concat" +# bottom: "rpn_conv/3x3" +# bottom: "rpn_conv/5x5" +# top: "rpn/output" +#} +#layer { +# name: "rpn_relu/output" +# type: "ReLU" +# bottom: "rpn/output" +# top: "rpn/output" +#} + +layer { + name: "rpn_cls_score" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 18 # 2(bg/fg) * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + name: "rpn_bbox_pred" + type: "Convolution" + bottom: "rpn/output" + top: "rpn_bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + convolution_param { + num_output: 36 # 4 * 9(anchors) + kernel_size: 1 pad: 0 stride: 1 + weight_filler { type: "gaussian" std: 0.01 } + bias_filler { type: "constant" value: 0 } + } +} +layer { + bottom: "rpn_cls_score" + top: "rpn_cls_score_reshape" + name: "rpn_cls_score_reshape" + type: "Reshape" + reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } } +} +layer { + name: 'rpn-data' + type: 'Python' + bottom: 'rpn_cls_score' + bottom: 'gt_boxes' + bottom: 'im_info' + bottom: 'data' + top: 'rpn_labels' + top: 'rpn_bbox_targets' + top: 'rpn_bbox_inside_weights' + top: 'rpn_bbox_outside_weights' + python_param { + module: 'rpn.anchor_target_layer' + layer: 'AnchorTargetLayer' + param_str: "'feat_stride': 16" + } +} +layer { + name: "rpn_loss_cls" + type: "SoftmaxWithLoss" + bottom: "rpn_cls_score_reshape" + bottom: "rpn_labels" + propagate_down: 1 + propagate_down: 0 + top: "rpn_cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "rpn_loss_bbox" + type: "SmoothL1Loss" + bottom: "rpn_bbox_pred" + bottom: "rpn_bbox_targets" + bottom: 'rpn_bbox_inside_weights' + bottom: 'rpn_bbox_outside_weights' + top: "rpn_loss_bbox" + loss_weight: 1 + smooth_l1_loss_param { sigma: 3.0 } +} + +#========= RoI Proposal ============ + +layer { + name: "rpn_cls_prob" + type: "Softmax" + bottom: "rpn_cls_score_reshape" + top: "rpn_cls_prob" +} +layer { + name: 'rpn_cls_prob_reshape' + type: 'Reshape' + bottom: 'rpn_cls_prob' + top: 'rpn_cls_prob_reshape' + reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } } +} +layer { + name: 'proposal' + type: 'Python' + bottom: 'rpn_cls_prob_reshape' + bottom: 'rpn_bbox_pred' + bottom: 'im_info' + top: 'rpn_rois' +# top: 'rpn_scores' + python_param { + module: 'rpn.proposal_layer' + layer: 'ProposalLayer' + param_str: "'feat_stride': 16" + } +} +#layer { +# name: 'debug-data' +# type: 'Python' +# bottom: 'data' +# bottom: 'rpn_rois' +# bottom: 'rpn_scores' +# python_param { +# module: 'rpn.debug_layer' +# layer: 'RPNDebugLayer' +# } +#} +layer { + name: 'roi-data' + type: 'Python' + bottom: 'rpn_rois' + bottom: 'gt_boxes' + top: 'rois' + top: 'labels' + top: 'bbox_targets' + top: 'bbox_inside_weights' + top: 'bbox_outside_weights' + python_param { + module: 'rpn.proposal_target_layer' + layer: 'ProposalTargetLayer' + param_str: "'num_classes': 
21" + } +} + +#========= RCNN ============ + +layer { + name: "roi_pool_conv5" + type: "ROIPooling" + bottom: "conv5" + bottom: "rois" + top: "roi_pool_conv5" + roi_pooling_param { + pooled_w: 6 + pooled_h: 6 + spatial_scale: 0.0625 # 1/16 + } +} +layer { + name: "fc6" + type: "InnerProduct" + bottom: "roi_pool_conv5" + top: "fc6" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu6" + type: "ReLU" + bottom: "fc6" + top: "fc6" +} +layer { + name: "drop6" + type: "Dropout" + bottom: "fc6" + top: "fc6" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "fc7" + type: "InnerProduct" + bottom: "fc6" + top: "fc7" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 4096 + } +} +layer { + name: "relu7" + type: "ReLU" + bottom: "fc7" + top: "fc7" +} +layer { + name: "drop7" + type: "Dropout" + bottom: "fc7" + top: "fc7" + dropout_param { + dropout_ratio: 0.5 + scale_train: false + } +} +layer { + name: "cls_score" + type: "InnerProduct" + bottom: "fc7" + top: "cls_score" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 21 + weight_filler { + type: "gaussian" + std: 0.01 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bbox_pred" + type: "InnerProduct" + bottom: "fc7" + top: "bbox_pred" + param { lr_mult: 1.0 } + param { lr_mult: 2.0 } + inner_product_param { + num_output: 84 + weight_filler { + type: "gaussian" + std: 0.001 + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "loss_cls" + type: "SoftmaxWithLoss" + bottom: "cls_score" + bottom: "labels" + propagate_down: 1 + propagate_down: 0 + top: "cls_loss" + loss_weight: 1 + loss_param { + ignore_label: -1 + normalize: true + } +} +layer { + name: "loss_bbox" + type: "SmoothL1Loss" + bottom: "bbox_pred" + bottom: "bbox_targets" + bottom: 'bbox_inside_weights' + bottom: 'bbox_outside_weights' + top: "bbox_loss" + loss_weight: 1 +} diff --git a/tools/README.md b/tools/README.md new file mode 100644 index 0000000..6fd4094 --- /dev/null +++ b/tools/README.md @@ -0,0 +1 @@ +Tools for training, testing, and compressing Fast R-CNN networks. 
diff --git a/tools/_init_paths.py b/tools/_init_paths.py new file mode 100644 index 0000000..f12404c --- /dev/null +++ b/tools/_init_paths.py @@ -0,0 +1,25 @@ +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Set up paths for Fast R-CNN.""" + +import os.path as osp +import sys + +def add_path(path): + if path not in sys.path: + sys.path.insert(0, path) + +this_dir = osp.dirname(__file__) + +# Add caffe to PYTHONPATH +caffe_path = osp.join(this_dir, '..', 'caffe', 'python') +add_path(caffe_path) + +# Add lib to PYTHONPATH +lib_path = osp.join(this_dir, '..', 'lib') +add_path(lib_path) diff --git a/tools/compress_net.py b/tools/compress_net.py new file mode 100755 index 0000000..e044e5b --- /dev/null +++ b/tools/compress_net.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Compress a Fast R-CNN network using truncated SVD.""" + +import _init_paths +import caffe +import argparse +import numpy as np +import os, sys + +def parse_args(): + """Parse input arguments.""" + parser = argparse.ArgumentParser(description='Compress a Fast R-CNN network') + parser.add_argument('--def', dest='prototxt', + help='prototxt file defining the uncompressed network', + default=None, type=str) + parser.add_argument('--def-svd', dest='prototxt_svd', + help='prototxt file defining the SVD compressed network', + default=None, type=str) + parser.add_argument('--net', dest='caffemodel', + help='model to compress', + default=None, type=str) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +def compress_weights(W, l): + """Compress the weight matrix W of an inner product (fully connected) layer + using truncated SVD. + + Parameters: + W: N x M weights matrix + l: number of singular values to retain + + Returns: + Ul, L: matrices such that W \approx Ul*L + """ + + # numpy doesn't seem to have a fast truncated SVD algorithm... 
+ # this could be faster + U, s, V = np.linalg.svd(W, full_matrices=False) + + Ul = U[:, :l] + sl = s[:l] + Vl = V[:l, :] + + L = np.dot(np.diag(sl), Vl) + return Ul, L + +def main(): + args = parse_args() + + # prototxt = 'models/VGG16/test.prototxt' + # caffemodel = 'snapshots/vgg16_fast_rcnn_iter_40000.caffemodel' + net = caffe.Net(args.prototxt, args.caffemodel, caffe.TEST) + + # prototxt_svd = 'models/VGG16/svd/test_fc6_fc7.prototxt' + # caffemodel = 'snapshots/vgg16_fast_rcnn_iter_40000.caffemodel' + net_svd = caffe.Net(args.prototxt_svd, args.caffemodel, caffe.TEST) + + print('Uncompressed network {} : {}'.format(args.prototxt, args.caffemodel)) + print('Compressed network prototxt {}'.format(args.prototxt_svd)) + + out = os.path.splitext(os.path.basename(args.caffemodel))[0] + '_svd' + out_dir = os.path.dirname(args.caffemodel) + + # Compress fc6 + if net_svd.params.has_key('fc6_L'): + l_fc6 = net_svd.params['fc6_L'][0].data.shape[0] + print(' fc6_L bottleneck size: {}'.format(l_fc6)) + + # uncompressed weights and biases + W_fc6 = net.params['fc6'][0].data + B_fc6 = net.params['fc6'][1].data + + print(' compressing fc6...') + Ul_fc6, L_fc6 = compress_weights(W_fc6, l_fc6) + + assert(len(net_svd.params['fc6_L']) == 1) + + # install compressed matrix factors (and original biases) + net_svd.params['fc6_L'][0].data[...] = L_fc6 + + net_svd.params['fc6_U'][0].data[...] = Ul_fc6 + net_svd.params['fc6_U'][1].data[...] = B_fc6 + + out += '_fc6_{}'.format(l_fc6) + + # Compress fc7 + if net_svd.params.has_key('fc7_L'): + l_fc7 = net_svd.params['fc7_L'][0].data.shape[0] + print ' fc7_L bottleneck size: {}'.format(l_fc7) + + W_fc7 = net.params['fc7'][0].data + B_fc7 = net.params['fc7'][1].data + + print(' compressing fc7...') + Ul_fc7, L_fc7 = compress_weights(W_fc7, l_fc7) + + assert(len(net_svd.params['fc7_L']) == 1) + + net_svd.params['fc7_L'][0].data[...] = L_fc7 + + net_svd.params['fc7_U'][0].data[...] = Ul_fc7 + net_svd.params['fc7_U'][1].data[...] = B_fc7 + + out += '_fc7_{}'.format(l_fc7) + + filename = '{}/{}.caffemodel'.format(out_dir, out) + net_svd.save(filename) + print 'Wrote svd model to: {:s}'.format(filename) + +if __name__ == '__main__': + main() diff --git a/tools/demo.py b/tools/demo.py new file mode 100755 index 0000000..631c68a --- /dev/null +++ b/tools/demo.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +""" +Demo script showing detections in sample images. + +See README.md for installation instructions before running. 
+""" + +import _init_paths +from fast_rcnn.config import cfg +from fast_rcnn.test import im_detect +from fast_rcnn.nms_wrapper import nms +from utils.timer import Timer +import matplotlib.pyplot as plt +import numpy as np +import scipy.io as sio +import caffe, os, sys, cv2 +import argparse + +CLASSES = ('__background__', + 'aeroplane', 'bicycle', 'bird', 'boat', + 'bottle', 'bus', 'car', 'cat', 'chair', + 'cow', 'diningtable', 'dog', 'horse', + 'motorbike', 'person', 'pottedplant', + 'sheep', 'sofa', 'train', 'tvmonitor') + +NETS = {'vgg16': ('VGG16', + 'VGG16_faster_rcnn_final.caffemodel'), + 'zf': ('ZF', + 'ZF_faster_rcnn_final.caffemodel')} + + +def vis_detections(im, class_name, dets, thresh=0.5): + """Draw detected bounding boxes.""" + inds = np.where(dets[:, -1] >= thresh)[0] + if len(inds) == 0: + return + + im = im[:, :, (2, 1, 0)] + fig, ax = plt.subplots(figsize=(12, 12)) + ax.imshow(im, aspect='equal') + for i in inds: + bbox = dets[i, :4] + score = dets[i, -1] + + ax.add_patch( + plt.Rectangle((bbox[0], bbox[1]), + bbox[2] - bbox[0], + bbox[3] - bbox[1], fill=False, + edgecolor='red', linewidth=3.5) + ) + ax.text(bbox[0], bbox[1] - 2, + '{:s} {:.3f}'.format(class_name, score), + bbox=dict(facecolor='blue', alpha=0.5), + fontsize=14, color='white') + + ax.set_title(('{} detections with ' + 'p({} | box) >= {:.1f}').format(class_name, class_name, + thresh), + fontsize=14) + plt.axis('off') + plt.tight_layout() + plt.draw() + +def demo(net, image_name): + """Detect object classes in an image using pre-computed object proposals.""" + + # Load the demo image + im_file = os.path.join(cfg.DATA_DIR, 'demo', image_name) + im = cv2.imread(im_file) + + # Detect all object classes and regress object bounds + timer = Timer() + timer.tic() + scores, boxes = im_detect(net, im) + timer.toc() + print ('Detection took {:.3f}s for ' + '{:d} object proposals').format(timer.total_time, boxes.shape[0]) + + # Visualize detections for each class + CONF_THRESH = 0.8 + NMS_THRESH = 0.3 + for cls_ind, cls in enumerate(CLASSES[1:]): + cls_ind += 1 # because we skipped background + cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)] + cls_scores = scores[:, cls_ind] + dets = np.hstack((cls_boxes, + cls_scores[:, np.newaxis])).astype(np.float32) + keep = nms(dets, NMS_THRESH) + dets = dets[keep, :] + vis_detections(im, cls, dets, thresh=CONF_THRESH) + +def parse_args(): + """Parse input arguments.""" + parser = argparse.ArgumentParser(description='Faster R-CNN demo') + parser.add_argument('--gpu', dest='gpu_id', help='GPU device id to use [0]', + default=0, type=int) + parser.add_argument('--cpu', dest='cpu_mode', + help='Use CPU mode (overrides --gpu)', + action='store_true') + parser.add_argument('--net', dest='demo_net', help='Network to use [vgg16]', + choices=NETS.keys(), default='vgg16') + + args = parser.parse_args() + + return args + +if __name__ == '__main__': + cfg.TEST.HAS_RPN = True # Use RPN for proposals + + args = parse_args() + + prototxt = os.path.join(cfg.MODELS_DIR, NETS[args.demo_net][0], + 'faster_rcnn_alt_opt', 'faster_rcnn_test.pt') + caffemodel = os.path.join(cfg.DATA_DIR, 'faster_rcnn_models', + NETS[args.demo_net][1]) + + if not os.path.isfile(caffemodel): + raise IOError(('{:s} not found.\nDid you run ./data/script/' + 'fetch_faster_rcnn_models.sh?').format(caffemodel)) + + if args.cpu_mode: + caffe.set_mode_cpu() + else: + caffe.set_mode_gpu() + caffe.set_device(args.gpu_id) + cfg.GPU_ID = args.gpu_id + net = caffe.Net(prototxt, caffemodel, caffe.TEST) + + print '\n\nLoaded network 
{:s}'.format(caffemodel) + + # Warmup on a dummy image + im = 128 * np.ones((300, 500, 3), dtype=np.uint8) + for i in xrange(2): + _, _= im_detect(net, im) + + im_names = ['000456.jpg', '000542.jpg', '001150.jpg', + '001763.jpg', '004545.jpg'] + for im_name in im_names: + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Demo for data/demo/{}'.format(im_name) + demo(net, im_name) + + plt.show() diff --git a/tools/eval_recall.py b/tools/eval_recall.py new file mode 100755 index 0000000..b1a59dc --- /dev/null +++ b/tools/eval_recall.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python + +import _init_paths +from fast_rcnn.config import cfg, cfg_from_file, cfg_from_list +from datasets.factory import get_imdb +import argparse +import time, os, sys +import numpy as np + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Test a Fast R-CNN network') + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to test', + default='voc_2007_test', type=str) + parser.add_argument('--method', dest='method', + help='proposal method', + default='selective_search', type=str) + parser.add_argument('--rpn-file', dest='rpn_file', + default=None, type=str) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +if __name__ == '__main__': + args = parse_args() + + print('Called with args:') + print(args) + + imdb = get_imdb(args.imdb_name) + imdb.set_proposal_method(args.method) + if args.rpn_file is not None: + imdb.config['rpn_file'] = args.rpn_file + + candidate_boxes = None + if 0: + import scipy.io as sio + filename = 'debug/stage1_rpn_voc_2007_test.mat' + raw_data = sio.loadmat(filename)['aboxes'].ravel() + candidate_boxes = raw_data + + ar, gt_overlaps, recalls, thresholds = \ + imdb.evaluate_recall(candidate_boxes=candidate_boxes) + print 'Method: {}'.format(args.method) + print 'AverageRec: {:.3f}'.format(ar) + + def recall_at(t): + ind = np.where(thresholds > t - 1e-5)[0][0] + assert np.isclose(thresholds[ind], t) + return recalls[ind] + + print 'Recall@0.5: {:.3f}'.format(recall_at(0.5)) + print 'Recall@0.6: {:.3f}'.format(recall_at(0.6)) + print 'Recall@0.7: {:.3f}'.format(recall_at(0.7)) + print 'Recall@0.8: {:.3f}'.format(recall_at(0.8)) + print 'Recall@0.9: {:.3f}'.format(recall_at(0.9)) + # print again for easy spreadsheet copying + print '{:.3f}'.format(ar) + print '{:.3f}'.format(recall_at(0.5)) + print '{:.3f}'.format(recall_at(0.6)) + print '{:.3f}'.format(recall_at(0.7)) + print '{:.3f}'.format(recall_at(0.8)) + print '{:.3f}'.format(recall_at(0.9)) diff --git a/tools/reval.py b/tools/reval.py new file mode 100755 index 0000000..905ec1b --- /dev/null +++ b/tools/reval.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Reval = re-eval. 
Re-evaluate saved detections.""" + +import _init_paths +from fast_rcnn.test import apply_nms +from fast_rcnn.config import cfg +from datasets.factory import get_imdb +import cPickle +import os, sys, argparse +import numpy as np + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Re-evaluate results') + parser.add_argument('output_dir', nargs=1, help='results directory', + type=str) + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to re-evaluate', + default='voc_2007_test', type=str) + parser.add_argument('--matlab', dest='matlab_eval', + help='use matlab for evaluation', + action='store_true') + parser.add_argument('--comp', dest='comp_mode', help='competition mode', + action='store_true') + parser.add_argument('--nms', dest='apply_nms', help='apply nms', + action='store_true') + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +def from_dets(imdb_name, output_dir, args): + imdb = get_imdb(imdb_name) + imdb.competition_mode(args.comp_mode) + imdb.config['matlab_eval'] = args.matlab_eval + with open(os.path.join(output_dir, 'detections.pkl'), 'rb') as f: + dets = cPickle.load(f) + + if args.apply_nms: + print 'Applying NMS to all detections' + nms_dets = apply_nms(dets, cfg.TEST.NMS) + else: + nms_dets = dets + + print 'Evaluating detections' + imdb.evaluate_detections(nms_dets, output_dir) + +if __name__ == '__main__': + args = parse_args() + + output_dir = os.path.abspath(args.output_dir[0]) + imdb_name = args.imdb_name + from_dets(imdb_name, output_dir, args) diff --git a/tools/rpn_generate.py b/tools/rpn_generate.py new file mode 100755 index 0000000..f8ca4a1 --- /dev/null +++ b/tools/rpn_generate.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Fast/er/ R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Generate RPN proposals.""" + +import _init_paths +import numpy as np +from fast_rcnn.config import cfg, cfg_from_file, cfg_from_list, get_output_dir +from datasets.factory import get_imdb +from rpn.generate import imdb_proposals +import cPickle +import caffe +import argparse +import pprint +import time, os, sys + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Test a Fast R-CNN network') + parser.add_argument('--gpu', dest='gpu_id', help='GPU id to use', + default=0, type=int) + parser.add_argument('--def', dest='prototxt', + help='prototxt file defining the network', + default=None, type=str) + parser.add_argument('--net', dest='caffemodel', + help='model to test', + default=None, type=str) + parser.add_argument('--cfg', dest='cfg_file', + help='optional config file', default=None, type=str) + parser.add_argument('--wait', dest='wait', + help='wait until net file exists', + default=True, type=bool) + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to test', + default='voc_2007_test', type=str) + parser.add_argument('--set', dest='set_cfgs', + help='set config keys', default=None, + nargs=argparse.REMAINDER) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +if __name__ == '__main__': + args = parse_args() + + print('Called with args:') + print(args) + + if args.cfg_file is not None: + cfg_from_file(args.cfg_file) + if args.set_cfgs is 
not None: + cfg_from_list(args.set_cfgs) + + cfg.GPU_ID = args.gpu_id + + # RPN test settings + cfg.TEST.RPN_PRE_NMS_TOP_N = -1 + cfg.TEST.RPN_POST_NMS_TOP_N = 2000 + + print('Using config:') + pprint.pprint(cfg) + + while not os.path.exists(args.caffemodel) and args.wait: + print('Waiting for {} to exist...'.format(args.caffemodel)) + time.sleep(10) + + caffe.set_mode_gpu() + caffe.set_device(args.gpu_id) + net = caffe.Net(args.prototxt, args.caffemodel, caffe.TEST) + net.name = os.path.splitext(os.path.basename(args.caffemodel))[0] + + imdb = get_imdb(args.imdb_name) + imdb_boxes = imdb_proposals(net, imdb) + + output_dir = get_output_dir(imdb, net) + rpn_file = os.path.join(output_dir, net.name + '_rpn_proposals.pkl') + with open(rpn_file, 'wb') as f: + cPickle.dump(imdb_boxes, f, cPickle.HIGHEST_PROTOCOL) + print 'Wrote RPN proposals to {}'.format(rpn_file) diff --git a/tools/test_net.py b/tools/test_net.py new file mode 100755 index 0000000..de4f12b --- /dev/null +++ b/tools/test_net.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Test a Fast R-CNN network on an image database.""" + +import _init_paths +from fast_rcnn.test import test_net +from fast_rcnn.config import cfg, cfg_from_file, cfg_from_list +from datasets.factory import get_imdb +import caffe +import argparse +import pprint +import time, os, sys + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Test a Fast R-CNN network') + parser.add_argument('--gpu', dest='gpu_id', help='GPU id to use', + default=0, type=int) + parser.add_argument('--def', dest='prototxt', + help='prototxt file defining the network', + default=None, type=str) + parser.add_argument('--net', dest='caffemodel', + help='model to test', + default=None, type=str) + parser.add_argument('--cfg', dest='cfg_file', + help='optional config file', default=None, type=str) + parser.add_argument('--wait', dest='wait', + help='wait until net file exists', + default=True, type=bool) + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to test', + default='voc_2007_test', type=str) + parser.add_argument('--comp', dest='comp_mode', help='competition mode', + action='store_true') + parser.add_argument('--set', dest='set_cfgs', + help='set config keys', default=None, + nargs=argparse.REMAINDER) + parser.add_argument('--vis', dest='vis', help='visualize detections', + action='store_true') + parser.add_argument('--num_dets', dest='max_per_image', + help='max number of detections per image', + default=100, type=int) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +if __name__ == '__main__': + args = parse_args() + + print('Called with args:') + print(args) + + if args.cfg_file is not None: + cfg_from_file(args.cfg_file) + if args.set_cfgs is not None: + cfg_from_list(args.set_cfgs) + + cfg.GPU_ID = args.gpu_id + + print('Using config:') + pprint.pprint(cfg) + + while not os.path.exists(args.caffemodel) and args.wait: + print('Waiting for {} to exist...'.format(args.caffemodel)) + time.sleep(10) + + caffe.set_mode_gpu() + caffe.set_device(args.gpu_id) + net = caffe.Net(args.prototxt, args.caffemodel, caffe.TEST) + net.name = os.path.splitext(os.path.basename(args.caffemodel))[0] + + imdb = 
get_imdb(args.imdb_name) + imdb.competition_mode(args.comp_mode) + if not cfg.TEST.HAS_RPN: + imdb.set_proposal_method(cfg.TEST.PROPOSAL_METHOD) + + test_net(net, imdb, max_per_image=args.max_per_image, vis=args.vis) diff --git a/tools/train_faster_rcnn_alt_opt.py b/tools/train_faster_rcnn_alt_opt.py new file mode 100755 index 0000000..e49844a --- /dev/null +++ b/tools/train_faster_rcnn_alt_opt.py @@ -0,0 +1,334 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Faster R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Train a Faster R-CNN network using alternating optimization. +This tool implements the alternating optimization algorithm described in our +NIPS 2015 paper ("Faster R-CNN: Towards Real-time Object Detection with Region +Proposal Networks." Shaoqing Ren, Kaiming He, Ross Girshick, Jian Sun.) +""" + +import _init_paths +from fast_rcnn.train import get_training_roidb, train_net +from fast_rcnn.config import cfg, cfg_from_file, cfg_from_list, get_output_dir +from datasets.factory import get_imdb +from rpn.generate import imdb_proposals +import argparse +import pprint +import numpy as np +import sys, os +import multiprocessing as mp +import cPickle +import shutil + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Train a Faster R-CNN network') + parser.add_argument('--gpu', dest='gpu_id', + help='GPU device id to use [0]', + default=0, type=int) + parser.add_argument('--net_name', dest='net_name', + help='network name (e.g., "ZF")', + default=None, type=str) + parser.add_argument('--weights', dest='pretrained_model', + help='initialize with pretrained model weights', + default=None, type=str) + parser.add_argument('--cfg', dest='cfg_file', + help='optional config file', + default=None, type=str) + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to train on', + default='voc_2007_trainval', type=str) + parser.add_argument('--set', dest='set_cfgs', + help='set config keys', default=None, + nargs=argparse.REMAINDER) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +def get_roidb(imdb_name, rpn_file=None): + imdb = get_imdb(imdb_name) + print 'Loaded dataset `{:s}` for training'.format(imdb.name) + imdb.set_proposal_method(cfg.TRAIN.PROPOSAL_METHOD) + print 'Set proposal method: {:s}'.format(cfg.TRAIN.PROPOSAL_METHOD) + if rpn_file is not None: + imdb.config['rpn_file'] = rpn_file + roidb = get_training_roidb(imdb) + return roidb, imdb + +def get_solvers(net_name): + # Faster R-CNN Alternating Optimization + n = 'faster_rcnn_alt_opt' + # Solver for each training stage + solvers = [[net_name, n, 'stage1_rpn_solver60k80k.pt'], + [net_name, n, 'stage1_fast_rcnn_solver30k40k.pt'], + [net_name, n, 'stage2_rpn_solver60k80k.pt'], + [net_name, n, 'stage2_fast_rcnn_solver30k40k.pt']] + solvers = [os.path.join(cfg.MODELS_DIR, *s) for s in solvers] + # Iterations for each training stage + max_iters = [80000, 40000, 80000, 40000] + # max_iters = [100, 100, 100, 100] + # Test prototxt for the RPN + rpn_test_prototxt = os.path.join( + cfg.MODELS_DIR, net_name, n, 'rpn_test.pt') + return solvers, max_iters, rpn_test_prototxt + +# ------------------------------------------------------------------------------ +# Pycaffe doesn't reliably free GPU memory when instantiated nets are discarded +# 
(e.g. "del net" in Python code). To work around this issue, each training +# stage is executed in a separate process using multiprocessing.Process. +# ------------------------------------------------------------------------------ + +def _init_caffe(cfg): + """Initialize pycaffe in a training process. + """ + + import caffe + # fix the random seeds (numpy and caffe) for reproducibility + np.random.seed(cfg.RNG_SEED) + caffe.set_random_seed(cfg.RNG_SEED) + # set up caffe + caffe.set_mode_gpu() + caffe.set_device(cfg.GPU_ID) + +def train_rpn(queue=None, imdb_name=None, init_model=None, solver=None, + max_iters=None, cfg=None): + """Train a Region Proposal Network in a separate training process. + """ + + # Not using any proposals, just ground-truth boxes + cfg.TRAIN.HAS_RPN = True + cfg.TRAIN.BBOX_REG = False # applies only to Fast R-CNN bbox regression + cfg.TRAIN.PROPOSAL_METHOD = 'gt' + cfg.TRAIN.IMS_PER_BATCH = 1 + print 'Init model: {}'.format(init_model) + print('Using config:') + pprint.pprint(cfg) + + import caffe + _init_caffe(cfg) + + roidb, imdb = get_roidb(imdb_name) + print 'roidb len: {}'.format(len(roidb)) + output_dir = get_output_dir(imdb) + print 'Output will be saved to `{:s}`'.format(output_dir) + + model_paths = train_net(solver, roidb, output_dir, + pretrained_model=init_model, + max_iters=max_iters) + # Cleanup all but the final model + for i in model_paths[:-1]: + os.remove(i) + rpn_model_path = model_paths[-1] + # Send final model path through the multiprocessing queue + queue.put({'model_path': rpn_model_path}) + +def rpn_generate(queue=None, imdb_name=None, rpn_model_path=None, cfg=None, + rpn_test_prototxt=None): + """Use a trained RPN to generate proposals. + """ + + cfg.TEST.RPN_PRE_NMS_TOP_N = -1 # no pre NMS filtering + cfg.TEST.RPN_POST_NMS_TOP_N = 2000 # limit top boxes after NMS + print 'RPN model: {}'.format(rpn_model_path) + print('Using config:') + pprint.pprint(cfg) + + import caffe + _init_caffe(cfg) + + # NOTE: the matlab implementation computes proposals on flipped images, too. + # We compute them on the image once and then flip the already computed + # proposals. This might cause a minor loss in mAP (less proposal jittering). + imdb = get_imdb(imdb_name) + print 'Loaded dataset `{:s}` for proposal generation'.format(imdb.name) + + # Load RPN and configure output directory + rpn_net = caffe.Net(rpn_test_prototxt, rpn_model_path, caffe.TEST) + output_dir = get_output_dir(imdb) + print 'Output will be saved to `{:s}`'.format(output_dir) + # Generate proposals on the imdb + rpn_proposals = imdb_proposals(rpn_net, imdb) + # Write proposals to disk and send the proposal file path through the + # multiprocessing queue + rpn_net_name = os.path.splitext(os.path.basename(rpn_model_path))[0] + rpn_proposals_path = os.path.join( + output_dir, rpn_net_name + '_proposals.pkl') + with open(rpn_proposals_path, 'wb') as f: + cPickle.dump(rpn_proposals, f, cPickle.HIGHEST_PROTOCOL) + print 'Wrote RPN proposals to {}'.format(rpn_proposals_path) + queue.put({'proposal_path': rpn_proposals_path}) + +def train_fast_rcnn(queue=None, imdb_name=None, init_model=None, solver=None, + max_iters=None, cfg=None, rpn_file=None): + """Train a Fast R-CNN using proposals generated by an RPN. 
+ """ + + cfg.TRAIN.HAS_RPN = False # not generating prosals on-the-fly + cfg.TRAIN.PROPOSAL_METHOD = 'rpn' # use pre-computed RPN proposals instead + cfg.TRAIN.IMS_PER_BATCH = 2 + print 'Init model: {}'.format(init_model) + print 'RPN proposals: {}'.format(rpn_file) + print('Using config:') + pprint.pprint(cfg) + + import caffe + _init_caffe(cfg) + + roidb, imdb = get_roidb(imdb_name, rpn_file=rpn_file) + output_dir = get_output_dir(imdb) + print 'Output will be saved to `{:s}`'.format(output_dir) + # Train Fast R-CNN + model_paths = train_net(solver, roidb, output_dir, + pretrained_model=init_model, + max_iters=max_iters) + # Cleanup all but the final model + for i in model_paths[:-1]: + os.remove(i) + fast_rcnn_model_path = model_paths[-1] + # Send Fast R-CNN model path over the multiprocessing queue + queue.put({'model_path': fast_rcnn_model_path}) + +if __name__ == '__main__': + args = parse_args() + + print('Called with args:') + print(args) + + if args.cfg_file is not None: + cfg_from_file(args.cfg_file) + if args.set_cfgs is not None: + cfg_from_list(args.set_cfgs) + cfg.GPU_ID = args.gpu_id + + # -------------------------------------------------------------------------- + # Pycaffe doesn't reliably free GPU memory when instantiated nets are + # discarded (e.g. "del net" in Python code). To work around this issue, each + # training stage is executed in a separate process using + # multiprocessing.Process. + # -------------------------------------------------------------------------- + + # queue for communicated results between processes + mp_queue = mp.Queue() + # solves, iters, etc. for each training stage + solvers, max_iters, rpn_test_prototxt = get_solvers(args.net_name) + + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Stage 1 RPN, init from ImageNet model' + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + + cfg.TRAIN.SNAPSHOT_INFIX = 'stage1' + mp_kwargs = dict( + queue=mp_queue, + imdb_name=args.imdb_name, + init_model=args.pretrained_model, + solver=solvers[0], + max_iters=max_iters[0], + cfg=cfg) + p = mp.Process(target=train_rpn, kwargs=mp_kwargs) + p.start() + rpn_stage1_out = mp_queue.get() + p.join() + + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Stage 1 RPN, generate proposals' + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + + mp_kwargs = dict( + queue=mp_queue, + imdb_name=args.imdb_name, + rpn_model_path=str(rpn_stage1_out['model_path']), + cfg=cfg, + rpn_test_prototxt=rpn_test_prototxt) + p = mp.Process(target=rpn_generate, kwargs=mp_kwargs) + p.start() + rpn_stage1_out['proposal_path'] = mp_queue.get()['proposal_path'] + p.join() + + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Stage 1 Fast R-CNN using RPN proposals, init from ImageNet model' + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + + cfg.TRAIN.SNAPSHOT_INFIX = 'stage1' + mp_kwargs = dict( + queue=mp_queue, + imdb_name=args.imdb_name, + init_model=args.pretrained_model, + solver=solvers[1], + max_iters=max_iters[1], + cfg=cfg, + rpn_file=rpn_stage1_out['proposal_path']) + p = mp.Process(target=train_fast_rcnn, kwargs=mp_kwargs) + p.start() + fast_rcnn_stage1_out = mp_queue.get() + p.join() + + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Stage 2 RPN, init from stage 1 Fast R-CNN model' + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + + cfg.TRAIN.SNAPSHOT_INFIX = 'stage2' + mp_kwargs = dict( + 
queue=mp_queue, + imdb_name=args.imdb_name, + init_model=str(fast_rcnn_stage1_out['model_path']), + solver=solvers[2], + max_iters=max_iters[2], + cfg=cfg) + p = mp.Process(target=train_rpn, kwargs=mp_kwargs) + p.start() + rpn_stage2_out = mp_queue.get() + p.join() + + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Stage 2 RPN, generate proposals' + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + + mp_kwargs = dict( + queue=mp_queue, + imdb_name=args.imdb_name, + rpn_model_path=str(rpn_stage2_out['model_path']), + cfg=cfg, + rpn_test_prototxt=rpn_test_prototxt) + p = mp.Process(target=rpn_generate, kwargs=mp_kwargs) + p.start() + rpn_stage2_out['proposal_path'] = mp_queue.get()['proposal_path'] + p.join() + + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + print 'Stage 2 Fast R-CNN, init from stage 2 RPN R-CNN model' + print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + + cfg.TRAIN.SNAPSHOT_INFIX = 'stage2' + mp_kwargs = dict( + queue=mp_queue, + imdb_name=args.imdb_name, + init_model=str(rpn_stage2_out['model_path']), + solver=solvers[3], + max_iters=max_iters[3], + cfg=cfg, + rpn_file=rpn_stage2_out['proposal_path']) + p = mp.Process(target=train_fast_rcnn, kwargs=mp_kwargs) + p.start() + fast_rcnn_stage2_out = mp_queue.get() + p.join() + + # Create final model (just a copy of the last stage) + final_path = os.path.join( + os.path.dirname(fast_rcnn_stage2_out['model_path']), + args.net_name + '_faster_rcnn_final.caffemodel') + print 'cp {} -> {}'.format( + fast_rcnn_stage2_out['model_path'], final_path) + shutil.copy(fast_rcnn_stage2_out['model_path'], final_path) + print 'Final model: {}'.format(final_path) diff --git a/tools/train_net.py b/tools/train_net.py new file mode 100755 index 0000000..622a95d --- /dev/null +++ b/tools/train_net.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +"""Train a Fast R-CNN network on a region of interest database.""" + +import _init_paths +from fast_rcnn.train import get_training_roidb, train_net +from fast_rcnn.config import cfg, cfg_from_file, cfg_from_list, get_output_dir +from datasets.factory import get_imdb +import datasets.imdb +import caffe +import argparse +import pprint +import numpy as np +import sys + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Train a Fast R-CNN network') + parser.add_argument('--gpu', dest='gpu_id', + help='GPU device id to use [0]', + default=0, type=int) + parser.add_argument('--solver', dest='solver', + help='solver prototxt', + default=None, type=str) + parser.add_argument('--iters', dest='max_iters', + help='number of iterations to train', + default=40000, type=int) + parser.add_argument('--weights', dest='pretrained_model', + help='initialize with pretrained model weights', + default=None, type=str) + parser.add_argument('--cfg', dest='cfg_file', + help='optional config file', + default=None, type=str) + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to train on', + default='voc_2007_trainval', type=str) + parser.add_argument('--rand', dest='randomize', + help='randomize (do not use a fixed seed)', + action='store_true') + parser.add_argument('--set', dest='set_cfgs', + help='set config keys', 
default=None, + nargs=argparse.REMAINDER) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +def combined_roidb(imdb_names): + def get_roidb(imdb_name): + imdb = get_imdb(imdb_name) + print 'Loaded dataset `{:s}` for training'.format(imdb.name) + imdb.set_proposal_method(cfg.TRAIN.PROPOSAL_METHOD) + print 'Set proposal method: {:s}'.format(cfg.TRAIN.PROPOSAL_METHOD) + roidb = get_training_roidb(imdb) + return roidb + + roidbs = [get_roidb(s) for s in imdb_names.split('+')] + roidb = roidbs[0] + if len(roidbs) > 1: + for r in roidbs[1:]: + roidb.extend(r) + imdb = datasets.imdb.imdb(imdb_names) + else: + imdb = get_imdb(imdb_names) + return imdb, roidb + +if __name__ == '__main__': + args = parse_args() + + print('Called with args:') + print(args) + + if args.cfg_file is not None: + cfg_from_file(args.cfg_file) + if args.set_cfgs is not None: + cfg_from_list(args.set_cfgs) + + cfg.GPU_ID = args.gpu_id + + print('Using config:') + pprint.pprint(cfg) + + if not args.randomize: + # fix the random seeds (numpy and caffe) for reproducibility + np.random.seed(cfg.RNG_SEED) + caffe.set_random_seed(cfg.RNG_SEED) + + # set up caffe + caffe.set_mode_gpu() + caffe.set_device(args.gpu_id) + + imdb, roidb = combined_roidb(args.imdb_name) + print '{:d} roidb entries'.format(len(roidb)) + + output_dir = get_output_dir(imdb) + print 'Output will be saved to `{:s}`'.format(output_dir) + + train_net(args.solver, roidb, output_dir, + pretrained_model=args.pretrained_model, + max_iters=args.max_iters) diff --git a/tools/train_svms.py b/tools/train_svms.py new file mode 100755 index 0000000..498bbf2 --- /dev/null +++ b/tools/train_svms.py @@ -0,0 +1,353 @@ +#!/usr/bin/env python + +# -------------------------------------------------------- +# Fast R-CNN +# Copyright (c) 2015 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ross Girshick +# -------------------------------------------------------- + +""" +Train post-hoc SVMs using the algorithm and hyper-parameters from +traditional R-CNN. +""" + +import _init_paths +from fast_rcnn.config import cfg, cfg_from_file +from datasets.factory import get_imdb +from fast_rcnn.test import im_detect +from utils.timer import Timer +import caffe +import argparse +import pprint +import numpy as np +import numpy.random as npr +import cv2 +from sklearn import svm +import os, sys + +class SVMTrainer(object): + """ + Trains post-hoc detection SVMs for all classes using the algorithm + and hyper-parameters of traditional R-CNN. 
+ """ + + def __init__(self, net, imdb): + self.imdb = imdb + self.net = net + self.layer = 'fc7' + self.hard_thresh = -1.0001 + self.neg_iou_thresh = 0.3 + + dim = net.params['cls_score'][0].data.shape[1] + scale = self._get_feature_scale() + print('Feature dim: {}'.format(dim)) + print('Feature scale: {:.3f}'.format(scale)) + self.trainers = [SVMClassTrainer(cls, dim, feature_scale=scale) + for cls in imdb.classes] + + def _get_feature_scale(self, num_images=100): + TARGET_NORM = 20.0 # Magic value from traditional R-CNN + _t = Timer() + roidb = self.imdb.roidb + total_norm = 0.0 + count = 0.0 + inds = npr.choice(xrange(self.imdb.num_images), size=num_images, + replace=False) + for i_, i in enumerate(inds): + im = cv2.imread(self.imdb.image_path_at(i)) + if roidb[i]['flipped']: + im = im[:, ::-1, :] + _t.tic() + scores, boxes = im_detect(self.net, im, roidb[i]['boxes']) + _t.toc() + feat = self.net.blobs[self.layer].data + total_norm += np.sqrt((feat ** 2).sum(axis=1)).sum() + count += feat.shape[0] + print('{}/{}: avg feature norm: {:.3f}'.format(i_ + 1, num_images, + total_norm / count)) + + return TARGET_NORM * 1.0 / (total_norm / count) + + def _get_pos_counts(self): + counts = np.zeros((len(self.imdb.classes)), dtype=np.int) + roidb = self.imdb.roidb + for i in xrange(len(roidb)): + for j in xrange(1, self.imdb.num_classes): + I = np.where(roidb[i]['gt_classes'] == j)[0] + counts[j] += len(I) + + for j in xrange(1, self.imdb.num_classes): + print('class {:s} has {:d} positives'. + format(self.imdb.classes[j], counts[j])) + + return counts + + def get_pos_examples(self): + counts = self._get_pos_counts() + for i in xrange(len(counts)): + self.trainers[i].alloc_pos(counts[i]) + + _t = Timer() + roidb = self.imdb.roidb + num_images = len(roidb) + # num_images = 100 + for i in xrange(num_images): + im = cv2.imread(self.imdb.image_path_at(i)) + if roidb[i]['flipped']: + im = im[:, ::-1, :] + gt_inds = np.where(roidb[i]['gt_classes'] > 0)[0] + gt_boxes = roidb[i]['boxes'][gt_inds] + _t.tic() + scores, boxes = im_detect(self.net, im, gt_boxes) + _t.toc() + feat = self.net.blobs[self.layer].data + for j in xrange(1, self.imdb.num_classes): + cls_inds = np.where(roidb[i]['gt_classes'][gt_inds] == j)[0] + if len(cls_inds) > 0: + cls_feat = feat[cls_inds, :] + self.trainers[j].append_pos(cls_feat) + + print 'get_pos_examples: {:d}/{:d} {:.3f}s' \ + .format(i + 1, len(roidb), _t.average_time) + + def initialize_net(self): + # Start all SVM parameters at zero + self.net.params['cls_score'][0].data[...] = 0 + self.net.params['cls_score'][1].data[...] = 0 + + # Initialize SVMs in a smart way. Not doing this because its such + # a good initialization that we might not learn something close to + # the SVM solution. 
+# # subtract background weights and biases for the foreground classes +# w_bg = self.net.params['cls_score'][0].data[0, :] +# b_bg = self.net.params['cls_score'][1].data[0] +# self.net.params['cls_score'][0].data[1:, :] -= w_bg +# self.net.params['cls_score'][1].data[1:] -= b_bg +# # set the background weights and biases to 0 (where they shall remain) +# self.net.params['cls_score'][0].data[0, :] = 0 +# self.net.params['cls_score'][1].data[0] = 0 + + def update_net(self, cls_ind, w, b): + self.net.params['cls_score'][0].data[cls_ind, :] = w + self.net.params['cls_score'][1].data[cls_ind] = b + + def train_with_hard_negatives(self): + _t = Timer() + roidb = self.imdb.roidb + num_images = len(roidb) + # num_images = 100 + for i in xrange(num_images): + im = cv2.imread(self.imdb.image_path_at(i)) + if roidb[i]['flipped']: + im = im[:, ::-1, :] + _t.tic() + scores, boxes = im_detect(self.net, im, roidb[i]['boxes']) + _t.toc() + feat = self.net.blobs[self.layer].data + for j in xrange(1, self.imdb.num_classes): + hard_inds = \ + np.where((scores[:, j] > self.hard_thresh) & + (roidb[i]['gt_overlaps'][:, j].toarray().ravel() < + self.neg_iou_thresh))[0] + if len(hard_inds) > 0: + hard_feat = feat[hard_inds, :].copy() + new_w_b = \ + self.trainers[j].append_neg_and_retrain(feat=hard_feat) + if new_w_b is not None: + self.update_net(j, new_w_b[0], new_w_b[1]) + + print(('train_with_hard_negatives: ' + '{:d}/{:d} {:.3f}s').format(i + 1, len(roidb), + _t.average_time)) + + def train(self): + # Initialize SVMs using + # a. w_i = fc8_w_i - fc8_w_0 + # b. b_i = fc8_b_i - fc8_b_0 + # c. Install SVMs into net + self.initialize_net() + + # Pass over roidb to count num positives for each class + # a. Pre-allocate arrays for positive feature vectors + # Pass over roidb, computing features for positives only + self.get_pos_examples() + + # Pass over roidb + # a. Compute cls_score with forward pass + # b. For each class + # i. Select hard negatives + # ii. Add them to cache + # c. For each class + # i. If SVM retrain criteria met, update SVM + # ii. Install new SVM into net + self.train_with_hard_negatives() + + # One final SVM retraining for each class + # Install SVMs into net + for j in xrange(1, self.imdb.num_classes): + new_w_b = self.trainers[j].append_neg_and_retrain(force=True) + self.update_net(j, new_w_b[0], new_w_b[1]) + +class SVMClassTrainer(object): + """Manages post-hoc SVM training for a single object class.""" + + def __init__(self, cls, dim, feature_scale=1.0, + C=0.001, B=10.0, pos_weight=2.0): + self.pos = np.zeros((0, dim), dtype=np.float32) + self.neg = np.zeros((0, dim), dtype=np.float32) + self.B = B + self.C = C + self.cls = cls + self.pos_weight = pos_weight + self.dim = dim + self.feature_scale = feature_scale + self.svm = svm.LinearSVC(C=C, class_weight={1: 2, -1: 1}, + intercept_scaling=B, verbose=1, + penalty='l2', loss='l1', + random_state=cfg.RNG_SEED, dual=True) + self.pos_cur = 0 + self.num_neg_added = 0 + self.retrain_limit = 2000 + self.evict_thresh = -1.1 + self.loss_history = [] + + def alloc_pos(self, count): + self.pos_cur = 0 + self.pos = np.zeros((count, self.dim), dtype=np.float32) + + def append_pos(self, feat): + num = feat.shape[0] + self.pos[self.pos_cur:self.pos_cur + num, :] = feat + self.pos_cur += num + + def train(self): + print('>>> Updating {} detector <<<'.format(self.cls)) + num_pos = self.pos.shape[0] + num_neg = self.neg.shape[0] + print('Cache holds {} pos examples and {} neg examples'. 
+ format(num_pos, num_neg)) + X = np.vstack((self.pos, self.neg)) * self.feature_scale + y = np.hstack((np.ones(num_pos), + -np.ones(num_neg))) + self.svm.fit(X, y) + w = self.svm.coef_ + b = self.svm.intercept_[0] + scores = self.svm.decision_function(X) + pos_scores = scores[:num_pos] + neg_scores = scores[num_pos:] + + pos_loss = (self.C * self.pos_weight * + np.maximum(0, 1 - pos_scores).sum()) + neg_loss = self.C * np.maximum(0, 1 + neg_scores).sum() + reg_loss = 0.5 * np.dot(w.ravel(), w.ravel()) + 0.5 * b ** 2 + tot_loss = pos_loss + neg_loss + reg_loss + self.loss_history.append((tot_loss, pos_loss, neg_loss, reg_loss)) + + for i, losses in enumerate(self.loss_history): + print((' {:d}: obj val: {:.3f} = {:.3f} ' + '(pos) + {:.3f} (neg) + {:.3f} (reg)').format(i, *losses)) + + # Sanity check + scores_ret = ( + X * 1.0 / self.feature_scale).dot(w.T * self.feature_scale) + b + assert np.allclose(scores, scores_ret[:, 0], atol=1e-5), \ + "Scores from returned model don't match decision function" + + return ((w * self.feature_scale, b), pos_scores, neg_scores) + + def append_neg_and_retrain(self, feat=None, force=False): + if feat is not None: + num = feat.shape[0] + self.neg = np.vstack((self.neg, feat)) + self.num_neg_added += num + if self.num_neg_added > self.retrain_limit or force: + self.num_neg_added = 0 + new_w_b, pos_scores, neg_scores = self.train() + # scores = np.dot(self.neg, new_w_b[0].T) + new_w_b[1] + # easy_inds = np.where(neg_scores < self.evict_thresh)[0] + not_easy_inds = np.where(neg_scores >= self.evict_thresh)[0] + if len(not_easy_inds) > 0: + self.neg = self.neg[not_easy_inds, :] + # self.neg = np.delete(self.neg, easy_inds) + print(' Pruning easy negatives') + print(' Cache holds {} pos examples and {} neg examples'. + format(self.pos.shape[0], self.neg.shape[0])) + print(' {} pos support vectors'.format((pos_scores <= 1).sum())) + print(' {} neg support vectors'.format((neg_scores >= -1).sum())) + return new_w_b + else: + return None + +def parse_args(): + """ + Parse input arguments + """ + parser = argparse.ArgumentParser(description='Train SVMs (old skool)') + parser.add_argument('--gpu', dest='gpu_id', help='GPU device id to use [0]', + default=0, type=int) + parser.add_argument('--def', dest='prototxt', + help='prototxt file defining the network', + default=None, type=str) + parser.add_argument('--net', dest='caffemodel', + help='model to test', + default=None, type=str) + parser.add_argument('--cfg', dest='cfg_file', + help='optional config file', default=None, type=str) + parser.add_argument('--imdb', dest='imdb_name', + help='dataset to train on', + default='voc_2007_trainval', type=str) + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(1) + + args = parser.parse_args() + return args + +if __name__ == '__main__': + # Must turn this off to prevent issues when digging into the net blobs to + # pull out features (tricky!) 
+ cfg.DEDUP_BOXES = 0 + + # Must turn this on because we use the test im_detect() method to harvest + # hard negatives + cfg.TEST.SVM = True + + args = parse_args() + + print('Called with args:') + print(args) + + if args.cfg_file is not None: + cfg_from_file(args.cfg_file) + + print('Using config:') + pprint.pprint(cfg) + + # fix the random seed for reproducibility + np.random.seed(cfg.RNG_SEED) + + # set up caffe + caffe.set_mode_gpu() + if args.gpu_id is not None: + caffe.set_device(args.gpu_id) + net = caffe.Net(args.prototxt, args.caffemodel, caffe.TEST) + net.name = os.path.splitext(os.path.basename(args.caffemodel))[0] + out = os.path.splitext(os.path.basename(args.caffemodel))[0] + '_svm' + out_dir = os.path.dirname(args.caffemodel) + + imdb = get_imdb(args.imdb_name) + print 'Loaded dataset `{:s}` for training'.format(imdb.name) + + # enhance roidb to contain flipped examples + if cfg.TRAIN.USE_FLIPPED: + print 'Appending horizontally-flipped training examples...' + imdb.append_flipped_images() + print 'done' + + SVMTrainer(net, imdb).train() + + filename = '{}/{}.caffemodel'.format(out_dir, out) + net.save(filename) + print 'Wrote svm model to: {:s}'.format(filename)
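For reference, the truncated-SVD compression in tools/compress_net.py above splits one fully connected layer into two (fc6 -> fc6_L followed by fc6_U, with the original bias kept on fc6_U). A minimal NumPy sketch of why the layer's outputs are preserved follows; the toy shapes and the exactly rank-l weight matrix are assumptions made purely for illustration, and the InnerProduct convention y = x.dot(W.T) + b is the usual Caffe one.

# Hedged sketch (not part of the patch): with W ~= Ul.dot(L), the pair
# fc*_L (weights L, no bias) followed by fc*_U (weights Ul, original bias)
# reproduces the original fully connected layer. W is built exactly rank-l
# here so the check comes out numerically exact.
import numpy as np

def compress_weights(W, l):
    """Return Ul (N x l) and L (l x M) with W ~= Ul.dot(L), as in compress_net.py."""
    U, s, V = np.linalg.svd(W, full_matrices=False)
    return U[:, :l], np.dot(np.diag(s[:l]), V[:l, :])

rng = np.random.RandomState(0)
l = 64
W = rng.randn(256, l).dot(rng.randn(l, 512))   # toy rank-l weight matrix
b = rng.randn(256)                              # original bias
x = rng.randn(8, 512)                           # a small batch of features

Ul, L = compress_weights(W, l)
y_full = x.dot(W.T) + b                 # original fc layer
y_split = x.dot(L.T).dot(Ul.T) + b      # fc*_L followed by fc*_U
print('max abs difference: {:.2e}'.format(np.max(np.abs(y_full - y_split))))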