From c50b1331ec60de12465bdadf0b12e7bd53fc58e6 Mon Sep 17 00:00:00 2001 From: ad-daniel <44834743+ad-daniel@users.noreply.github.com> Date: Mon, 3 Jan 2022 20:27:02 +0100 Subject: [PATCH 1/9] Add automatic nightly sanity check (#192) * Add automatic nightly check * fix * Add badge * remove conditions * Remove develop badge * restore name --- .../{tests_sources.yml => tests_suite.yml} | 4 +- .github/workflows/tests_suite_develop.yml | 109 ++++++++++++++++++ README.md | 20 ++-- 3 files changed, 122 insertions(+), 11 deletions(-) rename .github/workflows/{tests_sources.yml => tests_suite.yml} (98%) create mode 100644 .github/workflows/tests_suite_develop.yml diff --git a/.github/workflows/tests_sources.yml b/.github/workflows/tests_suite.yml similarity index 98% rename from .github/workflows/tests_sources.yml rename to .github/workflows/tests_suite.yml index c02aed78ca..1a0ac683d1 100644 --- a/.github/workflows/tests_sources.yml +++ b/.github/workflows/tests_suite.yml @@ -1,8 +1,10 @@ -name: Test Sources +name: Test Suite (master) on: pull_request: types: [opened, synchronize, reopened, ready_for_review, labeled, unlabeled] + schedule: + - cron: '0 23 * * *' defaults: run: diff --git a/.github/workflows/tests_suite_develop.yml b/.github/workflows/tests_suite_develop.yml new file mode 100644 index 0000000000..46c3188938 --- /dev/null +++ b/.github/workflows/tests_suite_develop.yml @@ -0,0 +1,109 @@ +name: Test Suite (develop) + +on: + schedule: + - cron: '0 23 * * *' + +defaults: + run: + shell: bash + +jobs: + cleanup-runs: + if: ${{ contains(github.event.pull_request.labels.*.name, 'test sources') || contains(github.event.pull_request.labels.*.name, 'test tools') }} + runs-on: ubuntu-latest + steps: + - uses: rokroskar/workflow-run-cleanup-action@master + env: + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" + test-sources: + needs: cleanup-runs + if: ${{ contains(github.event.pull_request.labels.*.name, 'test sources') }} + strategy: + matrix: + os: [ubuntu-18.04, ubuntu-20.04, macos-10.15] + include: + - os: ubuntu-18.04 + DEPENDENCIES_INSTALLATION: "sudo apt -y install clang-format-10 cppcheck" + - os: ubuntu-20.04 + DEPENDENCIES_INSTALLATION: "sudo apt -y install clang-format-10 cppcheck" + - os: macos-10.15 + DEPENDENCIES_INSTALLATION: "brew install clang-format cppcheck" + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v2 + if: matrix.os == 'ubuntu-20.04' || github.event.pull_request.draft == false + with: + submodules: true + ref: develop + - name: Set up Python 3.8 + if: matrix.os == 'ubuntu-20.04' || github.event.pull_request.draft == false + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Test Sources + if: matrix.os == 'ubuntu-20.04' || github.event.pull_request.draft == false + run: | + ${{ matrix.DEPENDENCIES_INSTALLATION }} + export OPENDR_HOME=$PWD + export OPENDR_DEVICE=cpu + pip install -r tests/requirements.txt + python -m unittest discover -s tests + test-tools: + needs: cleanup-runs + if: ${{ contains(github.event.pull_request.labels.*.name, 'test tools') }} + strategy: + matrix: + os: [ubuntu-20.04] + package: + - engine + - utils + - perception/activity_recognition + - perception/compressive_learning + - perception/face_recognition + - perception/heart_anomaly_detection + - perception/multimodal_human_centric + - perception/object_tracking_2d + - perception/object_detection_3d + - perception/pose_estimation + - perception/speech_recognition + - perception/skeleton_based_action_recognition + - perception/semantic_segmentation + - 
control/mobile_manipulation + - perception/object_detection_2d + - simulation/human_model_generation + - perception/facial_expression_recognition/landmark_based_facial_expression_recognition + - control/single_demo_grasp + # - perception/object_tracking_3d + include: + - os: ubuntu-20.04 + DEPENDENCIES_INSTALLATION: "sudo sh -c 'echo \"deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main\" > /etc/apt/sources.list.d/ros-latest.list' \ + && curl -s https://raw.githubusercontent.com/ros/rosdistro/master/ros.asc | sudo apt-key add -" + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v2 + with: + submodules: true + ref: develop + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Test Tools + if: ${{ contains(github.event.pull_request.labels.*.name, 'test tools') }} + run: | + ${{ matrix.DEPENDENCIES_INSTALLATION }} + export OPENDR_HOME=$PWD + export OPENDR_DEVICE=cpu + export PYTHONPATH=$OPENDR_HOME/src:$PYTHONPATH + export DISABLE_BCOLZ_AVX2=true + export ROS_DISTRO=noetic + make install_compilation_dependencies + make install_runtime_dependencies + pip install -r tests/sources/requirements.txt + if [ ${{ matrix.package }} = "ctests" ]; then + make ctests + else + source tests/sources/tools/control/mobile_manipulation/run_ros.sh + python -m unittest discover -s tests/sources/tools/${{ matrix.package }} + fi diff --git a/README.md b/README.md index 8aa24e3376..080b69971a 100644 --- a/README.md +++ b/README.md @@ -8,28 +8,28 @@ ______________________________________________________________________

   Website •
   About •
-  Installation •
+  Installation •
   Using OpenDR toolkit •
   Examples •
-  Roadmap •
+  Roadmap •
   License

[![License](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) - +[![Test Suite (master)](https://github.com/opendr-eu/opendr/actions/workflows/tests_suite.yml/badge.svg)](https://github.com/opendr-eu/opendr/actions/workflows/tests_suite.yml) ## About -The aim of [OpenDR Project](https://opendr.eu) is to develop a **modular, open** and **non-proprietary toolkit** for core **robotic functionalities** by harnessing **deep learning** to provide advanced perception and cognition capabilities, meeting in this way the general requirements of robotics applications in the applications areas of healthcare, agri-food and agile production. -OpenDR provides the means to link the **robotics applications to software libraries** (deep learning frameworks, e.g., [PyTorch](https://pytorch.org/) and [Tensorflow](https://www.tensorflow.org/)) to the **operating environment ([ROS](https://www.ros.org/))**. +The aim of [OpenDR Project](https://opendr.eu) is to develop a **modular, open** and **non-proprietary toolkit** for core **robotic functionalities** by harnessing **deep learning** to provide advanced perception and cognition capabilities, meeting in this way the general requirements of robotics applications in the applications areas of healthcare, agri-food and agile production. +OpenDR provides the means to link the **robotics applications to software libraries** (deep learning frameworks, e.g., [PyTorch](https://pytorch.org/) and [Tensorflow](https://www.tensorflow.org/)) to the **operating environment ([ROS](https://www.ros.org/))**. OpenDR focuses on the **AI and Cognition core technology** in order to provide tools that make robotic systems cognitive, giving them the ability to: 1. interact with people and environments by developing deep learning methods for **human centric and environment active perception and cognition**, -2. **learn and categorize** by developing deep learning **tools for training and inference in common robotics settings**, and +2. **learn and categorize** by developing deep learning **tools for training and inference in common robotics settings**, and 3. **make decisions and derive knowledge** by developing deep learning tools for cognitive robot action and decision making. -As a result, the developed OpenDR toolkit will also enable cooperative human-robot interaction as well as the development of cognitive mechatronics where sensing and actuation are closely coupled with cognitive systems thus contributing to another two core technologies beyond AI and Cognition. -OpenDR aims to develop, train, deploy and evaluate deep learning models that improve the technical capabilities of the core technologies beyond the current state of the art. +As a result, the developed OpenDR toolkit will also enable cooperative human-robot interaction as well as the development of cognitive mechatronics where sensing and actuation are closely coupled with cognitive systems thus contributing to another two core technologies beyond AI and Cognition. +OpenDR aims to develop, train, deploy and evaluate deep learning models that improve the technical capabilities of the core technologies beyond the current state of the art. 
## Installing OpenDR Toolkit @@ -42,7 +42,7 @@ You can find detailed installation instruction in the [documentation](docs/refer ## Using OpenDR toolkit OpenDR provides an intuitive and easy to use **[Python interface](src/opendr)**, a **[C API](src/c_api) for performance critical application**, a wealth of **[usage examples and supporting tools](projects)**, as well as **ready-to-use [ROS nodes](projects/opendr_ws)**. -OpenDR is built to support [Webots Open Source Robot Simulator](https://cyberbotics.com/), while it also extensively follows industry standards, such as [ONNX model format](https://onnx.ai/) and [OpenAI Gym Interface](https://gym.openai.com/). +OpenDR is built to support [Webots Open Source Robot Simulator](https://cyberbotics.com/), while it also extensively follows industry standards, such as [ONNX model format](https://onnx.ai/) and [OpenAI Gym Interface](https://gym.openai.com/). You can find detailed documentation in OpenDR [wiki](https://github.com/tasostefas/opendr_internal/wiki), as well as in the [tools index](docs/reference/index.md). ## Roadmap @@ -52,7 +52,7 @@ OpenDR has the following roadmap: - **v3.0 (2023)**: Active perception-enabled deep learning tools for improved robotic perception ## How to contribute -Please follow the instructions provided in the [wiki](https://github.com/tasostefas/opendr_internal/wiki). +Please follow the instructions provided in the [wiki](https://github.com/tasostefas/opendr_internal/wiki). ## Acknowledgments From feccc9bf596badb2fcd3d4559cd1a7dcc9cca9f3 Mon Sep 17 00:00:00 2001 From: ad-daniel <44834743+ad-daniel@users.noreply.github.com> Date: Tue, 4 Jan 2022 12:53:14 +0100 Subject: [PATCH 2/9] Fix nightly check (#193) * Fix nightly check * fix test --- .github/workflows/tests_suite.yml | 9 ++++----- .github/workflows/tests_suite_develop.yml | 9 ++++----- .../perception/facial_expression_recognition/__init__.py | 0 .../__init__.py | 0 4 files changed, 8 insertions(+), 10 deletions(-) create mode 100644 tests/sources/tools/perception/facial_expression_recognition/__init__.py create mode 100644 tests/sources/tools/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/__init__.py diff --git a/.github/workflows/tests_suite.yml b/.github/workflows/tests_suite.yml index 1a0ac683d1..7d0503a429 100644 --- a/.github/workflows/tests_suite.yml +++ b/.github/workflows/tests_suite.yml @@ -12,7 +12,7 @@ defaults: jobs: cleanup-runs: - if: ${{ contains(github.event.pull_request.labels.*.name, 'test sources') || contains(github.event.pull_request.labels.*.name, 'test tools') }} + if: ${{ contains(github.event.pull_request.labels.*.name, 'test sources') || contains(github.event.pull_request.labels.*.name, 'test tools') || github.event_name == 'schedule' }} runs-on: ubuntu-latest steps: - uses: rokroskar/workflow-run-cleanup-action@master @@ -20,7 +20,7 @@ jobs: GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" test-sources: needs: cleanup-runs - if: ${{ contains(github.event.pull_request.labels.*.name, 'test sources') }} + if: ${{ contains(github.event.pull_request.labels.*.name, 'test sources') || github.event_name == 'schedule' }} strategy: matrix: os: [ubuntu-18.04, ubuntu-20.04, macos-10.15] @@ -52,7 +52,7 @@ jobs: python -m unittest discover -s tests test-tools: needs: cleanup-runs - if: ${{ contains(github.event.pull_request.labels.*.name, 'test tools') }} + if: ${{ contains(github.event.pull_request.labels.*.name, 'test tools') || github.event_name == 'schedule' }} strategy: matrix: os: [ubuntu-20.04] @@ -73,7 
+73,7 @@ jobs: - control/mobile_manipulation - perception/object_detection_2d - simulation/human_model_generation - - perception/facial_expression_recognition/landmark_based_facial_expression_recognition + - perception/facial_expression_recognition - control/single_demo_grasp # - perception/object_tracking_3d include: @@ -90,7 +90,6 @@ jobs: with: python-version: 3.8 - name: Test Tools - if: ${{ contains(github.event.pull_request.labels.*.name, 'test tools') }} run: | ${{ matrix.DEPENDENCIES_INSTALLATION }} export OPENDR_HOME=$PWD diff --git a/.github/workflows/tests_suite_develop.yml b/.github/workflows/tests_suite_develop.yml index 46c3188938..8513c2c22d 100644 --- a/.github/workflows/tests_suite_develop.yml +++ b/.github/workflows/tests_suite_develop.yml @@ -10,7 +10,7 @@ defaults: jobs: cleanup-runs: - if: ${{ contains(github.event.pull_request.labels.*.name, 'test sources') || contains(github.event.pull_request.labels.*.name, 'test tools') }} + if: ${{ contains(github.event.pull_request.labels.*.name, 'test sources') || contains(github.event.pull_request.labels.*.name, 'test tools') || github.event_name == 'schedule' }} runs-on: ubuntu-latest steps: - uses: rokroskar/workflow-run-cleanup-action@master @@ -18,7 +18,7 @@ jobs: GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" test-sources: needs: cleanup-runs - if: ${{ contains(github.event.pull_request.labels.*.name, 'test sources') }} + if: ${{ contains(github.event.pull_request.labels.*.name, 'test sources') || github.event_name == 'schedule' }} strategy: matrix: os: [ubuntu-18.04, ubuntu-20.04, macos-10.15] @@ -51,7 +51,7 @@ jobs: python -m unittest discover -s tests test-tools: needs: cleanup-runs - if: ${{ contains(github.event.pull_request.labels.*.name, 'test tools') }} + if: ${{ contains(github.event.pull_request.labels.*.name, 'test tools') || github.event_name == 'schedule' }} strategy: matrix: os: [ubuntu-20.04] @@ -72,7 +72,7 @@ jobs: - control/mobile_manipulation - perception/object_detection_2d - simulation/human_model_generation - - perception/facial_expression_recognition/landmark_based_facial_expression_recognition + - perception/facial_expression_recognition - control/single_demo_grasp # - perception/object_tracking_3d include: @@ -90,7 +90,6 @@ jobs: with: python-version: 3.8 - name: Test Tools - if: ${{ contains(github.event.pull_request.labels.*.name, 'test tools') }} run: | ${{ matrix.DEPENDENCIES_INSTALLATION }} export OPENDR_HOME=$PWD diff --git a/tests/sources/tools/perception/facial_expression_recognition/__init__.py b/tests/sources/tools/perception/facial_expression_recognition/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/sources/tools/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/__init__.py b/tests/sources/tools/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/__init__.py new file mode 100644 index 0000000000..e69de29bb2 From 5cb4c3cd9e114afba4483c229d5b7606759b8790 Mon Sep 17 00:00:00 2001 From: Pavlos Tosidis <35866477+Pavlos-Tosidis@users.noreply.github.com> Date: Fri, 14 Jan 2022 11:35:45 +0200 Subject: [PATCH 3/9] upgrade scikit-learn to 0.22 (#198) * upgrade scikit-learn to 0.22 0.21.3 causes warnings with the current numpy version. Upgrading it fixes it. 
* add changelog Co-authored-by: ad-daniel --- CHANGELOG.md | 16 ++++++++++++++++ README.md | 1 + .../heart_anomaly_detection/dependencies.ini | 2 +- 3 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000000..2de83a228b --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,16 @@ +# OpenDR Toolkit Change Log + +## Version 1.X +Released on XX, XXth, 2022. + + - New Features: + - None. + - Enhancements: + - None. + - Bug Fixes: + - None. + - Dependency Updates: + - `heart anomaly detection`: upgraded scikit-learn runtime dependency from 0.21.3 to 0.22 ([#198](https://github.com/opendr-eu/opendr/pull/198)). + +## Version 1.0 +Released on December 31st, 2021. \ No newline at end of file diff --git a/README.md b/README.md index 080b69971a..e91b500193 100644 --- a/README.md +++ b/README.md @@ -12,6 +12,7 @@ ______________________________________________________________________ Using OpenDR toolkit • Examples • Roadmap • + Changelog • License

diff --git a/src/opendr/perception/heart_anomaly_detection/dependencies.ini b/src/opendr/perception/heart_anomaly_detection/dependencies.ini index 66108a4300..832260166a 100644 --- a/src/opendr/perception/heart_anomaly_detection/dependencies.ini +++ b/src/opendr/perception/heart_anomaly_detection/dependencies.ini @@ -5,4 +5,4 @@ python=torch==1.7.1 torchvision==0.8.2 tensorboard==2.4.1 tqdm==4.54.0 - scikit-learn==0.21.3 + scikit-learn==0.22 From 49f7cd9991b205fa222cfefad6c2d76456574cc6 Mon Sep 17 00:00:00 2001 From: Negar Heidari <36771997+negarhdr@users.noreply.github.com> Date: Fri, 14 Jan 2022 11:18:33 +0100 Subject: [PATCH 4/9] bug fixed (#196) Co-authored-by: Negar Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> --- .../perception/skeleton_based_action_recognition/demos/demo.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/projects/perception/skeleton_based_action_recognition/demos/demo.py b/projects/perception/skeleton_based_action_recognition/demos/demo.py index 3347d63325..4d4342094f 100644 --- a/projects/perception/skeleton_based_action_recognition/demos/demo.py +++ b/projects/perception/skeleton_based_action_recognition/demos/demo.py @@ -125,7 +125,7 @@ def draw_preds(frame, preds: Dict): if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument("--onnx", help="Use ONNX", default=False, action="store_true") - parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda") + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cpu") parser.add_argument("--accelerate", help="Enables acceleration flags (e.g., stride)", default=False, action="store_true") parser.add_argument('--video', default=0, @@ -224,5 +224,4 @@ def draw_preds(frame, preds: Dict): break print("Average inference fps: ", avg_fps) - image_provider.release() cv2.destroyAllWindows() From cc71138ae22ec39b186960ff98c74bc2cdca3623 Mon Sep 17 00:00:00 2001 From: ad-daniel <44834743+ad-daniel@users.noreply.github.com> Date: Tue, 18 Jan 2022 09:24:00 +0100 Subject: [PATCH 5/9] Fix copyright dates and double-check rights owner (action required) (#199) * Fix dates * undo wrong change * Fix Tampere --- dependencies/parse_dependencies.py | 2 +- include/data.h | 2 +- include/face_recognition.h | 2 +- include/opendr_utils.h | 2 +- include/target.h | 2 +- projects/c_api/Makefile | 2 +- .../face_recognition/face_recognition_demo.c | 2 +- projects/control/eagerx/Makefile | 2 +- .../mobile_manipulation_demo.py | 2 +- .../inference/inference_utils.py | 2 +- .../single_demo_grasp_camera_stream.py | 2 +- .../inference/single_demo_inference.py | 2 +- .../scripts/camera_publisher.py | 2 +- .../scripts/constants.py | 2 +- .../scripts/gripper_command.py | 1 + .../scripts/joint_state_publisher.py | 2 +- .../scripts/panda_ros.py | 2 +- .../scripts/single_demo_grasp_action.py | 2 +- .../scripts/trajectory_follower.py | 2 +- .../scripts/utilities.py | 2 +- .../scripts/face_detection_retinaface.py | 2 +- .../perception/scripts/face_recognition.py | 2 +- .../scripts/heart_anomaly_detection.py | 2 +- .../src/perception/scripts/image_dataset.py | 2 +- ...ark_based_facial_expression_recognition.py | 2 +- .../scripts/object_detection_2d_centernet.py | 2 +- .../scripts/object_detection_2d_detr.py | 2 +- .../scripts/object_detection_2d_gem.py | 2 +- .../scripts/object_detection_2d_ssd.py | 2 +- .../scripts/object_detection_2d_yolov3.py | 2 +- .../scripts/object_detection_3d_voxel.py | 2 +- 
.../scripts/object_tracking_2d_deep_sort.py | 2 +- .../scripts/object_tracking_2d_fair_mot.py | 2 +- .../scripts/object_tracking_3d_ab3dmot.py | 2 +- .../panoptic_segmentation_efficient_ps.py | 2 +- .../perception/scripts/point_cloud_dataset.py | 2 +- .../src/perception/scripts/pose_estimation.py | 2 +- .../scripts/rgbd_hand_gesture_recognition.py | 2 +- .../scripts/semantic_segmentation_bisenet.py | 2 +- .../skeleton_based_action_recognition.py | 2 +- .../scripts/speech_command_recognition.py | 2 +- .../scripts/video_activity_recognition.py | 2 +- projects/opendr_ws/src/ros_bridge/setup.py | 2 +- .../ros_bridge/src/opendr_bridge/bridge.py | 2 +- .../scripts/human_model_generation_client.py | 2 +- .../scripts/human_model_generation_service.py | 2 +- .../demos/online_recognition/demo.py | 2 +- .../demos/online_recognition/setup.py | 2 +- .../demos/benchmarking_demo.py | 2 +- .../face_recognition/demos/eval_demo.py | 2 +- .../face_recognition/demos/inference_demo.py | 2 +- .../demo.py | 2 +- .../heart_anomaly_detection/demo.py | 2 +- .../demos/benchmarking_demo.py | 2 +- .../lightweight_open_pose/demos/eval_demo.py | 2 +- .../demos/inference_demo.py | 2 +- .../demos/webcam_demo.py | 2 +- .../jetbot/fall_controller.py | 2 +- .../jetbot/utils/active.py | 2 +- .../jetbot/utils/pose_controller.py | 2 +- .../jetbot/utils/pose_utils.py | 2 +- .../jetbot/utils/robot_interface.py | 2 +- .../jetbot/utils/visualization.py | 2 +- .../jetbot/utils/webots.py | 2 +- .../gesture_recognition_demo.py | 2 +- .../centernet/eval_demo.py | 2 +- .../centernet/inference_demo.py | 2 +- .../centernet/train_demo.py | 2 +- .../object_detection_2d/detr/eval_demo.py | 2 +- .../detr/inference_demo.py | 2 +- .../object_detection_2d/detr/train_demo.py | 2 +- .../object_detection_2d/gem/inference_demo.py | 2 +- .../retinaface/eval_demo.py | 2 +- .../retinaface/inference_demo.py | 2 +- .../retinaface/train_demo.py | 2 +- .../object_detection_2d/ssd/eval_demo.py | 2 +- .../object_detection_2d/ssd/inference_demo.py | 2 +- .../object_detection_2d/ssd/train_demo.py | 2 +- .../object_detection_2d/yolov3/eval_demo.py | 2 +- .../yolov3/inference_demo.py | 2 +- .../object_detection_2d/yolov3/train_demo.py | 2 +- .../data_generators.py | 2 +- .../demos/voxel_object_detection_3d/demo.py | 2 +- .../draw_point_clouds.py | 2 +- .../voxel_object_detection_3d/metrics.py | 2 +- .../o3m_lidar/channel.py | 2 +- .../o3m_lidar/main.py | 2 +- .../o3m_lidar/o3m_lidar.py | 2 +- .../o3m_lidar/structures.py | 2 +- .../rplidar_processor.py | 2 +- .../demos/voxel_object_detection_3d/setup.py | 2 +- .../fair_mot_deep_sort/data_generators.py | 2 +- .../demos/fair_mot_deep_sort/demo.py | 2 +- .../demos/fair_mot_deep_sort/setup.py | 2 +- .../efficient_ps/example_usage.py | 2 +- .../bisenet/eval_demo.py | 2 +- .../bisenet/inference_demo.py | 2 +- .../bisenet/train_demo.py | 2 +- .../demos/demo.py | 2 +- .../demos/skeleton_extraction.py | 2 +- .../src/fmp_slam_eval/launch/experiment.py | 2 +- .../launch/experiment_real_data.py | 2 +- .../src/fmp_slam_eval/nodes/err_collector | 2 +- .../src/fmp_slam_eval/nodes/fmp_plot | 2 +- .../src/fmp_slam_eval/nodes/gt_mapping | 2 +- .../src/fmp_slam_eval/nodes/occ_map_saver | 2 +- .../src/fmp_slam_eval/nodes/odom_pose | 2 +- .../src/fmp_slam_eval/nodes/pose_error_calc | 2 +- .../src/fmp_slam_eval/package.xml | 2 +- .../src/fmp_slam_eval/scripts/err_curves.py | 2 +- .../fmp_slam_eval/scripts/err_histograms.py | 2 +- .../scripts/method_comparison.py | 2 +- .../src/fmp_slam_eval/setup.py | 2 +- 
.../src/fmp_slam_eval/enums/disc_states.py | 2 +- .../src/fmp_slam_eval/error_data_collector.py | 2 +- .../src/fmp_slam_eval/fmp_plotter.py | 2 +- .../src/fmp_slam_eval/ground_truth_mapping.py | 2 +- .../src/fmp_slam_eval/map_colorizer.py | 2 +- .../src/fmp_slam_eval/net_utils.py | 2 +- .../src/fmp_slam_eval/occ_map_saver.py | 2 +- .../src/fmp_slam_eval/odom_pose_publisher.py | 2 +- .../fmp_slam_eval/pose_error_calculator.py | 2 +- .../src/fmp_slam_eval/ros_launcher.py | 2 +- .../src/fmp_slam_eval/roscore.py | 2 +- .../src/map_simulator/package.xml | 2 +- .../src/map_simulator/scripts/mapsim2d.py | 2 +- .../src/map_simulator/setup.py | 2 +- .../geometry/primitives/closed_shape_2D.py | 2 +- .../map_simulator/geometry/primitives/line.py | 2 +- .../geometry/primitives/polygon.py | 2 +- .../map_simulator/geometry/primitives/pose.py | 2 +- .../src/map_simulator/geometry/transform.py | 2 +- .../map_simulator/map_obstacles/obstacle.py | 2 +- .../map_obstacles/polygonal_obstacle.py | 2 +- .../src/map_simulator/map_simulator_2d.py | 2 +- .../map_simulator/robot_commands/command.py | 2 +- .../robot_commands/message/bool_msg_cmd.py | 2 +- .../robot_commands/message/message_cmd.py | 2 +- .../robot_commands/misc/comment_cmd.py | 2 +- .../robot_commands/misc/misc_cmd.py | 2 +- .../robot_commands/misc/scan_cmd.py | 2 +- .../robot_commands/misc/sleep_cmd.py | 2 +- .../robot_commands/move/move_circular_cmd.py | 2 +- .../robot_commands/move/move_cmd.py | 2 +- .../robot_commands/move/move_interpol_cmd.py | 2 +- .../robot_commands/move/move_linear_cmd.py | 2 +- .../robot_commands/move/move_pose_cmd.py | 2 +- .../robot_commands/move/move_rotation_cmd.py | 2 +- .../map_simulator/src/map_simulator/utils.py | 2 +- .../src/openslam_gmapping/Makefile | 8 +++---- .../src/openslam_gmapping/configfile/Makefile | 4 ++-- .../configfile/configfile.cpp | 2 +- .../configfile/configfile_test.cpp | 2 +- .../src/openslam_gmapping/grid/Makefile | 6 ++--- .../src/openslam_gmapping/grid/graphmap.cpp | 2 +- .../src/openslam_gmapping/grid/map_test.cpp | 2 +- .../openslam_gmapping/gridfastslam/Makefile | 2 +- .../gridfastslam/gfs2log.cpp | 2 +- .../gridfastslam/gfs2neff.cpp | 2 +- .../gridfastslam/gfs2rec.cpp | 2 +- .../gridfastslam/gfsreader.cpp | 2 +- .../gridfastslam/gridslamprocessor.cpp | 2 +- .../gridfastslam/gridslamprocessor_tree.cpp | 2 +- .../gridfastslam/motionmodel.cpp | 2 +- .../include/gmapping/configfile/configfile.h | 2 +- .../include/gmapping/grid/accessstate.h | 2 +- .../include/gmapping/grid/array2d.h | 2 +- .../include/gmapping/grid/harray2d.h | 2 +- .../include/gmapping/grid/map.h | 2 +- .../include/gmapping/gridfastslam/gfsreader.h | 2 +- .../gmapping/gridfastslam/gridslamprocessor.h | 2 +- .../gridfastslam/gridslamprocessor.hxx | 2 +- .../gmapping/gridfastslam/motionmodel.h | 2 +- .../gmapping/log/carmenconfiguration.h | 2 +- .../include/gmapping/log/configuration.h | 2 +- .../include/gmapping/log/sensorlog.h | 2 +- .../include/gmapping/log/sensorstream.h | 2 +- .../gmapping/particlefilter/particlefilter.h | 2 +- .../include/gmapping/particlefilter/pf.h | 2 +- .../include/gmapping/scanmatcher/eig3.h | 2 +- .../gmapping/scanmatcher/gridlinetraversal.h | 2 +- .../include/gmapping/scanmatcher/icp.h | 2 +- .../gmapping/scanmatcher/scanmatcher.h | 2 +- .../scanmatcher/scanmatcherprocessor.h | 2 +- .../include/gmapping/scanmatcher/smmap.h | 2 +- .../gmapping/sensor/sensor_base/sensor.h | 2 +- .../sensor/sensor_base/sensoreading.h | 2 +- .../sensor/sensor_base/sensorreading.h | 2 +- 
.../sensor/sensor_odometry/odometryreading.h | 2 +- .../sensor/sensor_odometry/odometrysensor.h | 2 +- .../sensor/sensor_range/rangereading.h | 2 +- .../sensor/sensor_range/rangesensor.h | 2 +- .../include/gmapping/utils/autoptr.h | 2 +- .../include/gmapping/utils/commandline.h | 2 +- .../include/gmapping/utils/gvalues.h | 2 +- .../include/gmapping/utils/macro_params.h | 2 +- .../include/gmapping/utils/movement.h | 2 +- .../include/gmapping/utils/point.h | 2 +- .../include/gmapping/utils/stat.h | 2 +- .../src/openslam_gmapping/log/Makefile | 6 ++--- .../log/carmenconfiguration.cpp | 2 +- .../openslam_gmapping/log/configuration.cpp | 2 +- .../src/openslam_gmapping/log/log_plot.cpp | 2 +- .../src/openslam_gmapping/log/log_test.cpp | 2 +- .../src/openslam_gmapping/log/rdk2carmen.cpp | 2 +- .../log/scanstudio2carmen.cpp | 2 +- .../src/openslam_gmapping/log/sensorlog.cpp | 2 +- .../openslam_gmapping/log/sensorstream.cpp | 2 +- .../openslam_gmapping/scanmatcher/Makefile | 2 +- .../openslam_gmapping/scanmatcher/eig3.cpp | 2 +- .../openslam_gmapping/scanmatcher/icptest.cpp | 2 +- .../scanmatcher/line_test.cpp | 2 +- .../scanmatcher/scanmatch_test.cpp | 2 +- .../scanmatcher/scanmatcher.cpp | 2 +- .../scanmatcher/scanmatcher.new.cpp | 2 +- .../scanmatcher/scanmatcherprocessor.cpp | 2 +- .../openslam_gmapping/scanmatcher/smmap.cpp | 2 +- .../src/openslam_gmapping/sensor/Makefile | 2 +- .../sensor/sensor_base/Makefile | 2 +- .../sensor/sensor_base/sensor.cpp | 2 +- .../sensor/sensor_base/sensorreading.cpp | 2 +- .../sensor/sensor_odometry/Makefile | 2 +- .../sensor_odometry/odometryreading.cpp | 2 +- .../sensor/sensor_odometry/odometrysensor.cpp | 2 +- .../sensor/sensor_range/Makefile | 6 ++--- .../sensor/sensor_range/rangereading.cpp | 2 +- .../sensor/sensor_range/rangesensor.cpp | 2 +- .../src/openslam_gmapping/utils/Makefile | 2 +- .../openslam_gmapping/utils/autoptr_test.cpp | 2 +- .../src/openslam_gmapping/utils/movement.cpp | 2 +- .../src/openslam_gmapping/utils/stat.cpp | 2 +- .../src/openslam_gmapping/utils/stat_test.cpp | 2 +- .../src/slam_gmapping/gmapping/src/main.cpp | 2 +- .../slam_gmapping/gmapping/src/nodelet.cpp | 2 +- .../src/slam_gmapping/gmapping/src/replay.cpp | 2 +- .../gmapping/src/slam_gmapping.cpp | 2 +- .../gmapping/src/slam_gmapping.h | 2 +- .../src/slam_gmapping/gmapping/test/rtest.cpp | 2 +- .../slam_gmapping/gmapping/test/test_map.py | 2 +- .../speech_command_recognition/demo.py | 2 +- .../SMPL+D_human_models/src/download_data.py | 2 +- .../src/generate_models.py | 2 +- .../webots/extract_anims.py | 2 +- .../controllers/smpl_animation/Makefile | 4 ++-- .../smpl_animation/smpl_animation.c | 2 +- .../smpl_webots/libraries/smpl_util/Makefile | 2 +- .../smpl_util/include/quaternion_private.h | 2 +- .../libraries/smpl_util/include/smpl_util.h | 2 +- .../smpl_util/include/vector3_private.h | 2 +- .../libraries/smpl_util/src/quaternion.c | 2 +- .../libraries/smpl_util/src/smpl_util.c | 2 +- .../libraries/smpl_util/src/vector3.c | 2 +- .../human_dataset_generation/background.py | 2 +- .../create_background_images.py | 2 +- .../create_dataset.py | 2 +- .../data_generator.py | 2 +- .../reformat_cityscapes.py | 2 +- .../hyperparameter_tuner_demo.py | 2 +- src/c_api/Makefile | 2 +- src/c_api/face_recognition.cpp | 3 +-- src/c_api/opendr_utils.cpp | 2 +- src/opendr/_version.py | 2 +- .../control/mobile_manipulation/Makefile | 2 +- .../gripper_planner/base_gripper_planner.hpp | 2 +- .../gaussian_mixture_model.hpp | 2 +- .../include/gripper_planner/gmm_planner.hpp | 2 +- 
.../gripper_planner/linear_planner.hpp | 2 +- .../mobile_manipulation_rl/robot_env.hpp | 2 +- .../mobile_manipulation_rl/robot_hsr.hpp | 2 +- .../mobile_manipulation_rl/robot_pr2.hpp | 2 +- .../mobile_manipulation_rl/robot_tiago.hpp | 2 +- .../include/mobile_manipulation_rl/utils.hpp | 2 +- .../include/mobile_manipulation_rl/worlds.hpp | 2 +- .../mobileRL/envs/__init__.py | 2 +- .../mobileRL/envs/eeplanner.py | 2 +- .../mobileRL/envs/env_utils.py | 2 +- .../mobile_manipulation/mobileRL/envs/map.py | 2 +- .../mobileRL/envs/mobile_manipulation_env.py | 2 +- .../mobileRL/envs/robotenv.py | 2 +- .../mobileRL/envs/simulator_api.py | 2 +- .../mobileRL/envs/tasks.py | 2 +- .../mobileRL/envs/tasks_chained.py | 2 +- .../mobileRL/evaluation.py | 2 +- .../mobileRL/handle_launchfiles.py | 2 +- .../mobileRL/stablebl_callbacks.py | 2 +- .../mobile_manipulation/mobileRL/utils.py | 2 +- .../mobile_manipulation_learner.py | 2 +- .../gripper_planner/base_gripper_planner.cpp | 2 +- .../gaussian_mixture_model.cpp | 2 +- .../src/gripper_planner/gmm_planner.cpp | 2 +- .../src/gripper_planner/linear_planner.cpp | 2 +- .../mobile_manipulation/src/pybindings.cpp | 2 +- .../mobile_manipulation/src/robot_env.cpp | 2 +- .../mobile_manipulation/src/robot_hsr.cpp | 2 +- .../mobile_manipulation/src/robot_pr2.cpp | 2 +- .../mobile_manipulation/src/robot_tiago.cpp | 2 +- .../control/mobile_manipulation/src/utils.cpp | 2 +- .../mobile_manipulation/src/worlds.cpp | 2 +- src/opendr/control/single_demo_grasp/Makefile | 2 +- .../augmentation/augmentation_gui.py | 2 +- .../augmentation/augmentation_utils.py | 2 +- .../training/learner_utils.py | 2 +- .../training/single_demo_grasp_learner.py | 2 +- src/opendr/engine/constants.py | 2 +- src/opendr/engine/data.py | 2 +- src/opendr/engine/datasets.py | 2 +- src/opendr/engine/example_learner.py | 2 +- src/opendr/engine/learners.py | 2 +- src/opendr/engine/target.py | 2 +- .../cox3d/cox3d_learner.py | 2 +- .../activity_recognition/datasets/kinetics.py | 2 +- .../datasets/utils/transforms.py | 2 +- .../activity_recognition/x3d/x3d_learner.py | 2 +- .../algorithm/__init__.py | 2 +- .../algorithm/backbones/__init__.py | 2 +- .../algorithm/backbones/cifar_allcnn.py | 2 +- .../algorithm/backbones/imagenet_densenet.py | 2 +- .../algorithm/backbones/imagenet_resnet.py | 2 +- .../algorithm/backbones/imagenet_vgg.py | 2 +- .../algorithm/backbones/model_utils.py | 2 +- .../algorithm/data.py | 2 +- .../algorithm/learner.py | 2 +- .../multilinear_compressive_learner.py | 2 +- .../face_recognition_learner.py | 2 +- .../algorithm/datasets/AFEW_data_gen.py | 2 +- .../algorithm/datasets/CASIA_CK_data_gen.py | 2 +- .../algorithm/datasets/data_augmentation.py | 2 +- .../algorithm/datasets/frame_extractor.py | 2 +- .../datasets/gen_facial_muscles_data.py | 2 +- .../algorithm/datasets/landmark_extractor.py | 2 +- .../algorithm/models/pstbln.py | 2 +- ...progressive_spatio_temporal_bln_learner.py | 2 +- .../algorithm/__init__.py | 2 +- ...attention_neural_bag_of_feature_learner.py | 2 +- .../algorithm/__init__.py | 2 +- .../gated_recurrent_unit/algorithm/data.py | 2 +- .../gated_recurrent_unit/algorithm/models.py | 2 +- .../algorithm/trainers.py | 2 +- .../gated_recurrent_unit_learner.py | 2 +- .../algorithm/__init__.py | 2 +- .../algorithm/architectures/__init__.py | 2 +- .../algorithm/data.py | 2 +- .../rgbd_hand_gesture_learner.py | 2 +- .../centernet/centernet_learner.py | 2 +- .../datasets/detection_dataset.py | 2 +- .../datasets/transforms.py | 2 +- .../datasets/wider_face.py | 2 +- 
.../datasets/wider_person.py | 2 +- .../detr/algorithm/util/__init__.py | 2 +- .../detr/algorithm/util/draw.py | 2 +- .../object_detection_2d/detr/detr_learner.py | 3 ++- .../gem/algorithm/util/draw.py | 2 +- .../gem/algorithm/util/sampler.py | 2 +- .../object_detection_2d/gem/gem_learner.py | 4 ++-- .../retinaface/retinaface_learner.py | 2 +- .../object_detection_2d/ssd/ssd_learner.py | 2 +- .../object_detection_2d/utils/eval_utils.py | 2 +- .../utils/get_color_infra_alignment.py | 2 +- .../object_detection_2d/utils/vis_utils.py | 2 +- .../yolov3/yolov3_learner.py | 2 +- .../datasets/create_data_kitti.py | 1 + .../object_detection_3d/datasets/kitti.py | 1 + .../voxel_object_detection_3d/logger.py | 1 + .../second_detector/load.py | 2 +- .../second_detector/run.py | 2 +- .../voxel_object_detection_3d_learner.py | 2 +- .../datasets/market1501_dataset.py | 2 +- .../datasets/mot_dataset.py | 2 +- .../deep_sort/algorithm/deep_sort_tracker.py | 2 +- .../deep_sort/algorithm/run.py | 2 +- .../object_tracking_2d_deep_sort_learner.py | 2 +- .../fair_mot/algorithm/load.py | 2 +- .../fair_mot/algorithm/run.py | 2 +- .../object_tracking_2d_fair_mot_learner.py | 2 +- .../perception/object_tracking_2d/logger.py | 3 ++- .../ab3dmot/algorithm/ab3dmot.py | 2 +- .../ab3dmot/algorithm/evaluate.py | 2 +- .../ab3dmot/algorithm/kalman_tracker_3d.py | 2 +- .../object_tracking_3d/ab3dmot/logger.py | 1 + .../object_tracking_3d_ab3dmot_learner.py | 2 +- .../datasets/kitti_tracking.py | 2 +- .../datasets/cityscapes.py | 2 +- .../panoptic_segmentation/datasets/kitti.py | 2 +- .../efficient_ps/configs/singlegpu_sample.py | 2 +- .../efficient_ps/efficient_ps_learner.py | 2 +- .../algorithm/models/with_mobilenet_v2.py | 2 +- .../algorithm/models/with_shufflenet.py | 2 +- .../lightweight_open_pose/filtered_pose.py | 2 +- .../lightweight_open_pose_learner.py | 2 +- .../lightweight_open_pose/utilities.py | 2 +- .../semantic_segmentation/bisenet/CamVid.py | 2 +- .../bisenet/bisenet_learner.py | 2 +- ...progressive_spatio_temporal_gcn_learner.py | 2 +- .../spatio_temporal_gcn_learner.py | 2 +- .../edgespeechnets/algorithm/audioutils.py | 2 +- .../edgespeechnets/algorithm/models.py | 2 +- .../edgespeechnets/edgespeechnets_learner.py | 2 +- .../matchboxnet/algorithm/audioutils.py | 2 +- .../matchboxnet/algorithm/model.py | 2 +- .../matchboxnet/matchboxnet_learner.py | 2 +- .../quadraticselfonn/algorithm/audioutils.py | 2 +- .../quadraticselfonn/algorithm/model.py | 2 +- .../quadraticselfonn_learner.py | 2 +- .../pifu_generator_learner.py | 2 +- .../utilities/config_utils.py | 2 +- .../utilities/joint_extractor.py | 2 +- .../utilities/model_3D.py | 2 +- .../utilities/studio.py | 2 +- .../utilities/visualizer.py | 2 +- .../hyperparameter_tuner/dummy_learner.py | 2 +- .../hyperparameter_tuner.py | 2 +- src/opendr/utils/io.py | 2 +- tests/Makefile | 2 +- tests/sources/c_api/test_face_recognition.c | 2 +- tests/sources/c_api/test_fmp_gmapping.cpp | 2 +- tests/sources/c_api/test_opendr_utils.c | 2 +- .../test_mobile_manipulation.py | 2 +- .../test_single_demo_grasp.py | 2 +- .../cox3d/test_cox3d_learner.py | 2 +- .../x3d/test_x3d_learner.py | 2 +- .../test_multilinear_compressive_learner.py | 2 +- .../face_recognition/test_face_recognition.py | 2 +- .../test_pstbln.py | 2 +- ...attention_neural_bag_of_feature_learner.py | 2 +- .../test_gated_recurrent_unit_learner.py | 2 +- .../test_rgbd_hand_gesture_learner.py | 2 +- .../centernet/test_centernet.py | 2 +- .../object_detection_2d/detr/test_detr.py | 2 +- 
.../object_detection_2d/gem/test_gem.py | 2 +- .../retinaface/test_retinaface.py | 2 +- .../object_detection_2d/ssd/test_ssd.py | 2 +- .../object_detection_2d/yolov3/test_yolo3.py | 2 +- .../test_object_detection_3d.py | 2 +- .../test_object_tracking_2d_deep_sort.py | 2 +- .../test_object_tracking_2d_fair_mot.py | 2 +- .../test_object_tracking_3d_ab3dmot.py | 2 +- .../test_lightweight_open_pose.py | 2 +- .../test_semantic_segmentation_bisenet.py | 2 +- .../test_pstgcn.py | 2 +- .../test_stbln.py | 2 +- .../test_stgcn.py | 2 +- .../test_tagcn.py | 2 +- .../edgespeechnets/test_edgespeechnets.py | 2 +- .../matchboxnet/test_matchboxnet.py | 2 +- .../quadraticselfonn/test_quadraticselfonn.py | 2 +- .../test_human_model_generation.py | 2 +- .../tools/utils/test_hyperparameter_tuner.py | 2 +- tests/sources/tools/utils/test_io.py | 2 +- tests/test_license.py | 23 +++++++++++++------ 449 files changed, 478 insertions(+), 463 deletions(-) diff --git a/dependencies/parse_dependencies.py b/dependencies/parse_dependencies.py index 58b17ae029..31fdc20829 100644 --- a/dependencies/parse_dependencies.py +++ b/dependencies/parse_dependencies.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/include/data.h b/include/data.h index 5b0f9b0818..0b0018b941 100644 --- a/include/data.h +++ b/include/data.h @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 OpenDR project + * Copyright 2020-2022 OpenDR European Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/include/face_recognition.h b/include/face_recognition.h index 997a414c57..ff2774aab2 100644 --- a/include/face_recognition.h +++ b/include/face_recognition.h @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 OpenDR project + * Copyright 2020-2022 OpenDR European Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/include/opendr_utils.h b/include/opendr_utils.h index 0ec35ec9ae..309c44a211 100644 --- a/include/opendr_utils.h +++ b/include/opendr_utils.h @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 OpenDR project + * Copyright 2020-2022 OpenDR European Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/include/target.h b/include/target.h index 04c4f3f4c7..99603e905d 100644 --- a/include/target.h +++ b/include/target.h @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 OpenDR project + * Copyright 2020-2022 OpenDR European Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/projects/c_api/Makefile b/projects/c_api/Makefile index caf29a97b8..92d14951f9 100644 --- a/projects/c_api/Makefile +++ b/projects/c_api/Makefile @@ -1,5 +1,5 @@ # -# Copyright 2020-2021 OpenDR project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
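This patch ([PATCH 5/9]) is almost entirely mechanical: across 449 files it bumps the header year range from 2020-2021 to 2020-2022 and normalizes the rights owner to "OpenDR European Project". The series does not show how the rewrite was produced, so the following is only a minimal sketch of how such a bulk header update could be scripted; the helper itself, its suffix list, and the in-place strategy are assumptions, not part of the PR. The ownership changes (e.g. the Cyberbotics headers replaced below) correspond to the "double-check rights owner (action required)" note in the subject and would still need manual review.

```python
#!/usr/bin/env python3
# Hypothetical helper (NOT part of this PR): bump OpenDR license headers
# from "2020-2021" to "2020-2022", mirroring what this patch does file by file.
# Owner changes (e.g. "Cyberbotics Ltd." -> "OpenDR European Project") are
# deliberately left to manual review.
import pathlib

OLD = "Copyright 2020-2021 OpenDR"
NEW = "Copyright 2020-2022 OpenDR"
SUFFIXES = {".py", ".c", ".cpp", ".h", ".hpp", ".hxx", ".xml"}  # assumed set

for path in pathlib.Path(".").rglob("*"):
    if not (path.is_file() and path.suffix in SUFFIXES):
        continue
    text = path.read_text(encoding="utf-8", errors="ignore")
    if OLD in text:
        path.write_text(text.replace(OLD, NEW), encoding="utf-8")
        print(f"updated: {path}")
```

The remaining hunks below continue the same pattern.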
diff --git a/projects/c_api/samples/face_recognition/face_recognition_demo.c b/projects/c_api/samples/face_recognition/face_recognition_demo.c index f426d6f945..03465f77fe 100644 --- a/projects/c_api/samples/face_recognition/face_recognition_demo.c +++ b/projects/c_api/samples/face_recognition/face_recognition_demo.c @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 OpenDR project + * Copyright 2020-2022 OpenDR European Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/projects/control/eagerx/Makefile b/projects/control/eagerx/Makefile index 26c6367def..cd9ee79573 100644 --- a/projects/control/eagerx/Makefile +++ b/projects/control/eagerx/Makefile @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/control/mobile_manipulation/mobile_manipulation_demo.py b/projects/control/mobile_manipulation/mobile_manipulation_demo.py index 7c7e84c192..68b29ad2aa 100644 --- a/projects/control/mobile_manipulation/mobile_manipulation_demo.py +++ b/projects/control/mobile_manipulation/mobile_manipulation_demo.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/inference_utils.py b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/inference_utils.py index 16e2b6a9e1..619556dcf0 100755 --- a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/inference_utils.py +++ b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/inference_utils.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_grasp_camera_stream.py b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_grasp_camera_stream.py index f786b8b108..46722f1be9 100755 --- a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_grasp_camera_stream.py +++ b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_grasp_camera_stream.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_inference.py b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_inference.py index 991746f9b7..9285654766 100755 --- a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_inference.py +++ b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_inference.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/camera_publisher.py b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/camera_publisher.py index c7c81ed793..c50b56fc20 100644 --- a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/camera_publisher.py +++ b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/camera_publisher.py @@ -1,4 +1,4 @@ -# Copyright 1996-2020 Cyberbotics Ltd. +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/constants.py b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/constants.py index e2d7ec4b08..44be8a295b 100644 --- a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/constants.py +++ b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/constants.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/gripper_command.py b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/gripper_command.py index d0cc516edc..70dedc17a4 100644 --- a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/gripper_command.py +++ b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/gripper_command.py @@ -1,3 +1,4 @@ +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/joint_state_publisher.py b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/joint_state_publisher.py index 9e5a6b8bf6..73bf7d26f0 100644 --- a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/joint_state_publisher.py +++ b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/joint_state_publisher.py @@ -1,4 +1,4 @@ -# Copyright 1996-2020 Cyberbotics Ltd. 
+# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/panda_ros.py b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/panda_ros.py index 7940a7aef2..117ba1b1e3 100755 --- a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/panda_ros.py +++ b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/panda_ros.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 1996-2020 Cyberbotics Ltd. +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/single_demo_grasp_action.py b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/single_demo_grasp_action.py index 9973ccc01a..12b0556b64 100755 --- a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/single_demo_grasp_action.py +++ b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/single_demo_grasp_action.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/trajectory_follower.py b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/trajectory_follower.py index 70219a6e79..9847d6347a 100644 --- a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/trajectory_follower.py +++ b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/trajectory_follower.py @@ -1,4 +1,4 @@ -# Copyright 1996-2020 Cyberbotics Ltd. +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/utilities.py b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/utilities.py index eedddd31e2..46eda4ba23 100644 --- a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/utilities.py +++ b/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/utilities.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/projects/opendr_ws/src/perception/scripts/face_detection_retinaface.py b/projects/opendr_ws/src/perception/scripts/face_detection_retinaface.py index fb9549ac8b..7227951b17 100755 --- a/projects/opendr_ws/src/perception/scripts/face_detection_retinaface.py +++ b/projects/opendr_ws/src/perception/scripts/face_detection_retinaface.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/opendr_ws/src/perception/scripts/face_recognition.py b/projects/opendr_ws/src/perception/scripts/face_recognition.py index bb18222782..1f489b828b 100755 --- a/projects/opendr_ws/src/perception/scripts/face_recognition.py +++ b/projects/opendr_ws/src/perception/scripts/face_recognition.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/opendr_ws/src/perception/scripts/heart_anomaly_detection.py b/projects/opendr_ws/src/perception/scripts/heart_anomaly_detection.py index 470359e5e6..4e72471b9d 100755 --- a/projects/opendr_ws/src/perception/scripts/heart_anomaly_detection.py +++ b/projects/opendr_ws/src/perception/scripts/heart_anomaly_detection.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/opendr_ws/src/perception/scripts/image_dataset.py b/projects/opendr_ws/src/perception/scripts/image_dataset.py index 98d612d703..0ce4ee3850 100644 --- a/projects/opendr_ws/src/perception/scripts/image_dataset.py +++ b/projects/opendr_ws/src/perception/scripts/image_dataset.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/opendr_ws/src/perception/scripts/landmark_based_facial_expression_recognition.py b/projects/opendr_ws/src/perception/scripts/landmark_based_facial_expression_recognition.py index 4d01bbc230..a6b0c2188f 100644 --- a/projects/opendr_ws/src/perception/scripts/landmark_based_facial_expression_recognition.py +++ b/projects/opendr_ws/src/perception/scripts/landmark_based_facial_expression_recognition.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/projects/opendr_ws/src/perception/scripts/object_detection_2d_centernet.py b/projects/opendr_ws/src/perception/scripts/object_detection_2d_centernet.py index d87512a9cf..c1615f99a7 100755 --- a/projects/opendr_ws/src/perception/scripts/object_detection_2d_centernet.py +++ b/projects/opendr_ws/src/perception/scripts/object_detection_2d_centernet.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/opendr_ws/src/perception/scripts/object_detection_2d_detr.py b/projects/opendr_ws/src/perception/scripts/object_detection_2d_detr.py index 52f7d31aa2..ec98c4ddf0 100644 --- a/projects/opendr_ws/src/perception/scripts/object_detection_2d_detr.py +++ b/projects/opendr_ws/src/perception/scripts/object_detection_2d_detr.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/opendr_ws/src/perception/scripts/object_detection_2d_gem.py b/projects/opendr_ws/src/perception/scripts/object_detection_2d_gem.py index a48bb36d29..ee1d784566 100644 --- a/projects/opendr_ws/src/perception/scripts/object_detection_2d_gem.py +++ b/projects/opendr_ws/src/perception/scripts/object_detection_2d_gem.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/opendr_ws/src/perception/scripts/object_detection_2d_ssd.py b/projects/opendr_ws/src/perception/scripts/object_detection_2d_ssd.py index 619e4f6c4d..6f643e61cf 100755 --- a/projects/opendr_ws/src/perception/scripts/object_detection_2d_ssd.py +++ b/projects/opendr_ws/src/perception/scripts/object_detection_2d_ssd.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/opendr_ws/src/perception/scripts/object_detection_2d_yolov3.py b/projects/opendr_ws/src/perception/scripts/object_detection_2d_yolov3.py index 853fde3967..93155f148b 100755 --- a/projects/opendr_ws/src/perception/scripts/object_detection_2d_yolov3.py +++ b/projects/opendr_ws/src/perception/scripts/object_detection_2d_yolov3.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/projects/opendr_ws/src/perception/scripts/object_detection_3d_voxel.py b/projects/opendr_ws/src/perception/scripts/object_detection_3d_voxel.py index 7034eb5a37..6d6b74015a 100644 --- a/projects/opendr_ws/src/perception/scripts/object_detection_3d_voxel.py +++ b/projects/opendr_ws/src/perception/scripts/object_detection_3d_voxel.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/opendr_ws/src/perception/scripts/object_tracking_2d_deep_sort.py b/projects/opendr_ws/src/perception/scripts/object_tracking_2d_deep_sort.py index d4d666877f..70d66c69a8 100644 --- a/projects/opendr_ws/src/perception/scripts/object_tracking_2d_deep_sort.py +++ b/projects/opendr_ws/src/perception/scripts/object_tracking_2d_deep_sort.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/opendr_ws/src/perception/scripts/object_tracking_2d_fair_mot.py b/projects/opendr_ws/src/perception/scripts/object_tracking_2d_fair_mot.py index 1e96bc2640..0f8d3a7373 100755 --- a/projects/opendr_ws/src/perception/scripts/object_tracking_2d_fair_mot.py +++ b/projects/opendr_ws/src/perception/scripts/object_tracking_2d_fair_mot.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/opendr_ws/src/perception/scripts/object_tracking_3d_ab3dmot.py b/projects/opendr_ws/src/perception/scripts/object_tracking_3d_ab3dmot.py index 460505e894..b9927182ce 100644 --- a/projects/opendr_ws/src/perception/scripts/object_tracking_3d_ab3dmot.py +++ b/projects/opendr_ws/src/perception/scripts/object_tracking_3d_ab3dmot.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/projects/opendr_ws/src/perception/scripts/panoptic_segmentation_efficient_ps.py b/projects/opendr_ws/src/perception/scripts/panoptic_segmentation_efficient_ps.py index 9ef4274e14..bce86e46ea 100755 --- a/projects/opendr_ws/src/perception/scripts/panoptic_segmentation_efficient_ps.py +++ b/projects/opendr_ws/src/perception/scripts/panoptic_segmentation_efficient_ps.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/projects/opendr_ws/src/perception/scripts/point_cloud_dataset.py b/projects/opendr_ws/src/perception/scripts/point_cloud_dataset.py
index 6cb30a7727..0701e1005e 100644
--- a/projects/opendr_ws/src/perception/scripts/point_cloud_dataset.py
+++ b/projects/opendr_ws/src/perception/scripts/point_cloud_dataset.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/opendr_ws/src/perception/scripts/pose_estimation.py b/projects/opendr_ws/src/perception/scripts/pose_estimation.py
index c5622c4771..855ada40cf 100644
--- a/projects/opendr_ws/src/perception/scripts/pose_estimation.py
+++ b/projects/opendr_ws/src/perception/scripts/pose_estimation.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/opendr_ws/src/perception/scripts/rgbd_hand_gesture_recognition.py b/projects/opendr_ws/src/perception/scripts/rgbd_hand_gesture_recognition.py
index e01ac98308..69150856ad 100755
--- a/projects/opendr_ws/src/perception/scripts/rgbd_hand_gesture_recognition.py
+++ b/projects/opendr_ws/src/perception/scripts/rgbd_hand_gesture_recognition.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/opendr_ws/src/perception/scripts/semantic_segmentation_bisenet.py b/projects/opendr_ws/src/perception/scripts/semantic_segmentation_bisenet.py
index 9c56895ec6..32390c9157 100644
--- a/projects/opendr_ws/src/perception/scripts/semantic_segmentation_bisenet.py
+++ b/projects/opendr_ws/src/perception/scripts/semantic_segmentation_bisenet.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/opendr_ws/src/perception/scripts/skeleton_based_action_recognition.py b/projects/opendr_ws/src/perception/scripts/skeleton_based_action_recognition.py
index 12821921e6..0556acfd52 100644
--- a/projects/opendr_ws/src/perception/scripts/skeleton_based_action_recognition.py
+++ b/projects/opendr_ws/src/perception/scripts/skeleton_based_action_recognition.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/opendr_ws/src/perception/scripts/speech_command_recognition.py b/projects/opendr_ws/src/perception/scripts/speech_command_recognition.py
index 2c2115600b..4726b478a1 100755
--- a/projects/opendr_ws/src/perception/scripts/speech_command_recognition.py
+++ b/projects/opendr_ws/src/perception/scripts/speech_command_recognition.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/opendr_ws/src/perception/scripts/video_activity_recognition.py b/projects/opendr_ws/src/perception/scripts/video_activity_recognition.py
index 489bf56073..b79a462e3a 100755
--- a/projects/opendr_ws/src/perception/scripts/video_activity_recognition.py
+++ b/projects/opendr_ws/src/perception/scripts/video_activity_recognition.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/opendr_ws/src/ros_bridge/setup.py b/projects/opendr_ws/src/ros_bridge/setup.py
index fe148f972c..b5479915ae 100644
--- a/projects/opendr_ws/src/ros_bridge/setup.py
+++ b/projects/opendr_ws/src/ros_bridge/setup.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/opendr_ws/src/ros_bridge/src/opendr_bridge/bridge.py b/projects/opendr_ws/src/ros_bridge/src/opendr_bridge/bridge.py
index 36d5bbe5a7..fe7e4171f2 100755
--- a/projects/opendr_ws/src/ros_bridge/src/opendr_bridge/bridge.py
+++ b/projects/opendr_ws/src/ros_bridge/src/opendr_bridge/bridge.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/opendr_ws/src/simulation/scripts/human_model_generation_client.py b/projects/opendr_ws/src/simulation/scripts/human_model_generation_client.py
index 902401fa70..1f9470f9c6 100644
--- a/projects/opendr_ws/src/simulation/scripts/human_model_generation_client.py
+++ b/projects/opendr_ws/src/simulation/scripts/human_model_generation_client.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/opendr_ws/src/simulation/scripts/human_model_generation_service.py b/projects/opendr_ws/src/simulation/scripts/human_model_generation_service.py
index 3036aa7fec..f869d989b3 100644
--- a/projects/opendr_ws/src/simulation/scripts/human_model_generation_service.py
+++ b/projects/opendr_ws/src/simulation/scripts/human_model_generation_service.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/activity_recognition/demos/online_recognition/demo.py b/projects/perception/activity_recognition/demos/online_recognition/demo.py
index 4fccd16c3f..5bfd19d9ed 100644
--- a/projects/perception/activity_recognition/demos/online_recognition/demo.py
+++ b/projects/perception/activity_recognition/demos/online_recognition/demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/activity_recognition/demos/online_recognition/setup.py b/projects/perception/activity_recognition/demos/online_recognition/setup.py
index efdf8db3dd..571a9c8e7f 100644
--- a/projects/perception/activity_recognition/demos/online_recognition/setup.py
+++ b/projects/perception/activity_recognition/demos/online_recognition/setup.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/face_recognition/demos/benchmarking_demo.py b/projects/perception/face_recognition/demos/benchmarking_demo.py
index 55b6447764..e4278d570a 100644
--- a/projects/perception/face_recognition/demos/benchmarking_demo.py
+++ b/projects/perception/face_recognition/demos/benchmarking_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/face_recognition/demos/eval_demo.py b/projects/perception/face_recognition/demos/eval_demo.py
index 5ec25670d0..91d8095bc2 100644
--- a/projects/perception/face_recognition/demos/eval_demo.py
+++ b/projects/perception/face_recognition/demos/eval_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/face_recognition/demos/inference_demo.py b/projects/perception/face_recognition/demos/inference_demo.py
index b8394f3910..7e56da2cb6 100644
--- a/projects/perception/face_recognition/demos/inference_demo.py
+++ b/projects/perception/face_recognition/demos/inference_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/demo.py b/projects/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/demo.py
index b148434576..7939ac7aef 100644
--- a/projects/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/demo.py
+++ b/projects/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/heart_anomaly_detection/demo.py b/projects/perception/heart_anomaly_detection/demo.py
index 5a90f79678..11cd1a426c 100644
--- a/projects/perception/heart_anomaly_detection/demo.py
+++ b/projects/perception/heart_anomaly_detection/demo.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/lightweight_open_pose/demos/benchmarking_demo.py b/projects/perception/lightweight_open_pose/demos/benchmarking_demo.py
index 7b7878565a..cc80487d70 100644
--- a/projects/perception/lightweight_open_pose/demos/benchmarking_demo.py
+++ b/projects/perception/lightweight_open_pose/demos/benchmarking_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/lightweight_open_pose/demos/eval_demo.py b/projects/perception/lightweight_open_pose/demos/eval_demo.py
index a56bf081ac..3ecd6c6884 100644
--- a/projects/perception/lightweight_open_pose/demos/eval_demo.py
+++ b/projects/perception/lightweight_open_pose/demos/eval_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/lightweight_open_pose/demos/inference_demo.py b/projects/perception/lightweight_open_pose/demos/inference_demo.py
index 126ab6cc2c..1b494919fa 100644
--- a/projects/perception/lightweight_open_pose/demos/inference_demo.py
+++ b/projects/perception/lightweight_open_pose/demos/inference_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/lightweight_open_pose/demos/webcam_demo.py b/projects/perception/lightweight_open_pose/demos/webcam_demo.py
index ed56d24db2..149783d1e1 100644
--- a/projects/perception/lightweight_open_pose/demos/webcam_demo.py
+++ b/projects/perception/lightweight_open_pose/demos/webcam_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/lightweight_open_pose/jetbot/fall_controller.py b/projects/perception/lightweight_open_pose/jetbot/fall_controller.py
index e4d04daff4..cc9ecb32a9 100644
--- a/projects/perception/lightweight_open_pose/jetbot/fall_controller.py
+++ b/projects/perception/lightweight_open_pose/jetbot/fall_controller.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/active.py b/projects/perception/lightweight_open_pose/jetbot/utils/active.py
index 9ff075daab..c1cfa0f5fb 100644
--- a/projects/perception/lightweight_open_pose/jetbot/utils/active.py
+++ b/projects/perception/lightweight_open_pose/jetbot/utils/active.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/pose_controller.py b/projects/perception/lightweight_open_pose/jetbot/utils/pose_controller.py
index 95569921a1..f7b3b8c10c 100644
--- a/projects/perception/lightweight_open_pose/jetbot/utils/pose_controller.py
+++ b/projects/perception/lightweight_open_pose/jetbot/utils/pose_controller.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/pose_utils.py b/projects/perception/lightweight_open_pose/jetbot/utils/pose_utils.py
index cb8a4724df..e36420c1ba 100644
--- a/projects/perception/lightweight_open_pose/jetbot/utils/pose_utils.py
+++ b/projects/perception/lightweight_open_pose/jetbot/utils/pose_utils.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/robot_interface.py b/projects/perception/lightweight_open_pose/jetbot/utils/robot_interface.py
index 72c845ab33..61784ca0a2 100644
--- a/projects/perception/lightweight_open_pose/jetbot/utils/robot_interface.py
+++ b/projects/perception/lightweight_open_pose/jetbot/utils/robot_interface.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/visualization.py b/projects/perception/lightweight_open_pose/jetbot/utils/visualization.py
index 2f807ccc09..c89ed76da8 100644
--- a/projects/perception/lightweight_open_pose/jetbot/utils/visualization.py
+++ b/projects/perception/lightweight_open_pose/jetbot/utils/visualization.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/webots.py b/projects/perception/lightweight_open_pose/jetbot/utils/webots.py
index 99c60f7e11..3daceecf58 100644
--- a/projects/perception/lightweight_open_pose/jetbot/utils/webots.py
+++ b/projects/perception/lightweight_open_pose/jetbot/utils/webots.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/multimodal_human_centric/gesture_recognition_demo.py b/projects/perception/multimodal_human_centric/gesture_recognition_demo.py
index fb1365d953..7309be3715 100644
--- a/projects/perception/multimodal_human_centric/gesture_recognition_demo.py
+++ b/projects/perception/multimodal_human_centric/gesture_recognition_demo.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/centernet/eval_demo.py b/projects/perception/object_detection_2d/centernet/eval_demo.py
index 3138c576e2..48b1d273cb 100644
--- a/projects/perception/object_detection_2d/centernet/eval_demo.py
+++ b/projects/perception/object_detection_2d/centernet/eval_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/centernet/inference_demo.py b/projects/perception/object_detection_2d/centernet/inference_demo.py
index 6989c5136f..5715ad3226 100644
--- a/projects/perception/object_detection_2d/centernet/inference_demo.py
+++ b/projects/perception/object_detection_2d/centernet/inference_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/centernet/train_demo.py b/projects/perception/object_detection_2d/centernet/train_demo.py
index a51bf4d3aa..d55bee9bba 100644
--- a/projects/perception/object_detection_2d/centernet/train_demo.py
+++ b/projects/perception/object_detection_2d/centernet/train_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/detr/eval_demo.py b/projects/perception/object_detection_2d/detr/eval_demo.py
index 17c9d15094..1dd831b740 100644
--- a/projects/perception/object_detection_2d/detr/eval_demo.py
+++ b/projects/perception/object_detection_2d/detr/eval_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/detr/inference_demo.py b/projects/perception/object_detection_2d/detr/inference_demo.py
index bae0c7ad66..d0fa231075 100755
--- a/projects/perception/object_detection_2d/detr/inference_demo.py
+++ b/projects/perception/object_detection_2d/detr/inference_demo.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/detr/train_demo.py b/projects/perception/object_detection_2d/detr/train_demo.py
index 3132fea68c..9aa93f225e 100644
--- a/projects/perception/object_detection_2d/detr/train_demo.py
+++ b/projects/perception/object_detection_2d/detr/train_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/gem/inference_demo.py b/projects/perception/object_detection_2d/gem/inference_demo.py
index b5c221353f..5387f81afd 100755
--- a/projects/perception/object_detection_2d/gem/inference_demo.py
+++ b/projects/perception/object_detection_2d/gem/inference_demo.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/retinaface/eval_demo.py b/projects/perception/object_detection_2d/retinaface/eval_demo.py
index c512ad4fcb..e64c086088 100644
--- a/projects/perception/object_detection_2d/retinaface/eval_demo.py
+++ b/projects/perception/object_detection_2d/retinaface/eval_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/retinaface/inference_demo.py b/projects/perception/object_detection_2d/retinaface/inference_demo.py
index 81bfe738f9..385c008c6b 100644
--- a/projects/perception/object_detection_2d/retinaface/inference_demo.py
+++ b/projects/perception/object_detection_2d/retinaface/inference_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/retinaface/train_demo.py b/projects/perception/object_detection_2d/retinaface/train_demo.py
index ea937cf85c..b1d135bf42 100644
--- a/projects/perception/object_detection_2d/retinaface/train_demo.py
+++ b/projects/perception/object_detection_2d/retinaface/train_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/ssd/eval_demo.py b/projects/perception/object_detection_2d/ssd/eval_demo.py
index 936addd09d..93e413497b 100644
--- a/projects/perception/object_detection_2d/ssd/eval_demo.py
+++ b/projects/perception/object_detection_2d/ssd/eval_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/ssd/inference_demo.py b/projects/perception/object_detection_2d/ssd/inference_demo.py
index 7e8cd15018..6efc451e7d 100644
--- a/projects/perception/object_detection_2d/ssd/inference_demo.py
+++ b/projects/perception/object_detection_2d/ssd/inference_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/ssd/train_demo.py b/projects/perception/object_detection_2d/ssd/train_demo.py
index 56ce546aad..b0d875269e 100644
--- a/projects/perception/object_detection_2d/ssd/train_demo.py
+++ b/projects/perception/object_detection_2d/ssd/train_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/yolov3/eval_demo.py b/projects/perception/object_detection_2d/yolov3/eval_demo.py
index a4a3cc33d0..d64ebf6cac 100644
--- a/projects/perception/object_detection_2d/yolov3/eval_demo.py
+++ b/projects/perception/object_detection_2d/yolov3/eval_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/yolov3/inference_demo.py b/projects/perception/object_detection_2d/yolov3/inference_demo.py
index f1e12877b5..d2c3dfd346 100644
--- a/projects/perception/object_detection_2d/yolov3/inference_demo.py
+++ b/projects/perception/object_detection_2d/yolov3/inference_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_2d/yolov3/train_demo.py b/projects/perception/object_detection_2d/yolov3/train_demo.py
index 0aec26330e..bc6af4c515 100644
--- a/projects/perception/object_detection_2d/yolov3/train_demo.py
+++ b/projects/perception/object_detection_2d/yolov3/train_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/data_generators.py b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/data_generators.py
index 6e4e17ea44..b0ab3ff276 100644
--- a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/data_generators.py
+++ b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/data_generators.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aarhus University.
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/demo.py b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/demo.py
index c2b29dc5e3..d113b26a05 100644
--- a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/demo.py
+++ b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/draw_point_clouds.py b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/draw_point_clouds.py
index e44111dd81..2f204926d0 100644
--- a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/draw_point_clouds.py
+++ b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/draw_point_clouds.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aarhus University.
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/metrics.py b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/metrics.py
index ce4fe77009..9c6432aeea 100644
--- a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/metrics.py
+++ b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/metrics.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aarhus University.
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/channel.py b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/channel.py
index 08cb2b2ec8..f315c186e8 100644
--- a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/channel.py
+++ b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/channel.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aarhus University.
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/main.py b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/main.py
index b2dbffa19f..d8e412d354 100644
--- a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/main.py
+++ b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/main.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aarhus University.
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/o3m_lidar.py b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/o3m_lidar.py
index 047fa96900..71a80dac38 100644
--- a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/o3m_lidar.py
+++ b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/o3m_lidar.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aarhus University.
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/structures.py b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/structures.py
index cb29c990c2..3b9c383a30 100644
--- a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/structures.py
+++ b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/structures.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aarhus University.
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/rplidar_processor.py b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/rplidar_processor.py
index b0edc349e8..092f171ab5 100644
--- a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/rplidar_processor.py
+++ b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/rplidar_processor.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aarhus University.
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/setup.py b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/setup.py
index 8e514d9c58..08dd296a54 100644
--- a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/setup.py
+++ b/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/setup.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/data_generators.py b/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/data_generators.py
index 86d29514d5..898d8c678e 100644
--- a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/data_generators.py
+++ b/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/data_generators.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/demo.py b/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/demo.py
index fcb31f09c5..c6a0819a69 100644
--- a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/demo.py
+++ b/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/setup.py b/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/setup.py
index 8c68576c83..f6477446b9 100644
--- a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/setup.py
+++ b/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/setup.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/panoptic_segmentation/efficient_ps/example_usage.py b/projects/perception/panoptic_segmentation/efficient_ps/example_usage.py
index 078b6013d4..0d38de553e 100644
--- a/projects/perception/panoptic_segmentation/efficient_ps/example_usage.py
+++ b/projects/perception/panoptic_segmentation/efficient_ps/example_usage.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/semantic_segmentation/bisenet/eval_demo.py b/projects/perception/semantic_segmentation/bisenet/eval_demo.py
index cd694b83d3..97f6b6e6ed 100644
--- a/projects/perception/semantic_segmentation/bisenet/eval_demo.py
+++ b/projects/perception/semantic_segmentation/bisenet/eval_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/semantic_segmentation/bisenet/inference_demo.py b/projects/perception/semantic_segmentation/bisenet/inference_demo.py
index 83b55cc03e..616918a961 100644
--- a/projects/perception/semantic_segmentation/bisenet/inference_demo.py
+++ b/projects/perception/semantic_segmentation/bisenet/inference_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/semantic_segmentation/bisenet/train_demo.py b/projects/perception/semantic_segmentation/bisenet/train_demo.py
index 4ce5c85ede..d419f5b45c 100644
--- a/projects/perception/semantic_segmentation/bisenet/train_demo.py
+++ b/projects/perception/semantic_segmentation/bisenet/train_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/skeleton_based_action_recognition/demos/demo.py b/projects/perception/skeleton_based_action_recognition/demos/demo.py
index 4d4342094f..85923b7008 100644
--- a/projects/perception/skeleton_based_action_recognition/demos/demo.py
+++ b/projects/perception/skeleton_based_action_recognition/demos/demo.py
@@ -1,5 +1,5 @@
 
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/skeleton_based_action_recognition/demos/skeleton_extraction.py b/projects/perception/skeleton_based_action_recognition/demos/skeleton_extraction.py
index 5ce55af19e..cf7bb9148d 100644
--- a/projects/perception/skeleton_based_action_recognition/demos/skeleton_extraction.py
+++ b/projects/perception/skeleton_based_action_recognition/demos/skeleton_extraction.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment.py
index e20499af66..f0faf88bc0 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment.py
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment_real_data.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment_real_data.py
index 551520ecb6..db6336bc75 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment_real_data.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment_real_data.py
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/err_collector b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/err_collector
index 01fe353d54..6531848c83 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/err_collector
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/err_collector
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/fmp_plot b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/fmp_plot
index 0d2da01572..5434bebf0c 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/fmp_plot
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/fmp_plot
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/gt_mapping b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/gt_mapping
index b9ab4025a3..ed682e23c1 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/gt_mapping
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/gt_mapping
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/occ_map_saver b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/occ_map_saver
index e92227c02f..545ebfcbf5 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/occ_map_saver
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/occ_map_saver
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/odom_pose b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/odom_pose
index b8a3d16937..8f97170d2f 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/odom_pose
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/odom_pose
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/pose_error_calc b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/pose_error_calc
index e02026638c..76a7e627b0 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/pose_error_calc
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/pose_error_calc
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/package.xml b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/package.xml
index a36601b59d..db53efca55 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/package.xml
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/package.xml
@@ -7,7 +7,7 @@
 
   Jose Arce
 
-  Copyright 2020-2021 OpenDR European Project
+  Copyright 2020-2022 OpenDR European Project
 
   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_curves.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_curves.py
index 17a6463a8a..f3e90c6e2a 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_curves.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_curves.py
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_histograms.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_histograms.py
index 67bda9c3b1..86f91673a1 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_histograms.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_histograms.py
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/method_comparison.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/method_comparison.py
index 52113d4fa2..8159a438bd 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/method_comparison.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/method_comparison.py
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/setup.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/setup.py
index 113720d3fd..4e02be6146 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/setup.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/setup.py
@@ -1,6 +1,6 @@
 # ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
 
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/enums/disc_states.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/enums/disc_states.py
index 18170367d5..3fe904b8cc 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/enums/disc_states.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/enums/disc_states.py
@@ -1,5 +1,5 @@
 
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/error_data_collector.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/error_data_collector.py
index 21e128f6db..e17652199e 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/error_data_collector.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/error_data_collector.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/fmp_plotter.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/fmp_plotter.py
index d76544b370..771958870d 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/fmp_plotter.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/fmp_plotter.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ground_truth_mapping.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ground_truth_mapping.py
index 82a29b507e..81c90f8d48 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ground_truth_mapping.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ground_truth_mapping.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/map_colorizer.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/map_colorizer.py
index fb3e7788e6..d3172cd4a7 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/map_colorizer.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/map_colorizer.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/net_utils.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/net_utils.py
index e6b3a0e4b9..cb62b4241c 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/net_utils.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/net_utils.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/occ_map_saver.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/occ_map_saver.py
index 989ae66f85..7546c32eee 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/occ_map_saver.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/occ_map_saver.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/odom_pose_publisher.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/odom_pose_publisher.py
index 177021b55b..6298dd830a 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/odom_pose_publisher.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/odom_pose_publisher.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/pose_error_calculator.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/pose_error_calculator.py
index 785b59f044..d8343da2be 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/pose_error_calculator.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/pose_error_calculator.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ros_launcher.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ros_launcher.py
index 1830942db4..f7b58dcb3d 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ros_launcher.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ros_launcher.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/roscore.py b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/roscore.py
index b33e3b1530..4917634114 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/roscore.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/roscore.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/package.xml b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/package.xml
index 4e508a9945..3c47d8d9a4 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/package.xml
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/package.xml
@@ -7,7 +7,7 @@
 
   Jose Arce
 
-  Copyright 2020-2021 OpenDR European Project
+  Copyright 2020-2022 OpenDR European Project
 
   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scripts/mapsim2d.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scripts/mapsim2d.py
index f61d9cab39..84a98dface 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scripts/mapsim2d.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scripts/mapsim2d.py
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/setup.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/setup.py
index 74d11228e4..a0cd3da3cd 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/setup.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/setup.py
@@ -1,5 +1,5 @@
 # ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/closed_shape_2D.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/closed_shape_2D.py
index 5ca4030cd9..6ccdd489f0 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/closed_shape_2D.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/closed_shape_2D.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/line.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/line.py
index f446df4b86..328dbbe837 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/line.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/line.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/polygon.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/polygon.py
index adf25e5e22..009f79d5e3 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/polygon.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/polygon.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/pose.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/pose.py
index b67cb7287d..731d33af3e 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/pose.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/pose.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/transform.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/transform.py
index 2bd40b1119..4b2bdef303 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/transform.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/transform.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/obstacle.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/obstacle.py
index c35c318f58..dce08e6c72 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/obstacle.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/obstacle.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/polygonal_obstacle.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/polygonal_obstacle.py
index cd9dbc5239..8325382910 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/polygonal_obstacle.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/polygonal_obstacle.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_simulator_2d.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_simulator_2d.py
index eff82bc10e..e9b065ba7b 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_simulator_2d.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_simulator_2d.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/command.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/command.py
index be1bde41b9..538560287f 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/command.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/command.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/bool_msg_cmd.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/bool_msg_cmd.py
index f44ea79c8a..965af89776 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/bool_msg_cmd.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/bool_msg_cmd.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/message_cmd.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/message_cmd.py
index b97e0691c4..2e30563c0b 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/message_cmd.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/message_cmd.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/comment_cmd.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/comment_cmd.py
index 20114c27de..d44e20051c 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/comment_cmd.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/comment_cmd.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/misc_cmd.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/misc_cmd.py
index 73f690fe19..9d58223818 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/misc_cmd.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/misc_cmd.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/scan_cmd.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/scan_cmd.py
index 084140aecc..31b7d29519 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/scan_cmd.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/scan_cmd.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/sleep_cmd.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/sleep_cmd.py
index 7dc666fa37..97829ae472 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/sleep_cmd.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/sleep_cmd.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_circular_cmd.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_circular_cmd.py
index 831073ff10..fedf5970cc 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_circular_cmd.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_circular_cmd.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_cmd.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_cmd.py
index c1038f23a7..57f0dbb402 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_cmd.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_cmd.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_interpol_cmd.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_interpol_cmd.py
index 31bb050071..f0fc00a82c 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_interpol_cmd.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_interpol_cmd.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_linear_cmd.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_linear_cmd.py
index 393e9534d5..53751a9d8d 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_linear_cmd.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_linear_cmd.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_pose_cmd.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_pose_cmd.py
index bba96cc1b8..7112e6b9ea 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_pose_cmd.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_pose_cmd.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_rotation_cmd.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_rotation_cmd.py
index d563eb8691..cdc83d5abf 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_rotation_cmd.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_rotation_cmd.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/utils.py b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/utils.py
index 6c50bd2b1c..a7604f731a 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/utils.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/utils.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/Makefile b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/Makefile
index bce875a5c4..df385ec959 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/Makefile
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,12 +15,12 @@
 -include ./global.mk

 ifeq ($(CARMENSUPPORT),1)
-SUBDIRS=utils sensor log configfile scanmatcher carmenwrapper gridfastslam gui gfs-carmen 
+SUBDIRS=utils sensor log configfile scanmatcher carmenwrapper gridfastslam gui gfs-carmen
 else
 ifeq ($(MACOSX),1)
-SUBDIRS=utils sensor log configfile scanmatcher gridfastslam 
+SUBDIRS=utils sensor log configfile scanmatcher gridfastslam
 else
-SUBDIRS=utils sensor log configfile scanmatcher gridfastslam gui 
+SUBDIRS=utils sensor log configfile scanmatcher gridfastslam gui
 endif
 endif
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/Makefile b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/Makefile
index 3d3626ec36..945efd360a 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/Makefile
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,7 +13,7 @@
 # limitations under the License.
 OBJS= configfile.o
-APPS= configfile_test 
+APPS= configfile_test

 -include ../global.mk
 -include ../build_tools/Makefile.generic-shared-object
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile.cpp
index 45e4d6fae8..113e81c2aa 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile.cpp
@@ -20,7 +20,7 @@
  *
  *****************************************************************/

-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile_test.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile_test.cpp
index 0f1b29ef19..98e1e83613 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile_test.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile_test.cpp
@@ -20,7 +20,7 @@
  *
  *****************************************************************/

-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/Makefile b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/Makefile
index 93f9549744..5cd20214e9 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/Makefile
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,8 +15,8 @@
 OBJS=
 APPS= map_test

-LDFLAGS+= 
-CPPFLAGS+= -DNDEBUG 
+LDFLAGS+=
+CPPFLAGS+= -DNDEBUG

 -include ../global.mk
 -include ../build_tools/Makefile.app
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/graphmap.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/graphmap.cpp
index 6b420b0173..9ca6cdf074 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/graphmap.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/graphmap.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/map_test.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/map_test.cpp
index 32867f7ee9..cba0bc945b 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/map_test.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/map_test.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/Makefile b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/Makefile
index 7275063883..8a0f97faac 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/Makefile
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2log.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2log.cpp
index 520d45228c..ecee6011c7 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2log.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2log.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2neff.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2neff.cpp
index b51c9eae76..ddc29e647e 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2neff.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2neff.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2rec.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2rec.cpp
index 25263f7e72..0940f9fbc3 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2rec.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2rec.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfsreader.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfsreader.cpp
index 00b544b405..cd6ec5f6f9 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfsreader.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfsreader.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor.cpp
index 31b523d2d1..50cd006089 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor_tree.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor_tree.cpp
index 3b48d0e5ce..747178fc00 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor_tree.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor_tree.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/motionmodel.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/motionmodel.cpp
index aabb75c4e2..830c117afd 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/motionmodel.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/motionmodel.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/configfile/configfile.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/configfile/configfile.h
index 682b7d5b72..89ebe532f3 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/configfile/configfile.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/configfile/configfile.h
@@ -21,7 +21,7 @@
  *****************************************************************/

 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/accessstate.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/accessstate.h
index 6595233dba..5d2567112f 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/accessstate.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/accessstate.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/array2d.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/array2d.h
index dbcbe8127b..50b6a0838b 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/array2d.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/array2d.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/harray2d.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/harray2d.h
index a47476bc1d..880fe1b3e9 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/harray2d.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/harray2d.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/map.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/map.h
index 3b667461cb..a488ce5aff 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/map.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/map.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gfsreader.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gfsreader.h
index 617a9dea2c..55b453427e 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gfsreader.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gfsreader.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.h
index 7268f18b8d..3c163293d3 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.hxx b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.hxx
index 998e61dd8d..4245a4d405 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.hxx
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.hxx
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/motionmodel.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/motionmodel.h
index 6daf5b8e76..0f4e351279 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/motionmodel.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/motionmodel.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/carmenconfiguration.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/carmenconfiguration.h
index 491e117d06..0aa37b5104 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/carmenconfiguration.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/carmenconfiguration.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/configuration.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/configuration.h
index 3a2e278dc1..c40d1e6fc0 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/configuration.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/configuration.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorlog.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorlog.h
index dfbbba1b0d..3edd2397cc 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorlog.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorlog.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorstream.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorstream.h
index f6b2933750..44e267d33a 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorstream.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorstream.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/particlefilter.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/particlefilter.h
index 278246526e..9d1c328572 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/particlefilter.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/particlefilter.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/pf.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/pf.h
index 109bc30322..0bb4fae251 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/pf.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/pf.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/eig3.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/eig3.h
index c2d691dec3..3c4c4aaa33 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/eig3.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/eig3.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/gridlinetraversal.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/gridlinetraversal.h
index a82c496d6e..e0b90fecfa 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/gridlinetraversal.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/gridlinetraversal.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/icp.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/icp.h
index 1fa9d7dad8..4ae0733e00 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/icp.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/icp.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcher.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcher.h
index 2984ab6b14..8615d8e866 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcher.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcher.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcherprocessor.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcherprocessor.h
index 439fe9689d..8e7bca4084 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcherprocessor.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcherprocessor.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/smmap.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/smmap.h
index 4567732024..bb06ed541c 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/smmap.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/smmap.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensor.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensor.h
index e608f60b47..0d847ede56 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensor.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensor.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensoreading.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensoreading.h
index c6cd2bf49f..c1768919a1 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensoreading.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensoreading.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensorreading.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensorreading.h
index aca1637dbc..95883b24b7 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensorreading.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensorreading.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometryreading.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometryreading.h
index 9072eadf8e..46c9f79f6d 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometryreading.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometryreading.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometrysensor.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometrysensor.h
index 078e5626ab..d28290fc98 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometrysensor.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometrysensor.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangereading.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangereading.h
index fccc2aa86c..60f10ea57d 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangereading.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangereading.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangesensor.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangesensor.h
index e02c1a1207..74a025d24a 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangesensor.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangesensor.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/autoptr.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/autoptr.h
index bf060e6b5d..6eec2daa24 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/autoptr.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/autoptr.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/commandline.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/commandline.h
index 8ba1b16a2c..830bc5b9ca 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/commandline.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/commandline.h
@@ -21,7 +21,7 @@
  *****************************************************************/

 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/gvalues.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/gvalues.h
index 38584d8bff..c1ce030c4e 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/gvalues.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/gvalues.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/macro_params.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/macro_params.h
index 929a308629..94f2da45c9 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/macro_params.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/macro_params.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/movement.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/movement.h
index 27d9f995f5..1ff7897b78 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/movement.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/movement.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/point.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/point.h
index bb25290f17..5f1f6e7001 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/point.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/point.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/stat.h b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/stat.h
index 17f217ee45..b9b7af86ac 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/stat.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/stat.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/Makefile b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/Makefile
index 15dadfde65..be76533db2 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/Makefile
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,8 +15,8 @@
 OBJS= configuration.o carmenconfiguration.o sensorlog.o sensorstream.o
 APPS= log_test log_plot scanstudio2carmen rdk2carmen

-LDFLAGS+= -lsensor_range -lsensor_odometry -lsensor_base 
-CPPFLAGS+= -I../sensor 
+LDFLAGS+= -lsensor_range -lsensor_odometry -lsensor_base
+CPPFLAGS+= -I../sensor

 -include ../global.mk
 -include ../build_tools/Makefile.generic-shared-object
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/carmenconfiguration.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/carmenconfiguration.cpp
index b6cb7f974c..c19d79183f 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/carmenconfiguration.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/carmenconfiguration.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/configuration.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/configuration.cpp
index 5e5ecc2aac..6a3b1c449a 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/configuration.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/configuration.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_plot.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_plot.cpp
index 018a655e2e..b01f3ad437 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_plot.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_plot.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_test.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_test.cpp
index 57f5084552..05ac4a7bb9 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_test.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_test.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/rdk2carmen.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/rdk2carmen.cpp
index 83d8c3dad9..7b9af82e17 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/rdk2carmen.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/rdk2carmen.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/scanstudio2carmen.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/scanstudio2carmen.cpp
index d6c427ca9f..b12eae20db 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/scanstudio2carmen.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/scanstudio2carmen.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorlog.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorlog.cpp
index ce58cfaae8..a2697a2da2 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorlog.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorlog.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorstream.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorstream.cpp
index 4b9ca457b0..3c58d9806e 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorstream.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorstream.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/Makefile b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/Makefile
index 6345d3724f..494dfcfecc 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/Makefile
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/eig3.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/eig3.cpp
index ec91cb50b1..f62d3f933e 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/eig3.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/eig3.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/icptest.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/icptest.cpp
index 791c168cf2..e738cc811b 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/icptest.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/icptest.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/line_test.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/line_test.cpp
index 99ec050091..bbb45bc9d9 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/line_test.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/line_test.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatch_test.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatch_test.cpp
index da7623c0df..71624be15d 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatch_test.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatch_test.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.cpp
index 8181c76bcf..b69814b8a0 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.new.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.new.cpp
index ea789d3644..0f2d3ead72 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.new.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.new.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcherprocessor.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcherprocessor.cpp
index ab40d0f23f..027b26108f 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcherprocessor.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcherprocessor.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/smmap.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/smmap.cpp
index a149e83fdb..133ca15e9d 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/smmap.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/smmap.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/Makefile b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/Makefile
index 8f81903590..4ec1f15b2c 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/Makefile
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/Makefile b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/Makefile
index 6d79eca48b..088d8d1261 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/Makefile
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensor.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensor.cpp
index 0eb58ce130..9900433198 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensor.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensor.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensorreading.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensorreading.cpp
index 3a2fc1622f..6c8a09d160 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensorreading.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensorreading.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/Makefile b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/Makefile
index 19ac2f334d..eed998402f 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/Makefile
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometryreading.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometryreading.cpp
index 86766749df..4412c96d16 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometryreading.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometryreading.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometrysensor.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometrysensor.cpp
index 622e507eeb..9e58ff6721 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometrysensor.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometrysensor.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/Makefile b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/Makefile
index 8ccb09831c..f86cd62043 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/Makefile
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,9 +14,9 @@
 
 -include ../../global.mk
 
-CPPFLAGS+= -I../
+CPPFLAGS+= -I../
 LDFLAGS+= -lsensor_base
 
-OBJS= rangesensor.o rangereading.o
+OBJS= rangesensor.o rangereading.o
 
 -include ../../build_tools/Makefile.generic-shared-object
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangereading.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangereading.cpp
index ec6f800e1e..e02c2e8722 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangereading.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangereading.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangesensor.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangesensor.cpp
index 981af9b1ad..ebee3809f2 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangesensor.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangesensor.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/Makefile b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/Makefile
index a0a693eb5c..1493284405 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/Makefile
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/autoptr_test.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/autoptr_test.cpp
index e04b1c2b2f..437a9a8ccf 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/autoptr_test.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/autoptr_test.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/movement.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/movement.cpp
index 7e38167ee2..c029ded513 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/movement.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/movement.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat.cpp
index 69f617a2ee..a040ea4645 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat_test.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat_test.cpp
index ec3e7b0116..261da030d7 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat_test.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat_test.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/main.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/main.cpp
index 73f060885e..ea88d87d85 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/main.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/main.cpp
@@ -29,7 +29,7 @@
 /* Author: Brian Gerkey */
 
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/nodelet.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/nodelet.cpp
index aab4adbd63..8f44604bf9 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/nodelet.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/nodelet.cpp
@@ -27,7 +27,7 @@
  *
  */
 
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/replay.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/replay.cpp
index 7da5d2bedc..4749f511d3 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/replay.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/replay.cpp
@@ -2,7 +2,7 @@
  * Copyright 2015 Aldebaran
  */
 
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.cpp
index 535aefd1ec..b1cb892d07 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.cpp
@@ -30,7 +30,7 @@
 /* Author: Brian Gerkey */
 /* Modified by: Charles DuHadway */
 
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.h b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.h
index 1f2f248b1a..86ee0a07a6 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.h
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.h
@@ -30,7 +30,7 @@
 /* Author: Brian Gerkey */
 
 /*
- * Copyright 2020-2021 OpenDR European Project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/rtest.cpp b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/rtest.cpp
index 09599d0acb..1224a6baf6 100644
--- a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/rtest.cpp
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/rtest.cpp
@@ -29,7 +29,7 @@
 /* Author: Brian Gerkey */
 
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/test_map.py b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/test_map.py
index 33a1ab9921..7655d8f36f 100755
--- a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/test_map.py
+++ b/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/test_map.py
@@ -32,7 +32,7 @@
 # POSSIBILITY OF SUCH DAMAGE.
 #
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/perception/speech_command_recognition/demo.py b/projects/perception/speech_command_recognition/demo.py
index 224b17d594..2edb6665d6 100644
--- a/projects/perception/speech_command_recognition/demo.py
+++ b/projects/perception/speech_command_recognition/demo.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/simulation/SMPL+D_human_models/src/download_data.py b/projects/simulation/SMPL+D_human_models/src/download_data.py
index cece1d9bcd..4dc42bf2e9 100644
--- a/projects/simulation/SMPL+D_human_models/src/download_data.py
+++ b/projects/simulation/SMPL+D_human_models/src/download_data.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/simulation/SMPL+D_human_models/src/generate_models.py b/projects/simulation/SMPL+D_human_models/src/generate_models.py
index 3b7cfdacd9..007a852656 100644
--- a/projects/simulation/SMPL+D_human_models/src/generate_models.py
+++ b/projects/simulation/SMPL+D_human_models/src/generate_models.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/simulation/SMPL+D_human_models/webots/extract_anims.py b/projects/simulation/SMPL+D_human_models/webots/extract_anims.py
index 6a4026313d..a6f69c1cfc 100644
--- a/projects/simulation/SMPL+D_human_models/webots/extract_anims.py
+++ b/projects/simulation/SMPL+D_human_models/webots/extract_anims.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/Makefile b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/Makefile
index a22fd63291..f134727024 100644
--- a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/Makefile
+++ b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -24,6 +24,6 @@ ifndef WEBOTS_SKIN_ANIMATION_PATH
 WEBOTS_SKIN_ANIMATION_PATH = ../../libraries
 endif
 INCLUDE = -I"$(WEBOTS_SKIN_ANIMATION_PATH)/smpl_util/include"
-LIBRARIES = -L"$(WEBOTS_SKIN_ANIMATION_PATH)/smpl_util" -lsmpl_util
+LIBRARIES = -L"$(WEBOTS_SKIN_ANIMATION_PATH)/smpl_util" -lsmpl_util
 # Do not modify the following: this includes Webots global Makefile.include
 include $(WEBOTS_HOME_PATH)/resources/Makefile.include
diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/smpl_animation.c b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/smpl_animation.c
index 336fd07237..c85e324590 100644
--- a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/smpl_animation.c
+++ b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/smpl_animation.c
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/Makefile b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/Makefile
index 90434501e9..e6cb3fdc3f 100644
--- a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/Makefile
+++ b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/quaternion_private.h b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/quaternion_private.h
index a96cb2bd47..fa7b1ea0c2 100644
--- a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/quaternion_private.h
+++ b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/quaternion_private.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/smpl_util.h b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/smpl_util.h
index 0dae5d9214..78ebe40314 100644
--- a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/smpl_util.h
+++ b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/smpl_util.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/vector3_private.h b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/vector3_private.h
index 12b8a08ee4..1693b4bb66 100644
--- a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/vector3_private.h
+++ b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/vector3_private.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/quaternion.c b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/quaternion.c
index b35f01fecd..e9c1382666 100644
--- a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/quaternion.c
+++ b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/quaternion.c
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/smpl_util.c b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/smpl_util.c
index 8c9a49e325..1355e73c12 100644
--- a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/smpl_util.c
+++ b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/smpl_util.c
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/vector3.c b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/vector3.c
index 5a8f3f2486..bad16d855f 100644
--- a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/vector3.c
+++ b/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/vector3.c
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020-2021 OpenDR project
+ * Copyright 2020-2022 OpenDR European Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/projects/simulation/human_dataset_generation/background.py b/projects/simulation/human_dataset_generation/background.py
index d571b09761..a25cf5a481 100644
--- a/projects/simulation/human_dataset_generation/background.py
+++ b/projects/simulation/human_dataset_generation/background.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 Aristotle University of Thessaloniki
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/simulation/human_dataset_generation/create_background_images.py b/projects/simulation/human_dataset_generation/create_background_images.py
index 4a282e4d21..68be677cec 100644
--- a/projects/simulation/human_dataset_generation/create_background_images.py
+++ b/projects/simulation/human_dataset_generation/create_background_images.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 Aristotle University of Thessaloniki
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/simulation/human_dataset_generation/create_dataset.py b/projects/simulation/human_dataset_generation/create_dataset.py
index 801f19899c..860f85b7b2 100644
--- a/projects/simulation/human_dataset_generation/create_dataset.py
+++ b/projects/simulation/human_dataset_generation/create_dataset.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 Aristotle University of Thessaloniki
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/simulation/human_dataset_generation/data_generator.py b/projects/simulation/human_dataset_generation/data_generator.py
index 769fc0dc95..14a56000d6 100644
--- a/projects/simulation/human_dataset_generation/data_generator.py
+++ b/projects/simulation/human_dataset_generation/data_generator.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 Aristotle University of Thessaloniki
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/simulation/human_dataset_generation/reformat_cityscapes.py b/projects/simulation/human_dataset_generation/reformat_cityscapes.py
index e639173d38..cf7bc87a7b 100644
--- a/projects/simulation/human_dataset_generation/reformat_cityscapes.py
+++ b/projects/simulation/human_dataset_generation/reformat_cityscapes.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 Aristotle University of Thessaloniki
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/projects/utils/hyperparameter_tuner/hyperparameter_tuner_demo.py b/projects/utils/hyperparameter_tuner/hyperparameter_tuner_demo.py
index 4d54fa58ef..3adeb63a31 100644
--- a/projects/utils/hyperparameter_tuner/hyperparameter_tuner_demo.py
+++ b/projects/utils/hyperparameter_tuner/hyperparameter_tuner_demo.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/c_api/Makefile b/src/c_api/Makefile
index f872485fe0..1e1d783c8d 100644
--- a/src/c_api/Makefile
+++ b/src/c_api/Makefile
@@ -1,5 +1,5 @@
 #
-# Copyright 2020-2021 OpenDR project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/c_api/face_recognition.cpp b/src/c_api/face_recognition.cpp
index 10a86c5325..fd646d17d9 100644
--- a/src/c_api/face_recognition.cpp
+++ b/src/c_api/face_recognition.cpp
@@ -1,5 +1,4 @@
-//
-// Copyright 2020-2021 OpenDR project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/c_api/opendr_utils.cpp b/src/c_api/opendr_utils.cpp
index f9b4676cf2..b28f398303 100644
--- a/src/c_api/opendr_utils.cpp
+++ b/src/c_api/opendr_utils.cpp
@@ -1,5 +1,5 @@
 //
-// Copyright 2020-2021 OpenDR project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/_version.py b/src/opendr/_version.py
index 9234441d92..4c67cd2681 100644
--- a/src/opendr/_version.py
+++ b/src/opendr/_version.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/Makefile b/src/opendr/control/mobile_manipulation/Makefile
index cd43799f63..adefb820bf 100644
--- a/src/opendr/control/mobile_manipulation/Makefile
+++ b/src/opendr/control/mobile_manipulation/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/include/gripper_planner/base_gripper_planner.hpp b/src/opendr/control/mobile_manipulation/include/gripper_planner/base_gripper_planner.hpp
index f78529cd9c..7a06ead65f 100644
--- a/src/opendr/control/mobile_manipulation/include/gripper_planner/base_gripper_planner.hpp
+++ b/src/opendr/control/mobile_manipulation/include/gripper_planner/base_gripper_planner.hpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/include/gripper_planner/gaussian_mixture_model.hpp b/src/opendr/control/mobile_manipulation/include/gripper_planner/gaussian_mixture_model.hpp
index 5653c4ef41..6c4656a671 100644
--- a/src/opendr/control/mobile_manipulation/include/gripper_planner/gaussian_mixture_model.hpp
+++ b/src/opendr/control/mobile_manipulation/include/gripper_planner/gaussian_mixture_model.hpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/include/gripper_planner/gmm_planner.hpp b/src/opendr/control/mobile_manipulation/include/gripper_planner/gmm_planner.hpp
index 618b92e772..1eaca5bf12 100644
--- a/src/opendr/control/mobile_manipulation/include/gripper_planner/gmm_planner.hpp
+++ b/src/opendr/control/mobile_manipulation/include/gripper_planner/gmm_planner.hpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/include/gripper_planner/linear_planner.hpp b/src/opendr/control/mobile_manipulation/include/gripper_planner/linear_planner.hpp
index f7d4b4cd80..78524542ba 100644
--- a/src/opendr/control/mobile_manipulation/include/gripper_planner/linear_planner.hpp
+++ b/src/opendr/control/mobile_manipulation/include/gripper_planner/linear_planner.hpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_env.hpp b/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_env.hpp
index d63da28266..2177f103af 100644
--- a/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_env.hpp
+++ b/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_env.hpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_hsr.hpp b/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_hsr.hpp
index 1a92f3a21f..02a58ef018 100644
--- a/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_hsr.hpp
+++ b/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_hsr.hpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_pr2.hpp b/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_pr2.hpp
index d734fd11b2..30437dcb97 100644
--- a/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_pr2.hpp
+++ b/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_pr2.hpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_tiago.hpp b/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_tiago.hpp
index 0f6e6624d0..8abb775a5a 100644
--- a/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_tiago.hpp
+++ b/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/robot_tiago.hpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/utils.hpp b/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/utils.hpp
index ec3445bc0e..55ead439b7 100644
--- a/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/utils.hpp
+++ b/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/utils.hpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/worlds.hpp b/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/worlds.hpp
index 03be397737..f724a45837 100644
--- a/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/worlds.hpp
+++ b/src/opendr/control/mobile_manipulation/include/mobile_manipulation_rl/worlds.hpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/envs/__init__.py b/src/opendr/control/mobile_manipulation/mobileRL/envs/__init__.py
index 0e756f1367..47a701d784 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/envs/__init__.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/envs/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/envs/eeplanner.py b/src/opendr/control/mobile_manipulation/mobileRL/envs/eeplanner.py
index ffa8a6dc61..c0bf3ba275 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/envs/eeplanner.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/envs/eeplanner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/envs/env_utils.py b/src/opendr/control/mobile_manipulation/mobileRL/envs/env_utils.py
index 6fd24121ac..3972a41fda 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/envs/env_utils.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/envs/env_utils.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/envs/map.py b/src/opendr/control/mobile_manipulation/mobileRL/envs/map.py
index c06b7208bf..6d528ded5f 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/envs/map.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/envs/map.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/envs/mobile_manipulation_env.py b/src/opendr/control/mobile_manipulation/mobileRL/envs/mobile_manipulation_env.py
index 89892a81d1..13a17bbd1c 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/envs/mobile_manipulation_env.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/envs/mobile_manipulation_env.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/envs/robotenv.py b/src/opendr/control/mobile_manipulation/mobileRL/envs/robotenv.py
index c0e56b74e2..fb47e344c3 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/envs/robotenv.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/envs/robotenv.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/envs/simulator_api.py b/src/opendr/control/mobile_manipulation/mobileRL/envs/simulator_api.py
index bae72e7471..00a2c21f14 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/envs/simulator_api.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/envs/simulator_api.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/envs/tasks.py b/src/opendr/control/mobile_manipulation/mobileRL/envs/tasks.py
index fa1dec0b16..5bb0412e01 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/envs/tasks.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/envs/tasks.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/envs/tasks_chained.py b/src/opendr/control/mobile_manipulation/mobileRL/envs/tasks_chained.py
index 1340ef95f6..15c3175d68 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/envs/tasks_chained.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/envs/tasks_chained.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/evaluation.py b/src/opendr/control/mobile_manipulation/mobileRL/evaluation.py
index d52e1ce81d..b62d5ee5d2 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/evaluation.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/evaluation.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/handle_launchfiles.py b/src/opendr/control/mobile_manipulation/mobileRL/handle_launchfiles.py
index 33f0c36b2f..e1e68877db 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/handle_launchfiles.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/handle_launchfiles.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/stablebl_callbacks.py b/src/opendr/control/mobile_manipulation/mobileRL/stablebl_callbacks.py
index 3d65f445a8..fe685a66f4 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/stablebl_callbacks.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/stablebl_callbacks.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobileRL/utils.py b/src/opendr/control/mobile_manipulation/mobileRL/utils.py
index 5ab8c99301..a4daab39f9 100644
--- a/src/opendr/control/mobile_manipulation/mobileRL/utils.py
+++ b/src/opendr/control/mobile_manipulation/mobileRL/utils.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/mobile_manipulation_learner.py b/src/opendr/control/mobile_manipulation/mobile_manipulation_learner.py
index 7f97f002f4..660f3e129f 100644
--- a/src/opendr/control/mobile_manipulation/mobile_manipulation_learner.py
+++ b/src/opendr/control/mobile_manipulation/mobile_manipulation_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/src/gripper_planner/base_gripper_planner.cpp b/src/opendr/control/mobile_manipulation/src/gripper_planner/base_gripper_planner.cpp
index ecf7659994..4d57caf051 100644
--- a/src/opendr/control/mobile_manipulation/src/gripper_planner/base_gripper_planner.cpp
+++ b/src/opendr/control/mobile_manipulation/src/gripper_planner/base_gripper_planner.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/src/gripper_planner/gaussian_mixture_model.cpp b/src/opendr/control/mobile_manipulation/src/gripper_planner/gaussian_mixture_model.cpp
index 53acac4baa..54d30e125c 100644
--- a/src/opendr/control/mobile_manipulation/src/gripper_planner/gaussian_mixture_model.cpp
+++ b/src/opendr/control/mobile_manipulation/src/gripper_planner/gaussian_mixture_model.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/src/gripper_planner/gmm_planner.cpp b/src/opendr/control/mobile_manipulation/src/gripper_planner/gmm_planner.cpp
index 4b2657f0ca..f2c5f31fcb 100644
--- a/src/opendr/control/mobile_manipulation/src/gripper_planner/gmm_planner.cpp
+++ b/src/opendr/control/mobile_manipulation/src/gripper_planner/gmm_planner.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/src/gripper_planner/linear_planner.cpp b/src/opendr/control/mobile_manipulation/src/gripper_planner/linear_planner.cpp
index 7ed68d265e..5ccf4214ae 100644
--- a/src/opendr/control/mobile_manipulation/src/gripper_planner/linear_planner.cpp
+++ b/src/opendr/control/mobile_manipulation/src/gripper_planner/linear_planner.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/src/pybindings.cpp b/src/opendr/control/mobile_manipulation/src/pybindings.cpp
index 159562b73d..4c9920c12d 100644
--- a/src/opendr/control/mobile_manipulation/src/pybindings.cpp
+++ b/src/opendr/control/mobile_manipulation/src/pybindings.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/src/robot_env.cpp b/src/opendr/control/mobile_manipulation/src/robot_env.cpp
index 2cb86e8253..b39c89a1be 100644
--- a/src/opendr/control/mobile_manipulation/src/robot_env.cpp
+++ b/src/opendr/control/mobile_manipulation/src/robot_env.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/src/robot_hsr.cpp b/src/opendr/control/mobile_manipulation/src/robot_hsr.cpp
index cac6d63c3f..62cc44f7b7 100644
--- a/src/opendr/control/mobile_manipulation/src/robot_hsr.cpp
+++ b/src/opendr/control/mobile_manipulation/src/robot_hsr.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/src/robot_pr2.cpp b/src/opendr/control/mobile_manipulation/src/robot_pr2.cpp
index 19dabce27e..d8afd43816 100644
--- a/src/opendr/control/mobile_manipulation/src/robot_pr2.cpp
+++ b/src/opendr/control/mobile_manipulation/src/robot_pr2.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/src/robot_tiago.cpp b/src/opendr/control/mobile_manipulation/src/robot_tiago.cpp
index d528182242..25b92c1ee5 100644
--- a/src/opendr/control/mobile_manipulation/src/robot_tiago.cpp
+++ b/src/opendr/control/mobile_manipulation/src/robot_tiago.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/src/utils.cpp b/src/opendr/control/mobile_manipulation/src/utils.cpp
index 414940beed..e418d49b51 100644
--- a/src/opendr/control/mobile_manipulation/src/utils.cpp
+++ b/src/opendr/control/mobile_manipulation/src/utils.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/mobile_manipulation/src/worlds.cpp b/src/opendr/control/mobile_manipulation/src/worlds.cpp
index e5aa9c3c75..c305a03c2e 100644
--- a/src/opendr/control/mobile_manipulation/src/worlds.cpp
+++ b/src/opendr/control/mobile_manipulation/src/worlds.cpp
@@ -1,4 +1,4 @@
-// Copyright 2020-2021 OpenDR European Project
+// Copyright 2020-2022 OpenDR European Project
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/single_demo_grasp/Makefile b/src/opendr/control/single_demo_grasp/Makefile
index 701a747e57..755bbe20c0 100644
--- a/src/opendr/control/single_demo_grasp/Makefile
+++ b/src/opendr/control/single_demo_grasp/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/single_demo_grasp/augmentation/augmentation_gui.py b/src/opendr/control/single_demo_grasp/augmentation/augmentation_gui.py
index 6312b88bc9..13fd282a8f 100644
--- a/src/opendr/control/single_demo_grasp/augmentation/augmentation_gui.py
+++ b/src/opendr/control/single_demo_grasp/augmentation/augmentation_gui.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/single_demo_grasp/augmentation/augmentation_utils.py b/src/opendr/control/single_demo_grasp/augmentation/augmentation_utils.py
index 990c478df1..1f6286e2d3 100644
--- a/src/opendr/control/single_demo_grasp/augmentation/augmentation_utils.py
+++ b/src/opendr/control/single_demo_grasp/augmentation/augmentation_utils.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/single_demo_grasp/training/learner_utils.py b/src/opendr/control/single_demo_grasp/training/learner_utils.py
index 7b1a0295a0..20f850c538 100644
--- a/src/opendr/control/single_demo_grasp/training/learner_utils.py
+++ b/src/opendr/control/single_demo_grasp/training/learner_utils.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/control/single_demo_grasp/training/single_demo_grasp_learner.py b/src/opendr/control/single_demo_grasp/training/single_demo_grasp_learner.py
index 6674b40544..ef360f599f 100644
--- a/src/opendr/control/single_demo_grasp/training/single_demo_grasp_learner.py
+++ b/src/opendr/control/single_demo_grasp/training/single_demo_grasp_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/engine/constants.py b/src/opendr/engine/constants.py
index d1d7cd351a..8956b3cd97 100644
--- a/src/opendr/engine/constants.py
+++ b/src/opendr/engine/constants.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aristotle University of Thessaloniki
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/engine/data.py b/src/opendr/engine/data.py
index 41454a7e09..667027169c 100644
--- a/src/opendr/engine/data.py
+++ b/src/opendr/engine/data.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aristotle University of Thessaloniki
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/engine/datasets.py b/src/opendr/engine/datasets.py
index 438835b845..737a642863 100644
--- a/src/opendr/engine/datasets.py
+++ b/src/opendr/engine/datasets.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aristotle University of Thessaloniki
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/engine/example_learner.py b/src/opendr/engine/example_learner.py
index fe6860841c..063d5bdcd2 100644
--- a/src/opendr/engine/example_learner.py
+++ b/src/opendr/engine/example_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aristotle University of Thessaloniki
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/engine/learners.py b/src/opendr/engine/learners.py
index 916ea65611..a49a0a8fc1 100644
--- a/src/opendr/engine/learners.py
+++ b/src/opendr/engine/learners.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aristotle University of Thessaloniki
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/engine/target.py b/src/opendr/engine/target.py
index 13cfeba9ad..efb922aa2f 100644
--- a/src/opendr/engine/target.py
+++ b/src/opendr/engine/target.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aristotle University of Thessaloniki
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/activity_recognition/cox3d/cox3d_learner.py b/src/opendr/perception/activity_recognition/cox3d/cox3d_learner.py
index dfbd9753a7..244ac8b278 100644
--- a/src/opendr/perception/activity_recognition/cox3d/cox3d_learner.py
+++ b/src/opendr/perception/activity_recognition/cox3d/cox3d_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/activity_recognition/datasets/kinetics.py b/src/opendr/perception/activity_recognition/datasets/kinetics.py
index 4dd84e5657..91301d059d 100644
--- a/src/opendr/perception/activity_recognition/datasets/kinetics.py
+++ b/src/opendr/perception/activity_recognition/datasets/kinetics.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/activity_recognition/datasets/utils/transforms.py b/src/opendr/perception/activity_recognition/datasets/utils/transforms.py
index 1256d712ac..6ba0921f04 100644
--- a/src/opendr/perception/activity_recognition/datasets/utils/transforms.py
+++ b/src/opendr/perception/activity_recognition/datasets/utils/transforms.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/activity_recognition/x3d/x3d_learner.py b/src/opendr/perception/activity_recognition/x3d/x3d_learner.py
index 4c2602855c..0a413f67c2 100644
--- a/src/opendr/perception/activity_recognition/x3d/x3d_learner.py
+++ b/src/opendr/perception/activity_recognition/x3d/x3d_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/__init__.py b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/__init__.py
index ca5542f964..2dc6914a0c 100644
--- a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/__init__.py
+++ b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/__init__.py b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/__init__.py
index 5b8c54ed33..4ef3dcea2f 100644
--- a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/__init__.py
+++ b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/cifar_allcnn.py b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/cifar_allcnn.py
index 017426c7ab..29fe420385 100644
--- a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/cifar_allcnn.py
+++ b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/cifar_allcnn.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/imagenet_densenet.py b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/imagenet_densenet.py
index 812dcc8472..169851fc54 100644
--- a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/imagenet_densenet.py
+++ b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/imagenet_densenet.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/imagenet_resnet.py b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/imagenet_resnet.py
index 75e1d44d69..ee13811c26 100644
--- a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/imagenet_resnet.py
+++ b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/imagenet_resnet.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/imagenet_vgg.py b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/imagenet_vgg.py
index 6333b88adb..e85abf230d 100644
--- a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/imagenet_vgg.py
+++ b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/imagenet_vgg.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/model_utils.py b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/model_utils.py
index 1f71140a5e..123d6e210e 100644
--- a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/model_utils.py
+++ b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones/model_utils.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/data.py b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/data.py
index 9c96527eaa..380fa64bb2 100644
--- a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/data.py
+++ b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/data.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/learner.py b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/learner.py
index cb38556297..3c1f943ead 100644
--- a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/learner.py
+++ b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/multilinear_compressive_learner.py b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/multilinear_compressive_learner.py
index 8386b9e212..8a71441f13 100644
--- a/src/opendr/perception/compressive_learning/multilinear_compressive_learning/multilinear_compressive_learner.py
+++ b/src/opendr/perception/compressive_learning/multilinear_compressive_learning/multilinear_compressive_learner.py
@@ -1,4 +1,4 @@
-# Copyright 1996-2020 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/face_recognition/face_recognition_learner.py b/src/opendr/perception/face_recognition/face_recognition_learner.py
index 004975c72b..dfd44b978d 100644
--- a/src/opendr/perception/face_recognition/face_recognition_learner.py
+++ b/src/opendr/perception/face_recognition/face_recognition_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/AFEW_data_gen.py b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/AFEW_data_gen.py
index 2074667455..357272e456 100644
--- a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/AFEW_data_gen.py
+++ b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/AFEW_data_gen.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/CASIA_CK_data_gen.py b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/CASIA_CK_data_gen.py
index dbf609948f..9f0fb74452 100644
--- a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/CASIA_CK_data_gen.py
+++ b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/CASIA_CK_data_gen.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/data_augmentation.py b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/data_augmentation.py
index 83c9f97d49..45080a065f 100644
--- a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/data_augmentation.py
+++ b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/data_augmentation.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/frame_extractor.py b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/frame_extractor.py
index 83843e49f7..9e501020c1 100644
--- a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/frame_extractor.py
+++ b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/frame_extractor.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/gen_facial_muscles_data.py b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/gen_facial_muscles_data.py
index f75d202dca..30eb9eb53e 100644
--- a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/gen_facial_muscles_data.py
+++ b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/gen_facial_muscles_data.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/landmark_extractor.py b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/landmark_extractor.py
index dd0ab63588..3ee87c8b69 100644
--- a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/landmark_extractor.py
+++ b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/datasets/landmark_extractor.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/models/pstbln.py b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/models/pstbln.py
index 6dd053f780..9d8d583227 100644
--- a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/models/pstbln.py
+++ b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm/models/pstbln.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/progressive_spatio_temporal_bln_learner.py b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/progressive_spatio_temporal_bln_learner.py
index 72e68b3146..0941f362c4 100644
--- a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/progressive_spatio_temporal_bln_learner.py
+++ b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/progressive_spatio_temporal_bln_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/heart_anomaly_detection/attention_neural_bag_of_feature/algorithm/__init__.py b/src/opendr/perception/heart_anomaly_detection/attention_neural_bag_of_feature/algorithm/__init__.py
index b5b7d06a65..77323b27a3 100644
--- a/src/opendr/perception/heart_anomaly_detection/attention_neural_bag_of_feature/algorithm/__init__.py
+++ b/src/opendr/perception/heart_anomaly_detection/attention_neural_bag_of_feature/algorithm/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/heart_anomaly_detection/attention_neural_bag_of_feature/attention_neural_bag_of_feature_learner.py b/src/opendr/perception/heart_anomaly_detection/attention_neural_bag_of_feature/attention_neural_bag_of_feature_learner.py
index 7bd25b6514..9af30cd277 100644
--- a/src/opendr/perception/heart_anomaly_detection/attention_neural_bag_of_feature/attention_neural_bag_of_feature_learner.py
+++ b/src/opendr/perception/heart_anomaly_detection/attention_neural_bag_of_feature/attention_neural_bag_of_feature_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/__init__.py b/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/__init__.py
index fe6caddd71..9985cb5a3a 100644
--- a/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/__init__.py
+++ b/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/data.py b/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/data.py
index 902f1b19da..d032001160 100644
--- a/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/data.py
+++ b/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/data.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/models.py b/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/models.py
index ecb8e2e359..e980ab92cb 100644
--- a/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/models.py
+++ b/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/models.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/trainers.py b/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/trainers.py
index fec0b76d9f..3f6f37a094 100644
--- a/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/trainers.py
+++ b/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/algorithm/trainers.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/gated_recurrent_unit_learner.py b/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/gated_recurrent_unit_learner.py
index 9176ba0168..50ca5ccc00 100644
--- a/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/gated_recurrent_unit_learner.py
+++ b/src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/gated_recurrent_unit_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/__init__.py b/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/__init__.py
index 6a330be2f8..8ecd264d0c 100644
--- a/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/__init__.py
+++ b/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/architectures/__init__.py b/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/architectures/__init__.py
index 23b6c14b9b..3f178bd998 100644
--- a/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/architectures/__init__.py
+++ b/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/architectures/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/data.py b/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/data.py
index 36f40d13ab..fb9bbbedbc 100644
--- a/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/data.py
+++ b/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/data.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/rgbd_hand_gesture_learner.py b/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/rgbd_hand_gesture_learner.py
index 0b4f3eab56..7206df99d0 100644
--- a/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/rgbd_hand_gesture_learner.py
+++ b/src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/rgbd_hand_gesture_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/centernet/centernet_learner.py b/src/opendr/perception/object_detection_2d/centernet/centernet_learner.py
index 7795214d66..196d8c3396 100644
--- a/src/opendr/perception/object_detection_2d/centernet/centernet_learner.py
+++ b/src/opendr/perception/object_detection_2d/centernet/centernet_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/datasets/detection_dataset.py b/src/opendr/perception/object_detection_2d/datasets/detection_dataset.py
index 52cdbbdffa..0889f67a70 100644
--- a/src/opendr/perception/object_detection_2d/datasets/detection_dataset.py
+++ b/src/opendr/perception/object_detection_2d/datasets/detection_dataset.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/datasets/transforms.py b/src/opendr/perception/object_detection_2d/datasets/transforms.py
index 98b08216b6..5aa6f1e327 100644
--- a/src/opendr/perception/object_detection_2d/datasets/transforms.py
+++ b/src/opendr/perception/object_detection_2d/datasets/transforms.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/datasets/wider_face.py b/src/opendr/perception/object_detection_2d/datasets/wider_face.py
index dcf101cfbf..66c185f320 100644
--- a/src/opendr/perception/object_detection_2d/datasets/wider_face.py
+++ b/src/opendr/perception/object_detection_2d/datasets/wider_face.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/datasets/wider_person.py b/src/opendr/perception/object_detection_2d/datasets/wider_person.py
index dc09935384..d1372d3ca7 100644
--- a/src/opendr/perception/object_detection_2d/datasets/wider_person.py
+++ b/src/opendr/perception/object_detection_2d/datasets/wider_person.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/detr/algorithm/util/__init__.py b/src/opendr/perception/object_detection_2d/detr/algorithm/util/__init__.py
index aaf2a76690..74d66d73ab 100644
--- a/src/opendr/perception/object_detection_2d/detr/algorithm/util/__init__.py
+++ b/src/opendr/perception/object_detection_2d/detr/algorithm/util/__init__.py
@@ -1 +1 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
diff --git a/src/opendr/perception/object_detection_2d/detr/algorithm/util/draw.py b/src/opendr/perception/object_detection_2d/detr/algorithm/util/draw.py
index 0555814e1f..beaccb1219 100644
--- a/src/opendr/perception/object_detection_2d/detr/algorithm/util/draw.py
+++ b/src/opendr/perception/object_detection_2d/detr/algorithm/util/draw.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/detr/detr_learner.py b/src/opendr/perception/object_detection_2d/detr/detr_learner.py
index 215983a1a5..6a2ec7c526 100755
--- a/src/opendr/perception/object_detection_2d/detr/detr_learner.py
+++ b/src/opendr/perception/object_detection_2d/detr/detr_learner.py
@@ -1,4 +1,5 @@
-
+# Copyright 2020-2022 OpenDR European Project
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
diff --git a/src/opendr/perception/object_detection_2d/gem/algorithm/util/draw.py b/src/opendr/perception/object_detection_2d/gem/algorithm/util/draw.py
index c638190fd4..278414dc73 100644
--- a/src/opendr/perception/object_detection_2d/gem/algorithm/util/draw.py
+++ b/src/opendr/perception/object_detection_2d/gem/algorithm/util/draw.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/gem/algorithm/util/sampler.py b/src/opendr/perception/object_detection_2d/gem/algorithm/util/sampler.py
index 72b18b6a61..ebe29cd01b 100644
--- a/src/opendr/perception/object_detection_2d/gem/algorithm/util/sampler.py
+++ b/src/opendr/perception/object_detection_2d/gem/algorithm/util/sampler.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/gem/gem_learner.py b/src/opendr/perception/object_detection_2d/gem/gem_learner.py
index 3f60f7fcb6..8012d38106 100644
--- a/src/opendr/perception/object_detection_2d/gem/gem_learner.py
+++ b/src/opendr/perception/object_detection_2d/gem/gem_learner.py
@@ -1,5 +1,5 @@
-# Copyright 2020-2021 OpenDR European Project
-
+# Copyright 2020-2022 OpenDR European Project
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
diff --git a/src/opendr/perception/object_detection_2d/retinaface/retinaface_learner.py b/src/opendr/perception/object_detection_2d/retinaface/retinaface_learner.py
index 29f260a1ec..ce1049b53b 100644
--- a/src/opendr/perception/object_detection_2d/retinaface/retinaface_learner.py
+++ b/src/opendr/perception/object_detection_2d/retinaface/retinaface_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/ssd/ssd_learner.py b/src/opendr/perception/object_detection_2d/ssd/ssd_learner.py
index 31587436c6..8bb8b8f65b 100644
--- a/src/opendr/perception/object_detection_2d/ssd/ssd_learner.py
+++ b/src/opendr/perception/object_detection_2d/ssd/ssd_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/utils/eval_utils.py b/src/opendr/perception/object_detection_2d/utils/eval_utils.py
index e12df113d9..ef4674ce3c 100644
--- a/src/opendr/perception/object_detection_2d/utils/eval_utils.py
+++ b/src/opendr/perception/object_detection_2d/utils/eval_utils.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/utils/get_color_infra_alignment.py b/src/opendr/perception/object_detection_2d/utils/get_color_infra_alignment.py
index 5a3018e9d3..9df4349fce 100644
--- a/src/opendr/perception/object_detection_2d/utils/get_color_infra_alignment.py
+++ b/src/opendr/perception/object_detection_2d/utils/get_color_infra_alignment.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/utils/vis_utils.py b/src/opendr/perception/object_detection_2d/utils/vis_utils.py
index 37afb5f905..a84cc1309b 100644
--- a/src/opendr/perception/object_detection_2d/utils/vis_utils.py
+++ b/src/opendr/perception/object_detection_2d/utils/vis_utils.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_2d/yolov3/yolov3_learner.py b/src/opendr/perception/object_detection_2d/yolov3/yolov3_learner.py
index bc5a98f670..6a47381b5e 100644
--- a/src/opendr/perception/object_detection_2d/yolov3/yolov3_learner.py
+++ b/src/opendr/perception/object_detection_2d/yolov3/yolov3_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_3d/datasets/create_data_kitti.py b/src/opendr/perception/object_detection_3d/datasets/create_data_kitti.py
index 012fc1e0cb..40b5318577 100644
--- a/src/opendr/perception/object_detection_3d/datasets/create_data_kitti.py
+++ b/src/opendr/perception/object_detection_3d/datasets/create_data_kitti.py
@@ -1,3 +1,4 @@
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_3d/datasets/kitti.py b/src/opendr/perception/object_detection_3d/datasets/kitti.py
index 10b7ab8599..f07bd9c3b4 100644
--- a/src/opendr/perception/object_detection_3d/datasets/kitti.py
+++ b/src/opendr/perception/object_detection_3d/datasets/kitti.py
@@ -1,3 +1,4 @@
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/logger.py b/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/logger.py
index b1987b01a3..0a479947da 100644
--- a/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/logger.py
+++ b/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/logger.py
@@ -1,3 +1,4 @@
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/second_detector/load.py b/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/second_detector/load.py
index b47843f2b1..7793c90a61 100644
--- a/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/second_detector/load.py
+++ b/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/second_detector/load.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/second_detector/run.py b/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/second_detector/run.py
index 8eb846046b..449131d53a 100644
--- a/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/second_detector/run.py
+++ b/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/second_detector/run.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/voxel_object_detection_3d_learner.py b/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/voxel_object_detection_3d_learner.py
index 68ac9a1cab..c2fe0afaa9 100644
--- a/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/voxel_object_detection_3d_learner.py
+++ b/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/voxel_object_detection_3d_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Aristotle University of Thessaloniki
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_2d/datasets/market1501_dataset.py b/src/opendr/perception/object_tracking_2d/datasets/market1501_dataset.py
index fc7678c6be..dc7120e844 100644
--- a/src/opendr/perception/object_tracking_2d/datasets/market1501_dataset.py
+++ b/src/opendr/perception/object_tracking_2d/datasets/market1501_dataset.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_2d/datasets/mot_dataset.py b/src/opendr/perception/object_tracking_2d/datasets/mot_dataset.py
index 32b93ec9c9..1743798826 100644
--- a/src/opendr/perception/object_tracking_2d/datasets/mot_dataset.py
+++ b/src/opendr/perception/object_tracking_2d/datasets/mot_dataset.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_2d/deep_sort/algorithm/deep_sort_tracker.py b/src/opendr/perception/object_tracking_2d/deep_sort/algorithm/deep_sort_tracker.py
index 6d0a0a34ca..7073b1e6ff 100644
--- a/src/opendr/perception/object_tracking_2d/deep_sort/algorithm/deep_sort_tracker.py
+++ b/src/opendr/perception/object_tracking_2d/deep_sort/algorithm/deep_sort_tracker.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_2d/deep_sort/algorithm/run.py b/src/opendr/perception/object_tracking_2d/deep_sort/algorithm/run.py
index b0df630859..6e02af3313 100644
--- a/src/opendr/perception/object_tracking_2d/deep_sort/algorithm/run.py
+++ b/src/opendr/perception/object_tracking_2d/deep_sort/algorithm/run.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_2d/deep_sort/object_tracking_2d_deep_sort_learner.py b/src/opendr/perception/object_tracking_2d/deep_sort/object_tracking_2d_deep_sort_learner.py
index 98da503803..2cec93e397 100644
--- a/src/opendr/perception/object_tracking_2d/deep_sort/object_tracking_2d_deep_sort_learner.py
+++ b/src/opendr/perception/object_tracking_2d/deep_sort/object_tracking_2d_deep_sort_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/load.py b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/load.py
index 7e64c3a4b7..54677c54e5 100644
--- a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/load.py
+++ b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/load.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/run.py b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/run.py
index c3767ae54a..64d272828c 100644
--- a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/run.py
+++ b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/run.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_2d/fair_mot/object_tracking_2d_fair_mot_learner.py b/src/opendr/perception/object_tracking_2d/fair_mot/object_tracking_2d_fair_mot_learner.py
index f719b4dc7a..d22735a919 100644
--- a/src/opendr/perception/object_tracking_2d/fair_mot/object_tracking_2d_fair_mot_learner.py
+++ b/src/opendr/perception/object_tracking_2d/fair_mot/object_tracking_2d_fair_mot_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_2d/logger.py b/src/opendr/perception/object_tracking_2d/logger.py
index 7272692abb..0a479947da 100644
--- a/src/opendr/perception/object_tracking_2d/logger.py
+++ b/src/opendr/perception/object_tracking_2d/logger.py
@@ -1,4 +1,5 @@
-
+# Copyright 2020-2022 OpenDR European Project
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
diff --git a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/ab3dmot.py b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/ab3dmot.py
index e753d7caed..19b0e1d761 100644
--- a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/ab3dmot.py
+++ b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/ab3dmot.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/evaluate.py b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/evaluate.py
index 4a3cacbe02..d28d8d9951 100644
--- a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/evaluate.py
+++ b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/evaluate.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/kalman_tracker_3d.py b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/kalman_tracker_3d.py
index d1caed5d59..5f73a56a78 100644
--- a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/kalman_tracker_3d.py
+++ b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/kalman_tracker_3d.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_3d/ab3dmot/logger.py b/src/opendr/perception/object_tracking_3d/ab3dmot/logger.py
index b1987b01a3..0a479947da 100644
--- a/src/opendr/perception/object_tracking_3d/ab3dmot/logger.py
+++ b/src/opendr/perception/object_tracking_3d/ab3dmot/logger.py
@@ -1,3 +1,4 @@
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_3d/ab3dmot/object_tracking_3d_ab3dmot_learner.py b/src/opendr/perception/object_tracking_3d/ab3dmot/object_tracking_3d_ab3dmot_learner.py
index 1d607006ce..b542fc510d 100644
--- a/src/opendr/perception/object_tracking_3d/ab3dmot/object_tracking_3d_ab3dmot_learner.py
+++ b/src/opendr/perception/object_tracking_3d/ab3dmot/object_tracking_3d_ab3dmot_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/object_tracking_3d/datasets/kitti_tracking.py b/src/opendr/perception/object_tracking_3d/datasets/kitti_tracking.py
index c7b57cacd1..6332b8889c 100644
--- a/src/opendr/perception/object_tracking_3d/datasets/kitti_tracking.py
+++ b/src/opendr/perception/object_tracking_3d/datasets/kitti_tracking.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/panoptic_segmentation/datasets/cityscapes.py b/src/opendr/perception/panoptic_segmentation/datasets/cityscapes.py
index b48b74973b..eb9f25bf3b 100644
--- a/src/opendr/perception/panoptic_segmentation/datasets/cityscapes.py
+++ b/src/opendr/perception/panoptic_segmentation/datasets/cityscapes.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/panoptic_segmentation/datasets/kitti.py b/src/opendr/perception/panoptic_segmentation/datasets/kitti.py
index 11da638406..212e75931c 100644
--- a/src/opendr/perception/panoptic_segmentation/datasets/kitti.py
+++ b/src/opendr/perception/panoptic_segmentation/datasets/kitti.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/panoptic_segmentation/efficient_ps/configs/singlegpu_sample.py b/src/opendr/perception/panoptic_segmentation/efficient_ps/configs/singlegpu_sample.py
index 31572fae30..6670a71a01 100644
--- a/src/opendr/perception/panoptic_segmentation/efficient_ps/configs/singlegpu_sample.py
+++ b/src/opendr/perception/panoptic_segmentation/efficient_ps/configs/singlegpu_sample.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/panoptic_segmentation/efficient_ps/efficient_ps_learner.py b/src/opendr/perception/panoptic_segmentation/efficient_ps/efficient_ps_learner.py
index ccd177cd40..12b68711ad 100644
--- a/src/opendr/perception/panoptic_segmentation/efficient_ps/efficient_ps_learner.py
+++ b/src/opendr/perception/panoptic_segmentation/efficient_ps/efficient_ps_learner.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR European Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/opendr/perception/pose_estimation/lightweight_open_pose/algorithm/models/with_mobilenet_v2.py b/src/opendr/perception/pose_estimation/lightweight_open_pose/algorithm/models/with_mobilenet_v2.py index 1195d38c90..da3f2016f9 100644 --- a/src/opendr/perception/pose_estimation/lightweight_open_pose/algorithm/models/with_mobilenet_v2.py +++ b/src/opendr/perception/pose_estimation/lightweight_open_pose/algorithm/models/with_mobilenet_v2.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/pose_estimation/lightweight_open_pose/algorithm/models/with_shufflenet.py b/src/opendr/perception/pose_estimation/lightweight_open_pose/algorithm/models/with_shufflenet.py index e3e6a4f441..09ce1be3d7 100644 --- a/src/opendr/perception/pose_estimation/lightweight_open_pose/algorithm/models/with_shufflenet.py +++ b/src/opendr/perception/pose_estimation/lightweight_open_pose/algorithm/models/with_shufflenet.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/pose_estimation/lightweight_open_pose/filtered_pose.py b/src/opendr/perception/pose_estimation/lightweight_open_pose/filtered_pose.py index d64e7f9383..680acb428a 100644 --- a/src/opendr/perception/pose_estimation/lightweight_open_pose/filtered_pose.py +++ b/src/opendr/perception/pose_estimation/lightweight_open_pose/filtered_pose.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/pose_estimation/lightweight_open_pose/lightweight_open_pose_learner.py b/src/opendr/perception/pose_estimation/lightweight_open_pose/lightweight_open_pose_learner.py index 370c64db9a..f0002c8bab 100644 --- a/src/opendr/perception/pose_estimation/lightweight_open_pose/lightweight_open_pose_learner.py +++ b/src/opendr/perception/pose_estimation/lightweight_open_pose/lightweight_open_pose_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/pose_estimation/lightweight_open_pose/utilities.py b/src/opendr/perception/pose_estimation/lightweight_open_pose/utilities.py index 3b08d473b6..d26dc49a0e 100644 --- a/src/opendr/perception/pose_estimation/lightweight_open_pose/utilities.py +++ b/src/opendr/perception/pose_estimation/lightweight_open_pose/utilities.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/src/opendr/perception/semantic_segmentation/bisenet/CamVid.py b/src/opendr/perception/semantic_segmentation/bisenet/CamVid.py index d92ffe5c39..626d60ef46 100644 --- a/src/opendr/perception/semantic_segmentation/bisenet/CamVid.py +++ b/src/opendr/perception/semantic_segmentation/bisenet/CamVid.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/semantic_segmentation/bisenet/bisenet_learner.py b/src/opendr/perception/semantic_segmentation/bisenet/bisenet_learner.py index d8c9687bb5..f5ec72da4f 100644 --- a/src/opendr/perception/semantic_segmentation/bisenet/bisenet_learner.py +++ b/src/opendr/perception/semantic_segmentation/bisenet/bisenet_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/skeleton_based_action_recognition/progressive_spatio_temporal_gcn_learner.py b/src/opendr/perception/skeleton_based_action_recognition/progressive_spatio_temporal_gcn_learner.py index fb0ef4f5bc..5fe9b8c5e5 100644 --- a/src/opendr/perception/skeleton_based_action_recognition/progressive_spatio_temporal_gcn_learner.py +++ b/src/opendr/perception/skeleton_based_action_recognition/progressive_spatio_temporal_gcn_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/skeleton_based_action_recognition/spatio_temporal_gcn_learner.py b/src/opendr/perception/skeleton_based_action_recognition/spatio_temporal_gcn_learner.py index 7501593922..6589637f72 100644 --- a/src/opendr/perception/skeleton_based_action_recognition/spatio_temporal_gcn_learner.py +++ b/src/opendr/perception/skeleton_based_action_recognition/spatio_temporal_gcn_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/speech_recognition/edgespeechnets/algorithm/audioutils.py b/src/opendr/perception/speech_recognition/edgespeechnets/algorithm/audioutils.py index 502752cac6..db39f97260 100644 --- a/src/opendr/perception/speech_recognition/edgespeechnets/algorithm/audioutils.py +++ b/src/opendr/perception/speech_recognition/edgespeechnets/algorithm/audioutils.py @@ -1,4 +1,4 @@ -# Copyright 2020 Tampere University +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/src/opendr/perception/speech_recognition/edgespeechnets/algorithm/models.py b/src/opendr/perception/speech_recognition/edgespeechnets/algorithm/models.py index 3cf618cee0..384115dca1 100644 --- a/src/opendr/perception/speech_recognition/edgespeechnets/algorithm/models.py +++ b/src/opendr/perception/speech_recognition/edgespeechnets/algorithm/models.py @@ -1,4 +1,4 @@ -# Copyright 2020 Tampere University +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/speech_recognition/edgespeechnets/edgespeechnets_learner.py b/src/opendr/perception/speech_recognition/edgespeechnets/edgespeechnets_learner.py index a229053b63..2882658539 100644 --- a/src/opendr/perception/speech_recognition/edgespeechnets/edgespeechnets_learner.py +++ b/src/opendr/perception/speech_recognition/edgespeechnets/edgespeechnets_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/speech_recognition/matchboxnet/algorithm/audioutils.py b/src/opendr/perception/speech_recognition/matchboxnet/algorithm/audioutils.py index 502752cac6..db39f97260 100644 --- a/src/opendr/perception/speech_recognition/matchboxnet/algorithm/audioutils.py +++ b/src/opendr/perception/speech_recognition/matchboxnet/algorithm/audioutils.py @@ -1,4 +1,4 @@ -# Copyright 2020 Tampere University +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/speech_recognition/matchboxnet/algorithm/model.py b/src/opendr/perception/speech_recognition/matchboxnet/algorithm/model.py index 0c9fa512ad..999027305d 100644 --- a/src/opendr/perception/speech_recognition/matchboxnet/algorithm/model.py +++ b/src/opendr/perception/speech_recognition/matchboxnet/algorithm/model.py @@ -1,4 +1,4 @@ -# Copyright 2020 Tampere University +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/speech_recognition/matchboxnet/matchboxnet_learner.py b/src/opendr/perception/speech_recognition/matchboxnet/matchboxnet_learner.py index 90a90c0163..292887a1ce 100644 --- a/src/opendr/perception/speech_recognition/matchboxnet/matchboxnet_learner.py +++ b/src/opendr/perception/speech_recognition/matchboxnet/matchboxnet_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/src/opendr/perception/speech_recognition/quadraticselfonn/algorithm/audioutils.py b/src/opendr/perception/speech_recognition/quadraticselfonn/algorithm/audioutils.py index 5ecaaca543..f2b7bbe763 100644 --- a/src/opendr/perception/speech_recognition/quadraticselfonn/algorithm/audioutils.py +++ b/src/opendr/perception/speech_recognition/quadraticselfonn/algorithm/audioutils.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/speech_recognition/quadraticselfonn/algorithm/model.py b/src/opendr/perception/speech_recognition/quadraticselfonn/algorithm/model.py index 3c45e9dc7b..e043230b12 100644 --- a/src/opendr/perception/speech_recognition/quadraticselfonn/algorithm/model.py +++ b/src/opendr/perception/speech_recognition/quadraticselfonn/algorithm/model.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/perception/speech_recognition/quadraticselfonn/quadraticselfonn_learner.py b/src/opendr/perception/speech_recognition/quadraticselfonn/quadraticselfonn_learner.py index 1877aeffae..b01ee58cca 100644 --- a/src/opendr/perception/speech_recognition/quadraticselfonn/quadraticselfonn_learner.py +++ b/src/opendr/perception/speech_recognition/quadraticselfonn/quadraticselfonn_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/simulation/human_model_generation/pifu_generator_learner.py b/src/opendr/simulation/human_model_generation/pifu_generator_learner.py index 47389873e3..31173a90d5 100644 --- a/src/opendr/simulation/human_model_generation/pifu_generator_learner.py +++ b/src/opendr/simulation/human_model_generation/pifu_generator_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/simulation/human_model_generation/utilities/config_utils.py b/src/opendr/simulation/human_model_generation/utilities/config_utils.py index ca8cc9c3c1..38d306d559 100644 --- a/src/opendr/simulation/human_model_generation/utilities/config_utils.py +++ b/src/opendr/simulation/human_model_generation/utilities/config_utils.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/src/opendr/simulation/human_model_generation/utilities/joint_extractor.py b/src/opendr/simulation/human_model_generation/utilities/joint_extractor.py index 67588c7659..bd9543db5e 100644 --- a/src/opendr/simulation/human_model_generation/utilities/joint_extractor.py +++ b/src/opendr/simulation/human_model_generation/utilities/joint_extractor.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/simulation/human_model_generation/utilities/model_3D.py b/src/opendr/simulation/human_model_generation/utilities/model_3D.py index 36baf7e71d..cef4ab1d29 100644 --- a/src/opendr/simulation/human_model_generation/utilities/model_3D.py +++ b/src/opendr/simulation/human_model_generation/utilities/model_3D.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/simulation/human_model_generation/utilities/studio.py b/src/opendr/simulation/human_model_generation/utilities/studio.py index 8b4c5530d3..d253aa36f5 100644 --- a/src/opendr/simulation/human_model_generation/utilities/studio.py +++ b/src/opendr/simulation/human_model_generation/utilities/studio.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/simulation/human_model_generation/utilities/visualizer.py b/src/opendr/simulation/human_model_generation/utilities/visualizer.py index d76766f718..b5647a9aeb 100644 --- a/src/opendr/simulation/human_model_generation/utilities/visualizer.py +++ b/src/opendr/simulation/human_model_generation/utilities/visualizer.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/utils/hyperparameter_tuner/dummy_learner.py b/src/opendr/utils/hyperparameter_tuner/dummy_learner.py index 89545115b3..92e92eafaa 100644 --- a/src/opendr/utils/hyperparameter_tuner/dummy_learner.py +++ b/src/opendr/utils/hyperparameter_tuner/dummy_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/opendr/utils/hyperparameter_tuner/hyperparameter_tuner.py b/src/opendr/utils/hyperparameter_tuner/hyperparameter_tuner.py index 6781b16326..373bdfe890 100644 --- a/src/opendr/utils/hyperparameter_tuner/hyperparameter_tuner.py +++ b/src/opendr/utils/hyperparameter_tuner/hyperparameter_tuner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/src/opendr/utils/io.py b/src/opendr/utils/io.py index d7fc485767..d6f6756504 100644 --- a/src/opendr/utils/io.py +++ b/src/opendr/utils/io.py @@ -1,4 +1,4 @@ -# Copyright 2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/Makefile b/tests/Makefile index 82bcdfd407..3c2797ee0e 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -1,5 +1,5 @@ # -# Copyright 2020-2021 OpenDR project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/c_api/test_face_recognition.c b/tests/sources/c_api/test_face_recognition.c index 3dcb47c470..65160fbbcc 100644 --- a/tests/sources/c_api/test_face_recognition.c +++ b/tests/sources/c_api/test_face_recognition.c @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 OpenDR project + * Copyright 2020-2022 OpenDR European Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/tests/sources/c_api/test_fmp_gmapping.cpp b/tests/sources/c_api/test_fmp_gmapping.cpp index ccdde883d6..1b1875899c 100644 --- a/tests/sources/c_api/test_fmp_gmapping.cpp +++ b/tests/sources/c_api/test_fmp_gmapping.cpp @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 OpenDR project + * Copyright 2020-2022 OpenDR European Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/tests/sources/c_api/test_opendr_utils.c b/tests/sources/c_api/test_opendr_utils.c index 831320a09c..04e2bcf878 100644 --- a/tests/sources/c_api/test_opendr_utils.c +++ b/tests/sources/c_api/test_opendr_utils.c @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 OpenDR project + * Copyright 2020-2022 OpenDR European Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/control/mobile_manipulation/test_mobile_manipulation.py b/tests/sources/tools/control/mobile_manipulation/test_mobile_manipulation.py index cd6589b028..df93c9d7a9 100644 --- a/tests/sources/tools/control/mobile_manipulation/test_mobile_manipulation.py +++ b/tests/sources/tools/control/mobile_manipulation/test_mobile_manipulation.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/control/single_demo_grasp/test_single_demo_grasp.py b/tests/sources/tools/control/single_demo_grasp/test_single_demo_grasp.py index 533e215bde..73108854f9 100644 --- a/tests/sources/tools/control/single_demo_grasp/test_single_demo_grasp.py +++ b/tests/sources/tools/control/single_demo_grasp/test_single_demo_grasp.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/sources/tools/perception/activity_recognition/cox3d/test_cox3d_learner.py b/tests/sources/tools/perception/activity_recognition/cox3d/test_cox3d_learner.py index 2c54bb87c2..8d1598de46 100644 --- a/tests/sources/tools/perception/activity_recognition/cox3d/test_cox3d_learner.py +++ b/tests/sources/tools/perception/activity_recognition/cox3d/test_cox3d_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/activity_recognition/x3d/test_x3d_learner.py b/tests/sources/tools/perception/activity_recognition/x3d/test_x3d_learner.py index 8d315661d3..960be05b00 100644 --- a/tests/sources/tools/perception/activity_recognition/x3d/test_x3d_learner.py +++ b/tests/sources/tools/perception/activity_recognition/x3d/test_x3d_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/compressive_learning/multilinear_compressive_learning/test_multilinear_compressive_learner.py b/tests/sources/tools/perception/compressive_learning/multilinear_compressive_learning/test_multilinear_compressive_learner.py index d7cedeedcd..221012aeb0 100644 --- a/tests/sources/tools/perception/compressive_learning/multilinear_compressive_learning/test_multilinear_compressive_learner.py +++ b/tests/sources/tools/perception/compressive_learning/multilinear_compressive_learning/test_multilinear_compressive_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/face_recognition/test_face_recognition.py b/tests/sources/tools/perception/face_recognition/test_face_recognition.py index aa640dda57..7f02c6031c 100644 --- a/tests/sources/tools/perception/face_recognition/test_face_recognition.py +++ b/tests/sources/tools/perception/face_recognition/test_face_recognition.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/test_pstbln.py b/tests/sources/tools/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/test_pstbln.py index 9cde2975e5..4720edde23 100644 --- a/tests/sources/tools/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/test_pstbln.py +++ b/tests/sources/tools/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/test_pstbln.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/sources/tools/perception/heart_anomaly_detection/attention_neural_bag_of_feature/test_attention_neural_bag_of_feature_learner.py b/tests/sources/tools/perception/heart_anomaly_detection/attention_neural_bag_of_feature/test_attention_neural_bag_of_feature_learner.py index 8db019b5d1..cb0dc1d2f3 100644 --- a/tests/sources/tools/perception/heart_anomaly_detection/attention_neural_bag_of_feature/test_attention_neural_bag_of_feature_learner.py +++ b/tests/sources/tools/perception/heart_anomaly_detection/attention_neural_bag_of_feature/test_attention_neural_bag_of_feature_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/heart_anomaly_detection/gated_recurrent_unit/test_gated_recurrent_unit_learner.py b/tests/sources/tools/perception/heart_anomaly_detection/gated_recurrent_unit/test_gated_recurrent_unit_learner.py index 4fa34c8d19..d84780326e 100644 --- a/tests/sources/tools/perception/heart_anomaly_detection/gated_recurrent_unit/test_gated_recurrent_unit_learner.py +++ b/tests/sources/tools/perception/heart_anomaly_detection/gated_recurrent_unit/test_gated_recurrent_unit_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/multimodal_human_centric/rgbd_hand_gesture_learner/test_rgbd_hand_gesture_learner.py b/tests/sources/tools/perception/multimodal_human_centric/rgbd_hand_gesture_learner/test_rgbd_hand_gesture_learner.py index bd1c6bd5a5..694763f97c 100644 --- a/tests/sources/tools/perception/multimodal_human_centric/rgbd_hand_gesture_learner/test_rgbd_hand_gesture_learner.py +++ b/tests/sources/tools/perception/multimodal_human_centric/rgbd_hand_gesture_learner/test_rgbd_hand_gesture_learner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/object_detection_2d/centernet/test_centernet.py b/tests/sources/tools/perception/object_detection_2d/centernet/test_centernet.py index cb1c1e8608..cbbc446cab 100644 --- a/tests/sources/tools/perception/object_detection_2d/centernet/test_centernet.py +++ b/tests/sources/tools/perception/object_detection_2d/centernet/test_centernet.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/sources/tools/perception/object_detection_2d/detr/test_detr.py b/tests/sources/tools/perception/object_detection_2d/detr/test_detr.py index e9afc94cce..16b5dbcaa3 100644 --- a/tests/sources/tools/perception/object_detection_2d/detr/test_detr.py +++ b/tests/sources/tools/perception/object_detection_2d/detr/test_detr.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/object_detection_2d/gem/test_gem.py b/tests/sources/tools/perception/object_detection_2d/gem/test_gem.py index 03ad1e5616..f773f53cea 100644 --- a/tests/sources/tools/perception/object_detection_2d/gem/test_gem.py +++ b/tests/sources/tools/perception/object_detection_2d/gem/test_gem.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/object_detection_2d/retinaface/test_retinaface.py b/tests/sources/tools/perception/object_detection_2d/retinaface/test_retinaface.py index 71006eb172..f8f52c9390 100644 --- a/tests/sources/tools/perception/object_detection_2d/retinaface/test_retinaface.py +++ b/tests/sources/tools/perception/object_detection_2d/retinaface/test_retinaface.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/object_detection_2d/ssd/test_ssd.py b/tests/sources/tools/perception/object_detection_2d/ssd/test_ssd.py index 73887a235d..3f3f03e1f5 100644 --- a/tests/sources/tools/perception/object_detection_2d/ssd/test_ssd.py +++ b/tests/sources/tools/perception/object_detection_2d/ssd/test_ssd.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/object_detection_2d/yolov3/test_yolo3.py b/tests/sources/tools/perception/object_detection_2d/yolov3/test_yolo3.py index 589c46799b..71b2466263 100644 --- a/tests/sources/tools/perception/object_detection_2d/yolov3/test_yolo3.py +++ b/tests/sources/tools/perception/object_detection_2d/yolov3/test_yolo3.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/sources/tools/perception/object_detection_3d/voxel_object_detection_3d/test_object_detection_3d.py b/tests/sources/tools/perception/object_detection_3d/voxel_object_detection_3d/test_object_detection_3d.py index 3095ae96f0..2372294671 100644 --- a/tests/sources/tools/perception/object_detection_3d/voxel_object_detection_3d/test_object_detection_3d.py +++ b/tests/sources/tools/perception/object_detection_3d/voxel_object_detection_3d/test_object_detection_3d.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/object_tracking_2d/deep_sort/test_object_tracking_2d_deep_sort.py b/tests/sources/tools/perception/object_tracking_2d/deep_sort/test_object_tracking_2d_deep_sort.py index 323283c154..0c4e35256f 100644 --- a/tests/sources/tools/perception/object_tracking_2d/deep_sort/test_object_tracking_2d_deep_sort.py +++ b/tests/sources/tools/perception/object_tracking_2d/deep_sort/test_object_tracking_2d_deep_sort.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/object_tracking_2d/fair_mot/test_object_tracking_2d_fair_mot.py b/tests/sources/tools/perception/object_tracking_2d/fair_mot/test_object_tracking_2d_fair_mot.py index 3c2743135f..b75717a668 100644 --- a/tests/sources/tools/perception/object_tracking_2d/fair_mot/test_object_tracking_2d_fair_mot.py +++ b/tests/sources/tools/perception/object_tracking_2d/fair_mot/test_object_tracking_2d_fair_mot.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/object_tracking_3d/ab3dmot/test_object_tracking_3d_ab3dmot.py b/tests/sources/tools/perception/object_tracking_3d/ab3dmot/test_object_tracking_3d_ab3dmot.py index 5c512cdcb8..723c0b2e15 100644 --- a/tests/sources/tools/perception/object_tracking_3d/ab3dmot/test_object_tracking_3d_ab3dmot.py +++ b/tests/sources/tools/perception/object_tracking_3d/ab3dmot/test_object_tracking_3d_ab3dmot.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/pose_estimation/lightweight_open_pose/test_lightweight_open_pose.py b/tests/sources/tools/perception/pose_estimation/lightweight_open_pose/test_lightweight_open_pose.py index a0ba8decae..58c00eb751 100644 --- a/tests/sources/tools/perception/pose_estimation/lightweight_open_pose/test_lightweight_open_pose.py +++ b/tests/sources/tools/perception/pose_estimation/lightweight_open_pose/test_lightweight_open_pose.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/sources/tools/perception/semantic_segmentation/bisenet/test_semantic_segmentation_bisenet.py b/tests/sources/tools/perception/semantic_segmentation/bisenet/test_semantic_segmentation_bisenet.py index fe332ed3dd..77e4eb141f 100644 --- a/tests/sources/tools/perception/semantic_segmentation/bisenet/test_semantic_segmentation_bisenet.py +++ b/tests/sources/tools/perception/semantic_segmentation/bisenet/test_semantic_segmentation_bisenet.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/test_pstgcn.py b/tests/sources/tools/perception/skeleton_based_action_recognition/test_pstgcn.py index 71b4df1544..1ec279c2ae 100644 --- a/tests/sources/tools/perception/skeleton_based_action_recognition/test_pstgcn.py +++ b/tests/sources/tools/perception/skeleton_based_action_recognition/test_pstgcn.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/test_stbln.py b/tests/sources/tools/perception/skeleton_based_action_recognition/test_stbln.py index 39dcd46dd6..f95d01f570 100644 --- a/tests/sources/tools/perception/skeleton_based_action_recognition/test_stbln.py +++ b/tests/sources/tools/perception/skeleton_based_action_recognition/test_stbln.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/test_stgcn.py b/tests/sources/tools/perception/skeleton_based_action_recognition/test_stgcn.py index c11ecc82a5..e1ec303fbe 100644 --- a/tests/sources/tools/perception/skeleton_based_action_recognition/test_stgcn.py +++ b/tests/sources/tools/perception/skeleton_based_action_recognition/test_stgcn.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/test_tagcn.py b/tests/sources/tools/perception/skeleton_based_action_recognition/test_tagcn.py index f00c333d00..b6fea730df 100644 --- a/tests/sources/tools/perception/skeleton_based_action_recognition/test_tagcn.py +++ b/tests/sources/tools/perception/skeleton_based_action_recognition/test_tagcn.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/sources/tools/perception/speech_recognition/edgespeechnets/test_edgespeechnets.py b/tests/sources/tools/perception/speech_recognition/edgespeechnets/test_edgespeechnets.py index d6aab79ca6..57fdfee017 100644 --- a/tests/sources/tools/perception/speech_recognition/edgespeechnets/test_edgespeechnets.py +++ b/tests/sources/tools/perception/speech_recognition/edgespeechnets/test_edgespeechnets.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/speech_recognition/matchboxnet/test_matchboxnet.py b/tests/sources/tools/perception/speech_recognition/matchboxnet/test_matchboxnet.py index b1c1b593de..cdc29e0de8 100644 --- a/tests/sources/tools/perception/speech_recognition/matchboxnet/test_matchboxnet.py +++ b/tests/sources/tools/perception/speech_recognition/matchboxnet/test_matchboxnet.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/perception/speech_recognition/quadraticselfonn/test_quadraticselfonn.py b/tests/sources/tools/perception/speech_recognition/quadraticselfonn/test_quadraticselfonn.py index 36afa664e4..81233ecfb7 100644 --- a/tests/sources/tools/perception/speech_recognition/quadraticselfonn/test_quadraticselfonn.py +++ b/tests/sources/tools/perception/speech_recognition/quadraticselfonn/test_quadraticselfonn.py @@ -1,4 +1,4 @@ -# Copyright 1996-2020 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/simulation/human_model_generation/test_human_model_generation.py b/tests/sources/tools/simulation/human_model_generation/test_human_model_generation.py index 4c62b26892..307e3ee220 100644 --- a/tests/sources/tools/simulation/human_model_generation/test_human_model_generation.py +++ b/tests/sources/tools/simulation/human_model_generation/test_human_model_generation.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/sources/tools/utils/test_hyperparameter_tuner.py b/tests/sources/tools/utils/test_hyperparameter_tuner.py index 2f9d59dcaf..71e00a37a0 100644 --- a/tests/sources/tools/utils/test_hyperparameter_tuner.py +++ b/tests/sources/tools/utils/test_hyperparameter_tuner.py @@ -1,4 +1,4 @@ -# Copyright 2020-2021 OpenDR European Project +# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/sources/tools/utils/test_io.py b/tests/sources/tools/utils/test_io.py
index 8fa45385c3..fffce01719 100644
--- a/tests/sources/tools/utils/test_io.py
+++ b/tests/sources/tools/utils/test_io.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 OpenDR Project
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/tests/test_license.py b/tests/test_license.py
index 5535915b80..c13a325101 100755
--- a/tests/test_license.py
+++ b/tests/test_license.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright 2020-2021 Cyberbotics Ltd.
+# Copyright 2020-2022 OpenDR European Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -19,10 +19,14 @@
 import unittest
 import os
 import fnmatch
+import datetime
 from io import open
 
-APACHE2_LICENSE_C = """* Licensed under the Apache License, Version 2.0 (the "License");
+APACHE2_LICENSE_C = """/*
+ * Copyright 2020-20XX OpenDR European Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
@@ -33,9 +37,11 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- */"""
+ */""".replace('20XX', str(datetime.datetime.now().year))
 
-APACHE2_LICENSE_CPP = """// Licensed under the Apache License, Version 2.0 (the "License");
+APACHE2_LICENSE_CPP = """// Copyright 2020-20XX OpenDR European Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
 // You may obtain a copy of the License at
 //
@@ -45,9 +51,10 @@
 // distributed under the License is distributed on an "AS IS" BASIS,
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
-// limitations under the License."""
+// limitations under the License.""".replace('20XX', str(datetime.datetime.now().year))
 
-APACHE2_LICENSE_PYTHON = """
+APACHE2_LICENSE_PYTHON = """# Copyright 2020-20XX OpenDR European Project
+#
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -58,7 +65,7 @@
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
-# limitations under the License."""
+# limitations under the License.""".replace('20XX', str(datetime.datetime.now().year))
 
 PYTHON_OPTIONAL_HEADERS = [
     '#!/usr/bin/env python2',
@@ -93,6 +100,8 @@ def setUp(self):
             'src/opendr/perception/skeleton_based_action_recognition/algorithm',
             'src/opendr/perception/semantic_segmentation/bisenet/algorithm',
             'src/opendr/perception/object_detection_2d/retinaface/algorithm',
+            'src/opendr/perception/object_detection_2d/gem/algorithm',
+            'src/opendr/perception/object_detection_2d/detr/algorithm',
             'src/opendr/perception/panoptic_segmentation/efficient_ps/algorithm/EfficientPS',
             'src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition',
             'projects/control/eagerx/eagerx',
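The test_license.py change above is what keeps the rest of this patch honest: each expected header template now embeds a 20XX placeholder that is substituted with datetime.datetime.now().year when the test module loads, so the license check starts failing every January until every header is bumped. The long run of identical hunks above is exactly that bump. An update of that size is easiest to script; the following is a minimal sketch of one way to do it, assuming trees like src/opendr — the regex and the extension list are illustrative assumptions, not tooling that ships with the repository:

```python
import datetime
import os
import re

# Rewrites "Copyright 2020-<year> OpenDR European Project" so the range ends
# at the current year; headers naming other holders are deliberately skipped.
HEADER = re.compile(r"(Copyright 2020)-\d{4}( OpenDR European Project)")
REPLACEMENT = r"\g<1>-%d\g<2>" % datetime.datetime.now().year


def bump_headers(root):
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            if not name.endswith((".py", ".c", ".cpp", ".h")):
                continue
            path = os.path.join(dirpath, name)
            with open(path, encoding="utf-8") as f:
                text = f.read()
            updated = HEADER.sub(REPLACEMENT, text)
            if updated != text:
                with open(path, "w", encoding="utf-8") as f:
                    f.write(updated)


if __name__ == "__main__":
    for tree in ("src/opendr", "tests", "projects"):
        bump_headers(tree)
```

Headers with a different start year or holder (the Tampere University and Cyberbotics lines normalized above, for instance) fall outside the pattern on purpose: those needed a case-by-case decision, which is why they appear as explicit hunks in this patch.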
From 60d07576c724c94f2d32ddb8d35386b8d4adb4de Mon Sep 17 00:00:00 2001
From: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
Date: Mon, 24 Jan 2022 14:45:18 +0100
Subject: [PATCH 6/9] Automatic test of pip and docker (#200)

* Test pip installation
* Trigger
* Fix
* Minor
* Updated wheel test
* Added libopenblas install
* Temporary test disable
* Temporary test disable
* Added libsndfile1 dependency
* Restored disabled test
* New wheel building pipeline
* Update build_wheel.sh
* Added libboost-dev dep
* Update installation.md
* Switch to sdist
* sdist workflow
* Update tests_suite.yml
* Added numpy include dirs
* Disabled two 3d detection tests
* Disabled three 3d detection tests
* Disabled five 3d detection tests
* Disabled all 3d detection tests
* Disabled model download - 3d test
* Disabled everything?
* Removed imports from 3d object detection test
* Import test
* Import test
* Import test
* Import test
* Import test
* Import test
* Restored test
* Disabled object detection 3d test
* Refactor workflow
* disable temporarely
* Fix docker mobile manipulation
* Re-enable test
* add badge
* Fix changelog
* remove test install reference
* Restored pip installation

Co-authored-by: Nikolaos Passalis
Co-authored-by: Nikolaos
---
 .github/workflows/test_packages.yml | 103 ++++++++++++++++++++++++++++
 .github/workflows/tests_suite.yml   |   1 +
 CHANGELOG.md                        |   3 +-
 MANIFEST.in                         |   2 +
 README.md                           |   1 +
 bin/build_wheel.sh                  |  19 +++++
 bin/install.sh                      |   8 ---
 docs/reference/installation.md      |   2 +-
 setup.py                            |  20 +++++-
 9 files changed, 147 insertions(+), 12 deletions(-)
 create mode 100644 .github/workflows/test_packages.yml
 create mode 100644 MANIFEST.in
 create mode 100755 bin/build_wheel.sh

diff --git a/.github/workflows/test_packages.yml b/.github/workflows/test_packages.yml
new file mode 100644
index 0000000000..e7a3b718ea
--- /dev/null
+++ b/.github/workflows/test_packages.yml
@@ -0,0 +1,103 @@
+name: Test Packages
+# This workflow tests the latest (third-party hosted) available builds
+
+on:
+  pull_request:
+    types: [opened, reopened, synchronize, labeled, unlabeled]
+  schedule:
+    - cron: '0 23 * * *'
+
+defaults:
+  run:
+    shell: bash
+
+
+jobs:
+  cleanup-runs:
+    if: ${{ contains(github.event.pull_request.labels.*.name, 'test packages') || github.event_name == 'schedule' }}
+    runs-on: ubuntu-latest
+    steps:
+    - uses: rokroskar/workflow-run-cleanup-action@master
+      env:
+        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+  test-wheel:
+    needs: cleanup-runs
+    if: ${{ contains(github.event.pull_request.labels.*.name, 'test packages') || github.event_name == 'schedule' }}
+    strategy:
+      matrix:
+        os: [ubuntu-20.04]
+        package:
+        - engine
+        - utils
+        - perception/activity_recognition
+        - perception/compressive_learning
+        - perception/face_recognition
+        - perception/heart_anomaly_detection
+        - perception/multimodal_human_centric
+        - perception/object_tracking_2d
+        - perception/pose_estimation
+        - perception/speech_recognition
+        - perception/skeleton_based_action_recognition
+        - perception/semantic_segmentation
+        - perception/object_detection_2d
+        - perception/facial_expression_recognition
+        # - perception/object_detection_3d
+        # - control/mobile_manipulation
+        # - simulation/human_model_generation
+        # - control/single_demo_grasp
+        # - perception/object_tracking_3d
+    runs-on: ${{ matrix.os }}
+    steps:
+    - uses: actions/checkout@v2
+    - name: Set up Python 3.8
+      uses: actions/setup-python@v2
+      with:
+        python-version: 3.8
+    - name: Test Wheel
+      run: |
+        export DISABLE_BCOLZ_AVX2=true
+        sudo apt -y install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev
+        python3 -m venv venv
+        source venv/bin/activate
+        wget https://raw.githubusercontent.com/opendr-eu/opendr/master/dependencies/pip_requirements.txt
+        cat pip_requirements.txt | xargs -n 1 -L 1 pip install
+        # Test new package
+        pip install opendr-toolkit
+        python -m unittest discover -s tests/sources/tools/${{ matrix.package }}
+  test-docker:
+    needs: cleanup-runs
+    if: ${{ contains(github.event.pull_request.labels.*.name, 'test packages') || github.event_name == 'schedule' }}
+    strategy:
+      matrix:
+        os: [ubuntu-20.04]
+        package:
+        - engine
+        - utils
+        - perception/activity_recognition
+        - perception/compressive_learning
+        - perception/face_recognition
+        - perception/heart_anomaly_detection
+        - perception/multimodal_human_centric
+        - perception/object_tracking_2d
+        - perception/pose_estimation
+        - perception/speech_recognition
+        - perception/skeleton_based_action_recognition
+        - perception/semantic_segmentation
+        - perception/object_detection_2d
+        - perception/facial_expression_recognition
+        - perception/object_detection_3d
+        - control/mobile_manipulation
+        - simulation/human_model_generation
+        - control/single_demo_grasp
+        #- perception/object_tracking_3d
+    runs-on: ${{ matrix.os }}
+    steps:
+    - name: Set up Python 3.8
+      uses: actions/setup-python@v2
+      with:
+        python-version: 3.8
+    - name: Test Docker
+      run: |
+        docker run --name toolkit -i opendr/opendr-toolkit:cpu_latest bash
+        docker start toolkit
+        docker exec -i toolkit bash -c "source bin/activate.sh && source tests/sources/tools/control/mobile_manipulation/run_ros.sh && python -m unittest discover -s tests/sources/tools/${{ matrix.package }}"
diff --git a/.github/workflows/tests_suite.yml b/.github/workflows/tests_suite.yml
index 7d0503a429..82b80e7e96 100644
--- a/.github/workflows/tests_suite.yml
+++ b/.github/workflows/tests_suite.yml
@@ -106,3 +106,4 @@ jobs:
          source tests/sources/tools/control/mobile_manipulation/run_ros.sh
          python -m unittest discover -s tests/sources/tools/${{ matrix.package }}
        fi
+
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2de83a228b..7e0bec9ded 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,9 +8,10 @@ Released on XX, XXth, 2022.
 - Enhancements:
   - None.
 - Bug Fixes:
-  - None.
+  - Updated wheel building pipeline to include missing files and removed unnecessary dependencies ([#200](https://github.com/opendr-eu/opendr/pull/200)).
 - Dependency Updates:
   - `heart anomaly detection`: upgraded scikit-learn runtime dependency from 0.21.3 to 0.22 ([#198](https://github.com/opendr-eu/opendr/pull/198)).
+
 ## Version 1.0
 Released on December 31th, 2021.
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000000..2c2972093c
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,2 @@
+recursive-include src/opendr *
+include requirements.txt
diff --git a/README.md b/README.md
index e91b500193..b3102f0580 100644
--- a/README.md
+++ b/README.md
@@ -18,6 +18,7 @@ ______________________________________________________________________
 
 [![License](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
 [![Test Suite (master)](https://github.com/opendr-eu/opendr/actions/workflows/tests_suite.yml/badge.svg)](https://github.com/opendr-eu/opendr/actions/workflows/tests_suite.yml)
+[![Test Packages](https://github.com/opendr-eu/opendr/actions/workflows/test_packages.yml/badge.svg)](https://github.com/opendr-eu/opendr/actions/workflows/test_packages.yml)
 
 ## About
diff --git a/bin/build_wheel.sh b/bin/build_wheel.sh
new file mode 100755
index 0000000000..c0564a5d0c
--- /dev/null
+++ b/bin/build_wheel.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+# Build all OpenDR dependencies
+./bin/install.sh
+
+# Activate OpenDR
+source ./bin/activate.sh
+
+# Prepare requirements.txt for wheel distributions
+pip3 freeze > requirements.txt
+
+# Remove detectron and git repositories (installation not supported through PyPI)
+sed -i '/detectron2/d' requirements.txt
+sed -i '/git/d' requirements.txt
+sed -i '/pkg_resources/d' requirements.txt
+sed -i '/auditwheel/d' requirements.txt
+
+# Build the source distribution
+python3 setup.py sdist
diff --git a/bin/install.sh b/bin/install.sh
index d0f3b6c269..e819c7a100 100755
--- a/bin/install.sh
+++ b/bin/install.sh
@@ -40,13 +40,5 @@ make install_compilation_dependencies
 make install_runtime_dependencies
 make libopendr
 
-# Prepare requirements.txt for wheel distributions
-pip3 freeze > requirements.txt
-
-# Remove detectron, since it was installed from git repo
-sed -i '/detectron2/d' requirements.txt
-sed -i '/git/d' requirements.txt
-python3 setup.py bdist_wheel
-
 deactivate
 
diff --git a/docs/reference/installation.md b/docs/reference/installation.md
index 1b36477f82..8ede23d12e 100644
--- a/docs/reference/installation.md
+++ b/docs/reference/installation.md
@@ -74,7 +74,7 @@ You can directly install OpenDR toolkit for CPU-only inference using pip.
 First, install the required dependencies:
 ```bash
 export DISABLE_BCOLZ_AVX2=true
-sudo apt install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget
+sudo apt install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev
 python3 -m venv venv
 source venv/bin/activate
 wget https://raw.githubusercontent.com/opendr-eu/opendr/master/dependencies/pip_requirements.txt
diff --git a/setup.py b/setup.py
index 5eff9c2b0c..c2d988b385 100644
--- a/setup.py
+++ b/setup.py
@@ -1,19 +1,33 @@
+import os
+from os.path import join
 from setuptools import setup
 from setuptools import find_packages
 from Cython.Build import cythonize
+import numpy
+
 packages = find_packages(where="./src")
+# Get the requirements
 with open('requirements.txt') as fp:
     install_requires = fp.read().splitlines()
+# Retrieve version
 exec(open('src/opendr/_version.py').read())
-
 try:
     __version__
 except NameError:
     __version__ = '0.0'
+# Gather all files
+data_files = []
+for root, dirs, files in os.walk("src/opendr"):
+    for file in files:
+        file_extension = file.split(".")[-1]
+        # Add all files except shared libraries and Python sources
+        if file_extension != "so" and file_extension != "py":
+            data_files.append(join(root.replace("src/opendr/", ""), file))
+
 setup(
     name='opendr-toolkit',
     version=__version__,
@@ -49,5 +63,7 @@
     license='LICENSE',
     package_dir={"": "src"},
     install_requires=install_requires,
-    ext_modules=cythonize(["src/opendr/perception/object_detection_2d/retinaface/algorithm/cython/*.pyx"])
+    package_data={'': data_files},
+    ext_modules=cythonize(["src/opendr/perception/object_detection_2d/retinaface/algorithm/cython/*.pyx"]),
+    include_dirs=[numpy.get_include()]
 )
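The bug this patch addresses is that wheels built with bdist_wheel shipped without the non-Python assets the tools expect at runtime; the new MANIFEST.in plus the package_data walk in setup.py are the remedy, and the switch to an sdist presumably lets the Cython extension build on the target machine. A quick, stdlib-only way to confirm that a freshly built archive really carries those files is sketched below, assuming it runs from the repository root after ./bin/build_wheel.sh has left an archive under dist/ (the archive name pattern is an assumption):

```python
import glob
import tarfile

# Pick the most recent sdist produced by the build script.
archives = sorted(glob.glob("dist/opendr-toolkit-*.tar.gz"))
if not archives:
    raise SystemExit("no sdist found under dist/ - run ./bin/build_wheel.sh first")

with tarfile.open(archives[-1], "r:gz") as tar:
    names = tar.getnames()

# setup.py excludes .py and .so from package_data, so the interesting
# entries here are configs, model metadata and similar bundled assets.
assets = [n for n in names if not n.endswith((".py", ".so"))]
print(f"{archives[-1]}: {len(names)} entries, {len(assets)} bundled assets")
for name in assets[:20]:
    print("  ", name)
```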
From d9f27c5bc7483ed6b026d087e4c78429b6b4eaf5 Mon Sep 17 00:00:00 2001
From: ad-daniel <44834743+ad-daniel@users.noreply.github.com>
Date: Tue, 25 Jan 2022 18:29:45 +0100
Subject: [PATCH 7/9] Add automatic publishing of wheel and docker (#202)

* Test pip installation
* Trigger
* Fix
* Minor
* Updated wheel test
* Added libopenblas install
* Temporary test disable
* Temporary test disable
* Added libsndfile1 dependency
* Restored disabled test
* New wheel building pipeline
* Update build_wheel.sh
* Added libboost-dev dep
* Update installation.md
* Switch to sdist
* sdist workflow
* Update tests_suite.yml
* Added numpy include dirs
* Disabled two 3d detection tests
* Disabled three 3d detection tests
* Disabled five 3d detection tests
* Disabled all 3d detection tests
* Disabled model download - 3d test
* Disabled everything?
* Removed imports from 3d object detection test
* Import test
* Import test
* Import test
* Import test
* Import test
* Import test
* Restored test
* Disabled object detection 3d test
* Minimal
* Refactor workflow
* disable temporarely
* Fix docker mobile manipulation
* Re-enable test
* add badge
* Fix changelog
* remove test install reference
* test build process
* Fix
* version bump
* manual trigger
* Fix workflow
* Minor
* add cuda docker
* Minor
* Fix Token
* Publish wheel to testpypi
* Cleanup
* Specify version
* cleanup and check
* Fix
* Docker push test
* remove if
* Prepare for merge
* Fix pypi credentials

Co-authored-by: Nikolaos Passalis
Co-authored-by: Nikolaos
---
 .github/workflows/publisher.yml     | 72 +++++++++++++++++++++++++++
 .github/workflows/test_packages.yml |  3 +-
 src/opendr/_version.py              |  2 +-
 3 files changed, 74 insertions(+), 3 deletions(-)
 create mode 100644 .github/workflows/publisher.yml

diff --git a/.github/workflows/publisher.yml b/.github/workflows/publisher.yml
new file mode 100644
index 0000000000..6dc43014d2
--- /dev/null
+++ b/.github/workflows/publisher.yml
@@ -0,0 +1,72 @@
+name: Publisher
+
+# Trigger on new github release, a tag with format vX.Y.Z is expected (used to tag the docker)
+on:
+  release:
+    types: [published]
+
+env:
+  OPENDR_VERSION: ${{ github.event.release.tag_name }}
+
+defaults:
+  run:
+    shell: bash
+
+jobs:
+  publish-wheel:
+    runs-on: ubuntu-20.04
+    steps:
+    - uses: actions/checkout@v2
+      with:
+        submodules: true
+    - name: Set up Python 3.8
+      uses: actions/setup-python@v2
+      with:
+        python-version: 3.8
+    - name: Install prerequisites
+      run: |
+        python -m pip install --upgrade pip
+        pip install setuptools wheel twine
+    - name: Build Wheel
+      run: |
+        ./bin/build_wheel.sh
+    - name: Upload Wheel
+      env:
+        TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
+        TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
+      run: |
+        twine upload dist/*
+  publish-docker-cpu:
+    runs-on: ubuntu-20.04
+    steps:
+    - uses: actions/checkout@v2
+      with:
+        submodules: true
+    - name: Build Docker Image
+      run: docker build --tag opendr-toolkit:cpu_$OPENDR_VERSION --file Dockerfile .
+    - name: Login to Docker Hub
+      uses: docker/login-action@v1
+      with:
+        username: ${{ secrets.DOCKERHUB_USERNAME }}
+        password: ${{ secrets.DOCKERHUB_PASSWORD }}
+    - name: Publish Image
+      run: |
+        docker tag opendr-toolkit:cpu_$OPENDR_VERSION opendr/opendr-toolkit:cpu_$OPENDR_VERSION
+        docker push opendr/opendr-toolkit:cpu_$OPENDR_VERSION
+  publish-docker-cuda:
+    runs-on: ubuntu-20.04
+    steps:
+    - uses: actions/checkout@v2
+      with:
+        submodules: true
+    - name: Build Docker Image
+      run: docker build --tag opendr-toolkit:cuda_$OPENDR_VERSION --file Dockerfile-cuda .
+    - name: Login to Docker Hub
+      uses: docker/login-action@v1
+      with:
+        username: ${{ secrets.DOCKERHUB_USERNAME }}
+        password: ${{ secrets.DOCKERHUB_PASSWORD }}
+    - name: Publish Image
+      run: |
+        docker tag opendr-toolkit:cuda_$OPENDR_VERSION opendr/opendr-toolkit:cuda_$OPENDR_VERSION
+        docker push opendr/opendr-toolkit:cuda_$OPENDR_VERSION
diff --git a/.github/workflows/test_packages.yml b/.github/workflows/test_packages.yml
index e7a3b718ea..13cd407950 100644
--- a/.github/workflows/test_packages.yml
+++ b/.github/workflows/test_packages.yml
@@ -61,7 +61,6 @@ jobs:
         source venv/bin/activate
         wget https://raw.githubusercontent.com/opendr-eu/opendr/master/dependencies/pip_requirements.txt
         cat pip_requirements.txt | xargs -n 1 -L 1 pip install
-        # Test new package
         pip install opendr-toolkit
         python -m unittest discover -s tests/sources/tools/${{ matrix.package }}
   test-docker:
@@ -89,7 +88,7 @@ jobs:
         - control/mobile_manipulation
         - simulation/human_model_generation
         - control/single_demo_grasp
-        #- perception/object_tracking_3d
+        # - perception/object_tracking_3d
     runs-on: ${{ matrix.os }}
     steps:
diff --git a/src/opendr/_version.py b/src/opendr/_version.py
index 4c67cd2681..c98a588823 100644
--- a/src/opendr/_version.py
+++ b/src/opendr/_version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "1.0"
+__version__ = "1.0.0"
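One thing the Publisher workflow above does not do is verify its own preconditions: it assumes the release tag has the vX.Y.Z form and that the version it encodes agrees with src/opendr/_version.py (bumped to 1.0.0 in this same patch). A guard along the following lines could run before the upload steps; check_release_tag is a hypothetical helper sketched here, not part of the patch:

```python
import os
import re
import sys


def check_release_tag(tag, version):
    """Abort if the GitHub release tag and the package version disagree."""
    match = re.fullmatch(r"v(\d+\.\d+\.\d+)", tag)
    if match is None:
        sys.exit(f"tag {tag!r} is not of the expected form vX.Y.Z")
    if match.group(1) != version:
        sys.exit(f"tag {tag!r} does not match __version__ = {version!r}")


if __name__ == "__main__":
    # The workflow exports OPENDR_VERSION from the release tag name.
    tag = os.environ["OPENDR_VERSION"]
    # Same trick setup.py uses to read the version without importing opendr.
    exec(open("src/opendr/_version.py").read())
    check_release_tag(tag, __version__)
```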
true + - name: Build image + run: | + docker build --tag opendr/opendr-toolkit:cpu_test --file Dockerfile . + docker save opendr/opendr-toolkit:cpu_test > cpu_test.zip + - name: Upload image artifact + uses: actions/upload-artifact@v2 + with: + path: + cpu_test.zip + test-wheel: + needs: build-wheel + if: ${{ contains(github.event.pull_request.labels.*.name, 'test release') || github.event_name == 'schedule' }} + strategy: + matrix: + os: [ubuntu-20.04] + package: + - engine + - utils + - perception/activity_recognition + - perception/compressive_learning + - perception/face_recognition + - perception/heart_anomaly_detection + - perception/multimodal_human_centric + - perception/object_tracking_2d + - perception/pose_estimation + - perception/speech_recognition + - perception/skeleton_based_action_recognition + - perception/semantic_segmentation + - perception/object_detection_2d + - perception/facial_expression_recognition + # - perception/object_detection_3d + # - control/mobile_manipulation + # - simulation/human_model_generation + # - control/single_demo_grasp + # - perception/object_tracking_3d + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + with: + submodules: true + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Download artifact + uses: actions/download-artifact@v2 + with: + path: artifact + - name: Get branch name + id: branch-name + uses: tj-actions/branch-names@v5.1 + - name: Test Wheel + run: | + export DISABLE_BCOLZ_AVX2=true + sudo apt -y install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev python3-dev + python3 -m venv venv + source venv/bin/activate + wget https://raw.githubusercontent.com/opendr-eu/opendr/${{ steps.branch-name.outputs.current_branch }}/dependencies/pip_requirements.txt + cat pip_requirements.txt | xargs -n 1 -L 1 pip install + pip install ./artifact/artifact/*.tar.gz + python -m unittest discover -s tests/sources/tools/${{ matrix.package }} + test-docker: + needs: build-docker + if: ${{ contains(github.event.pull_request.labels.*.name, 'test release') || github.event_name == 'schedule' }} + strategy: + matrix: + os: [ubuntu-20.04] + package: + - engine + - utils + - perception/activity_recognition + - perception/compressive_learning + - perception/face_recognition + - perception/heart_anomaly_detection + - perception/multimodal_human_centric + - perception/object_tracking_2d + - perception/pose_estimation + - perception/speech_recognition + - perception/skeleton_based_action_recognition + - perception/semantic_segmentation + - perception/object_detection_2d + - perception/facial_expression_recognition + - perception/object_detection_3d + - control/mobile_manipulation + - simulation/human_model_generation + - control/single_demo_grasp + # - perception/object_tracking_3d + runs-on: ubuntu-20.04 + steps: + - name: Download artifact + uses: actions/download-artifact@v2 + with: + path: artifact + - name: Test docker + run: | + docker load < ./artifact/artifact/cpu_test.zip + docker run --name toolkit -i opendr/opendr-toolkit:cpu_test bash + docker start toolkit + docker exec -i toolkit bash -c "source bin/activate.sh && source tests/sources/tools/control/mobile_manipulation/run_ros.sh && python -m unittest discover -s tests/sources/tools/${{ matrix.package }}" From 3bb6bbdd77865b735bd73ea1698322875a6c06c2 Mon Sep 17 00:00:00 2001 From: ekakalet <63847549+ekakalet@users.noreply.github.com> Date: Thu, 3 Feb 2022 17:10:50 +0200 
Subject: [PATCH 9/9] Synthetic multi view facial generator (#94) * Update readme.md * Update readme.md * Rename main.py to preprocessing_1.py * Rename inference.py to preprocessing_2.py * Update SyntheticDataGeneration.py * Update SyntheticDataGeneration.py * Update readme.md * Delete bfm.ply * Delete bfm_refine.ply * Update readme.md * Update SyntheticDataGeneration.py * Update SyntheticDataGeneration.py * Update testSyntheticDataGeneration.py * Update SyntheticDataGeneration.py * Update projects/data_generation/synthetic-multi-view-facial-image-generation/3ddfa/SyntheticDataGeneration.py Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update projects/data_generation/synthetic-multi-view-facial-image-generation/3ddfa/SyntheticDataGeneration.py Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update tests/test_license.py Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update SyntheticDataGeneration.py * Update SyntheticDataGeneration.py * Update testSyntheticDataGeneration.py * Update projects/data_generation/synthetic-multi-view-facial-image-generation/README.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update projects/data_generation/synthetic-multi-view-facial-image-generation/README.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update projects/data_generation/synthetic-multi-view-facial-image-generation/README.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update README.md * Update projects/data_generation/synthetic-multi-view-facial-image-generation/README.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update README.md * ROS implementation * Update README.md * Update projects/data_generation/synthetic-multi-view-facial-image-generation/README.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Create synthetic_facial_image_generator.md * Update synthetic_facial_image_generator.md * Update index.md * Update testSyntheticDataGeneration.py * Update SyntheticDataGeneration.py * Update testSyntheticDataGeneration.py * Update synthetic_facial_image_generator.md * Update synthetic_facial_image_generator.md * Rename projects/data_generation/synthetic-multi-view-facial-image-generation/3ddfa/SyntheticDataGeneration.py to projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/SyntheticDataGeneration.py * Rename projects/data_generation/synthetic-multi-view-facial-image-generation/3ddfa/testSyntheticDataGeneration.py to projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/testSyntheticDataGeneration.py * Update readme.md * Delete readme.md * Rename projects/data_generation/synthetic-multi-view-facial-image-generation/3ddfa/path_helper.py to projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/path_helper.py * Rename projects/data_generation/synthetic-multi-view-facial-image-generation/3ddfa/path_helper2.py to projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/path_helper2.py * Rename projects/data_generation/synthetic-multi-view-facial-image-generation/synthetic_facial_generation.py to projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/synthetic_facial_generation.py * Update SyntheticDataGeneration.py * Update testSyntheticDataGeneration.py * Update testSyntheticDataGeneration.py * Update testSyntheticDataGeneration.py * Delete 
synthetic_facial_generation.py * Delete path_helper2.py * Delete path_helper.py * Add files via upload * Update synthetic_facial_generation.py * Update synthetic_facial_generation.py * Update synthetic_facial_generation.py * Update SyntheticDataGeneration.py * Update testSyntheticDataGeneration.py * Update testSyntheticDataGeneration.py * Update synthetic_facial_generation.py * Update synthetic_facial_generation.py * Update synthetic_facial_generation.py * Update synthetic_facial_generation.py * Update README.md * Update README.md * Update projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/SyntheticDataGeneration.py Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/SyntheticDataGeneration.py Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/SyntheticDataGeneration.py Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/SyntheticDataGeneration.py Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/SyntheticDataGeneration.py Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * new structure * Delete projects/data_generation/synthetic-multi-view-facial-image-generation/3ddfa directory * Delete projects/data_generation/synthetic-multi-view-facial-image-generation/data directory * Delete projects/data_generation/synthetic-multi-view-facial-image-generation/experiments directory * Delete projects/data_generation/synthetic-multi-view-facial-image-generation/misc directory * Delete projects/data_generation/synthetic-multi-view-facial-image-generation/models directory * Delete projects/data_generation/synthetic-multi-view-facial-image-generation/options directory * Delete projects/data_generation/synthetic-multi-view-facial-image-generation/trainers directory * Delete projects/data_generation/synthetic-multi-view-facial-image-generation/util directory * Delete SyntheticDataGeneration.py * Delete testSyntheticDataGeneration.py * Delete __init__.py * Delete test_frontal.py * Delete test_multipose.py * Delete train.py * Delete LICENSE * Update README.md * Rename projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/SyntheticDataGeneration.py to projects/data_generation/synthetic-multi-view-facial-image-generation/SyntheticDataGeneration.py * Rename projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/testSyntheticDataGeneration.py to projects/data_generation/synthetic-multi-view-facial-image-generation/testSyntheticDataGeneration.py * Update tests_sources.yml * Rename projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/__init__.py to projects/data_generation/synthetic-multi-view-facial-image-generation/__init__.py * Update __init__.py * Update __init__.py * Update __init__.py * Delete __init__.py * Create __init__.py * Update __init__.py * Delete __init__.py * Create __init__.py * Delete __init__.py * update * Delete SyntheticDataGeneration.py * Delete testSyntheticDataGeneration.py * update for tests * update for tests * Update test_synthetic_facial_generation.py * update * Update test_synthetic_facial_generation.py * Rename 
tests/sources/tools/data_generation/test_synthetic_facial_generation.py to tests/sources/tools/data_generation/synthetic-multi-view-facial-image-generation/test_synthetic_facial_generation.py * update references * Rename tests/sources/tools/data_generation/opendr_url.py to tests/sources/tools/data_generation/synthetic-multi-view-facial-generator/opendr_url.py * Rename tests/sources/tools/data_generation/synthetic-multi-view-facial-generator/opendr_url.py to tests/sources/tools/data_generation/synthetic-multi-view-facial-generationopendr_url.py * Rename tests/sources/tools/data_generation/synthetic-multi-view-facial-generationopendr_url.py to tests/sources/tools/data_generation/synthetic-multi-view-facial-generation/opendr_url.py * Rename tests/sources/tools/data_generation/synthetic-multi-view-facial-generation/opendr_url.py to tests/sources/tools/data_generation/synthetic-mult-view-facial-image-generation/opendr_url.py * Rename tests/sources/tools/data_generation/synthetic-mult-view-facial-image-generation/opendr_url.py to tests/sources/tools/data_generation/synthetic-multi-view-facial-image-generation/opendr_url.py * Update opendr_url.py * Update test_synthetic_facial_generation.py * Update opendr_url.py * Create 3ddfa_url.py * Update and rename projects/data_generation/synthetic-multi-view-facial-image-generation/algorithm/3ddfa/3ddfa_url.py to projects/data_generation/3ddfa_url.py * Update SyntheticDataGeneration.py * Create test_multipose_url.py * Update SyntheticDataGeneration.py * Update 3ddfa_url.py * Update test_multipose_url.py * Update 3ddfa_url.py * Update SyntheticDataGeneration.py * Rename 3ddfa_url.py to help_url.py * Update SyntheticDataGeneration.py * Update help_url.py * Update test_multipose_url.py * Update test_multipose_url.py * update * Update and rename tests/sources/tools/data_generation/synthetic-multi-view-facial-image-generation/opendr_url.py to tests/sources/tools/data_generation/synthetic_multi_view_facial_image_generation/opendr_url.py * Rename tests/sources/tools/data_generation/synthetic-multi-view-facial-image-generation/test_synthetic_facial_generation.py to tests/sources/tools/data_generation/synthetic_multi_view_facial_image_generation/test_synthetic_facial_generation.py * rename * updates * update * update * Delete opendr_url.py * Delete test_synthetic_facial_generation.py * Rename tests/sources/tools/data_generation/test_synthetic_facial_generation.py to tests/sources/tools/data_generation/synthetic_multi_view_facial_image_generation/test_synthetic_facial_generation.py * Rename tests/sources/tools/data_generation/__init__.py to tests/sources/tools/data_generation/synthetic_multi_view_facial_image_generation/__init__.py * update test * update * Rename tests/sources/tools/data_generation/file_list.txt to tests/sources/tools/data_generation/synthetic_multi_view_facial_image_generation/file_list.txt * Update __init__.py * init * Update tests_sources.yml * Update tests_sources.yml * update paths * update * update * update * Reorganize * update * update * update * update * update tool * Delete test_synthetic_facial_generation.py * Delete file_list.txt * Delete __init__.py * Delete __init__.py * Update tests_sources.yml * update * without pep errors * reformat code * Delete projects/data_generation/OLD directory * Update SyntheticDataGeneration.py * Update tool_synthetic_facial_generation.py * Update render.py * Update SyntheticDataGeneration.py * Update SyntheticDataGeneration.py * Update render.py * Update render.py * Update 
synthetic_facial_image_generator.md * Update synthetic_facial_image_generator.md * Update README.md * Update README.md * Delete __init__.py * Delete __init__.py * Update readme.md * Update readme.md * Delete realign_lmk * Add files via upload * Rename projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/tri.mat to projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/tri.mat * Update readme.md * Update README.md * Delete tri.mat * Add files via upload * final updates * installation and import fixes * Bug fixes * for execute in gpu_id 0 * resolve pep8 errors * Update synthetic_facial_generation.py * bug fixes * Update docs/reference/index.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update docs/reference/synthetic_facial_image_generator.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update docs/reference/synthetic_facial_image_generator.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update docs/reference/synthetic_facial_image_generator.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update docs/reference/synthetic_facial_image_generator.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update docs/reference/synthetic_facial_image_generator.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update projects/data_generation/synthetic_multi_view_facial_image_generation/README.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update projects/data_generation/synthetic_multi_view_facial_image_generation/README.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update projects/data_generation/synthetic_multi_view_facial_image_generation/SyntheticDataGeneration.py Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update projects/data_generation/synthetic_multi_view_facial_image_generation/tool_synthetic_facial_generation.py Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update projects/data_generation/synthetic_multi_view_facial_image_generation/README.md Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> * Update test_license.py * Update test_license.py * Update readme.md * Update readme.md * Delete projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/misc directory * Delete projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/train.configs directory Co-authored-by: ad-daniel <44834743+ad-daniel@users.noreply.github.com> Co-authored-by: ad-daniel Co-authored-by: Nikolaos Passalis Co-authored-by: Nikolaos --- docs/reference/index.md | 7 +- .../synthetic_facial_image_generator.md | 58 + .../README.md | 70 + .../SyntheticDataGeneration.py | 141 + .../algorithm/DDFA/BFM_Remove_Neck/bfm_show.m | 14 + .../DDFA/BFM_Remove_Neck/imgs/bfm_noneck.jpg | Bin 0 -> 38340 bytes .../DDFA/BFM_Remove_Neck/imgs/bfm_refine.jpg | Bin 0 -> 38340 bytes .../algorithm/DDFA/BFM_Remove_Neck/readme.md | 16 + .../DDFA/BFM_Remove_Neck/render_face_mesh.m | 22 + .../algorithm/DDFA/LICENSE | 21 + .../algorithm/DDFA/__init__.py | 0 .../algorithm/DDFA/example/Images/.keep | 0 .../algorithm/DDFA/mobilenet_v1.py | 154 + .../algorithm/DDFA/preprocessing_1.py | 229 + .../algorithm/DDFA/preprocessing_2.py | 163 + .../algorithm/DDFA/simple_dataset.py | 39 + 
.../algorithm/DDFA/test.py | 126 + .../algorithm/DDFA/utils/__init__.py | 0 .../algorithm/DDFA/utils/cv_plot.py | 99 + .../algorithm/DDFA/utils/cython/__init__.py | 0 .../algorithm/DDFA/utils/cython/mesh_core.cpp | 215 + .../algorithm/DDFA/utils/cython/mesh_core.h | 65 + .../mesh_core_cython.cp37-win_amd64.pyd | Bin 0 -> 67584 bytes .../DDFA/utils/cython/mesh_core_cython.cpp | 8365 +++++++++++++++++ .../DDFA/utils/cython/mesh_core_cython.pyx | 65 + .../algorithm/DDFA/utils/cython/readme.md | 6 + .../algorithm/DDFA/utils/cython/setup.py | 19 + .../algorithm/DDFA/utils/ddfa.py | 161 + .../algorithm/DDFA/utils/estimate_pose.py | 110 + .../algorithm/DDFA/utils/inference.py | 268 + .../algorithm/DDFA/utils/io.py | 130 + .../algorithm/DDFA/utils/lighting.py | 101 + .../algorithm/DDFA/utils/paf.py | 61 + .../algorithm/DDFA/utils/params.py | 43 + .../algorithm/DDFA/utils/path_helper.py | 2 + .../algorithm/DDFA/utils/render.py | 225 + .../algorithm/DDFA/visualize/readme.md | 3 + .../algorithm/DDFA/visualize/render_demo.m | 10 + .../DDFA/visualize/render_face_mesh.m | 18 + .../algorithm/DDFA/visualize/tri.mat | Bin 0 -> 339799 bytes .../algorithm/Rotate_and_Render/LICENSE | 384 + .../algorithm/Rotate_and_Render/__init__.py | 0 .../Rotate_and_Render/data/__init__.py | 150 + .../Rotate_and_Render/data/allface_dataset.py | 165 + .../Rotate_and_Render/data/base_dataset.py | 123 + .../algorithm/Rotate_and_Render/data/curve.py | 250 + .../Rotate_and_Render/data/data_utils.py | 183 + .../Rotate_and_Render/experiments/test.sh | 24 + .../Rotate_and_Render/experiments/train.sh | 32 + .../experiments/v100_test.sh | 24 + .../Rotate_and_Render/models/__init__.py | 40 + .../models/networks/__init__.py | 60 + .../models/networks/architecture.py | 199 + .../models/networks/base_network.py | 54 + .../models/networks/discriminator.py | 308 + .../models/networks/encoder.py | 50 + .../models/networks/generator.py | 126 + .../Rotate_and_Render/models/networks/loss.py | 188 + .../models/networks/normalization.py | 106 + .../models/networks/render.py | 584 ++ .../models/networks/rotate_render.py | 115 + .../networks/sync_batchnorm/__init__.py | 6 + .../networks/sync_batchnorm/batchnorm.py | 384 + .../sync_batchnorm/batchnorm_reimpl.py | 57 + .../models/networks/sync_batchnorm/comm.py | 127 + .../networks/sync_batchnorm/replicate.py | 119 + .../networks/sync_batchnorm/scatter_gather.py | 44 + .../networks/sync_batchnorm/unittest.py | 18 + .../models/networks/test_render.py | 152 + .../Rotate_and_Render/models/networks/util.py | 176 + .../Rotate_and_Render/models/rotate_model.py | 330 + .../models/rotatespade_model.py | 391 + .../Rotate_and_Render/models/test_model.py | 58 + .../Rotate_and_Render/options/__init__.py | 0 .../Rotate_and_Render/options/base_options.py | 243 + .../Rotate_and_Render/options/test_options.py | 29 + .../options/train_options.py | 73 + .../Rotate_and_Render/test_frontal.py | 205 + .../Rotate_and_Render/test_multipose.py | 241 + .../algorithm/Rotate_and_Render/train.py | 99 + .../Rotate_and_Render/trainers/__init__.py | 36 + .../trainers/rotate_trainer.py | 110 + .../trainers/rotatespade_trainer.py | 46 + .../Rotate_and_Render/util/__init__.py | 0 .../algorithm/Rotate_and_Render/util/html.py | 71 + .../Rotate_and_Render/util/iter_counter.py | 69 + .../algorithm/Rotate_and_Render/util/util.py | 206 + .../Rotate_and_Render/util/visualizer.py | 187 + .../demos/imgs_input/person01145+0-15.jpg | Bin 0 -> 9282 bytes .../requirements.txt | 11 + .../tool_synthetic_facial_generation.py | 79 + 
.../src/data_generation/CMakeLists.txt | 32 + .../opendr_ws/src/data_generation/README.md | 28 + .../opendr_ws/src/data_generation/package.xml | 25 + .../scripts/synthetic_facial_generation.py | 123 + tests/test_license.py | 1 + 96 files changed, 17965 insertions(+), 2 deletions(-) create mode 100644 docs/reference/synthetic_facial_image_generator.md create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/README.md create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/SyntheticDataGeneration.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/bfm_show.m create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_noneck.jpg create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_refine.jpg create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/readme.md create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/render_face_mesh.m create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/LICENSE create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/__init__.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/example/Images/.keep create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/mobilenet_v1.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_1.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_2.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/simple_dataset.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/test.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/__init__.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cv_plot.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/__init__.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.cpp create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.h create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cp37-win_amd64.pyd create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cpp create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.pyx create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/readme.md create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/setup.py create mode 100644 
projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/ddfa.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/estimate_pose.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/inference.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/io.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/lighting.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/paf.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/params.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/path_helper.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/render.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/readme.md create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_demo.m create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_face_mesh.m create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/tri.mat create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/LICENSE create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/__init__.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/__init__.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/allface_dataset.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/base_dataset.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/curve.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/data_utils.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/test.sh create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/train.sh create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/v100_test.sh create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/__init__.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/__init__.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/architecture.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/base_network.py create mode 100644 
projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/discriminator.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/encoder.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/generator.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/loss.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/normalization.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/render.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/rotate_render.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/__init__.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm_reimpl.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/comm.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/replicate.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/scatter_gather.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/unittest.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/test_render.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/util.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotate_model.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotatespade_model.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/test_model.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/__init__.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/base_options.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/test_options.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/train_options.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_frontal.py create mode 
100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_multipose.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/train.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/__init__.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotate_trainer.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotatespade_trainer.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/__init__.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/html.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/iter_counter.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/util.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/visualizer.py create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/demos/imgs_input/person01145+0-15.jpg create mode 100644 projects/data_generation/synthetic_multi_view_facial_image_generation/requirements.txt create mode 100755 projects/data_generation/synthetic_multi_view_facial_image_generation/tool_synthetic_facial_generation.py create mode 100644 projects/opendr_ws/src/data_generation/CMakeLists.txt create mode 100644 projects/opendr_ws/src/data_generation/README.md create mode 100644 projects/opendr_ws/src/data_generation/package.xml create mode 100644 projects/opendr_ws/src/data_generation/scripts/synthetic_facial_generation.py diff --git a/docs/reference/index.md b/docs/reference/index.md index 60d2cfc530..f0926d311e 100644 --- a/docs/reference/index.md +++ b/docs/reference/index.md @@ -67,11 +67,14 @@ Neither the copyright holder nor any applicable licensor will be liable for any - [single_demo_grasp Module](single-demonstration-grasping.md) - `simulation` Module + - [human_model_generation Module](human_model_generation.md) + - `data_generation` Module + - [synthetic_facial_image_generation Module](synthetic_facial_image_generator.md) - [human_model_generation Module](human-model-generation.md) - `utils` Module - [Hyperparameter Tuning Module](hyperparameter_tuner.md) -- `Stand-alone Utility Frameworks` - - [Engine Agnostic Gym Environment with Reactive extension (EAGERx)](eagerx.md) + - `Stand-alone Utility Frameworks` + - [Engine Agnostic Gym Environment with Reactive extension (EAGERx)](eagerx.md) - [ROSBridge Package](rosbridge.md) - [C Inference API](c-api.md) - [data.h](c-data-h.md) diff --git a/docs/reference/synthetic_facial_image_generator.md b/docs/reference/synthetic_facial_image_generator.md new file mode 100644 index 0000000000..5f2dedac56 --- /dev/null +++ b/docs/reference/synthetic_facial_image_generator.md @@ -0,0 +1,58 @@ +## synthetic_facial_image_generator module + +The *synthetic_facial_image_generator* module contains the *MultiviewDataGeneration* class, which implements the multi-view facial image rendering operation. 
+
+### Class MultiviewDataGeneration
+
+The *MultiviewDataGeneration* class is a wrapper of the Rotate-and-Render [[1]](#R-R-paper) photorealistic multi-view facial image generator, based on the original
+[Rotate-and-Render implementation](https://github.com/Hangz-nju-cuhk/Rotate-and-Render).
+It can be used to perform multi-view facial image generation from a single-view image in the wild (via `eval`).
+The [MultiviewDataGeneration](#projects.data_generation.synthetic-multi-view-facial-image-generation.3ddfa.SyntheticDataGeneration.py) class has the
+following public methods:
+
+#### `MultiviewDataGeneration` constructor
+```python
+MultiviewDataGeneration(self, args)
+```
+
+The main parameters of the constructor's *args* are explained below:
+
+- **path_in**: *str, default='./example/Images'* \
+An absolute path indicating the folder that contains the set of single-view facial image snapshots to be processed by the algorithm.
+- **path_3ddfa**: *str, default='./'* \
+An absolute path indicating the 3ddfa module folder of the software structure, as presented in the repository. This path is necessary so that the software can create, in the results folder of this path, the folders for the intermediate / temporary storage of files generated during the pre-processing, such as 3D face models, facial landmarks etc.
+- **save_path**: *str, default='./results'* \
+The folder in which the output images are stored.
+- **val_yaw**: *str, default='10,20'* \
+Definition of the yaw angles (in the interval [−90°,90°]) for which the rendered images will be produced.
+- **val_pitch**: *str, default='30,40'* \
+Definition of the pitch angles (in the interval [−90°,90°]) for which the rendered images will be produced.
+- **device**: *{'cuda', 'cpu'}, default='cpu'* \
+Specifies the device to be used.
+
+
+#### `MultiviewDataGeneration.eval`
+```python
+MultiviewDataGeneration.eval()
+```
+
+This function implements the main procedure for the creation of the multi-view facial images, which consists of three stages.
+The main parameters of the 3DDFA network are initialized here rather than in the constructor; the first stage then detects the candidate faces in the input images and fits a 3D head mesh to each of them using 3DDFA.
+The second stage extracts the facial landmarks in order to derive the head pose and align the images with the 3D head model mesh.
+Finally, the multi-view facial image rendering itself is executed after loading the respective network parameters.
+
+### Usage Example
+
+```sh
+python3 tool_synthetic_facial_generation.py -path_in ./demos/imgs_input/ -path_3ddfa ./algorithm/DDFA/ -save_path ./results -val_yaw 10, 40 -val_pitch 10, 30 -device cuda
+```
+The paths for the input and output folders, as well as the pitch and yaw angles for which the user wants to
+produce the facial images, are passed as command-line arguments and forwarded to the class constructor.
+The process is executed for the CNN parameters and GPUs specified in the arguments of the aforementioned command.
+Users who wish to modify these parameters should change the respective input arguments, which are derived from a parser and include path_in, path_3ddfa, save_path, val_yaw, val_pitch etc.
+
+#### References
+[1]
+Hang Zhou, Jihao Liu, Ziwei Liu, Yu Liu, Xiaogang Wang, Rotate-and-Render: Unsupervised Photorealistic Face Rotation from Single-View Images,
+[arXiv](https://arxiv.org/abs/2003.08124#).
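+
+#### Python usage sketch
+
+For orientation, the following minimal sketch shows how the class could be driven directly from Python instead of through the command-line tool. It is illustrative only: it assumes the project folder as the working directory and passes only the documented arguments through an `argparse.Namespace`, whereas the tool's full parser also supplies additional 3DDFA options that `eval()` consumes internally.
+
+```python
+from argparse import Namespace
+
+from SyntheticDataGeneration import MultiviewDataGeneration
+
+# Hypothetical values mirroring the documented parser arguments.
+args = Namespace(
+    path_in="./demos/imgs_input/",   # folder with single-view facial snapshots
+    path_3ddfa="./algorithm/DDFA/",  # 3DDFA module folder (intermediate storage)
+    save_path="./results",           # where the rendered views are written
+    val_yaw="10,40",                 # yaw angles, each in [-90, 90]
+    val_pitch="10,30",               # pitch angles, each in [-90, 90]
+    device="cuda",
+)
+
+generator = MultiviewDataGeneration(args)
+# Runs the three stages described above: face detection and 3D mesh fitting,
+# landmark extraction, and multi-view rendering.
+generator.eval()
+```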
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/README.md b/projects/data_generation/synthetic_multi_view_facial_image_generation/README.md
new file mode 100644
index 0000000000..c8a650489d
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/README.md
@@ -0,0 +1,70 @@
+# Synthetic Multi-view Facial Image Generation based on Rotate-and-Render: Unsupervised Photorealistic Face Rotation from Single-View Images (CVPR 2020)
+
+Based on: [[Rotate-and-Render: Unsupervised Photorealistic Face Rotation from Single-View Images]](https://arxiv.org/abs/2003.08124)
+
+We utilize publicly available code, with small modifications so that it can be easily executed: an unsupervised framework that can synthesize photorealistic rotated facial images using as input a single facial image, or multiple such images (one per person).
+The implemented method allows for rotating faces in the 3D space back and forth, and then re-rendering them to the 2D plane.
+The generated multi-view facial images can be used for different learning tasks, such as self-supervised learning.
+
+## Sources:
+* Face Alignment in Full Pose Range: A 3D Total Solution (IEEE TPAMI 2017)
+* Neural 3D Mesh Renderer (CVPR 2018)
+* Rotate-and-Render: Unsupervised Photorealistic Face Rotation from Single-View Images (CVPR 2020)
+
+## Requirements
+* Python 3.6 is used. Basic requirements are listed in the 'requirements.txt'.
+
+```
+pip3 install -r requirements.txt
+```
+* Install the [Neural_Renderer](https://github.com/daniilidis-group/neural_renderer) following the instructions.
+```
+pip install git+https://github.com/cidl-auth/neural_renderer
+```
+
+* Download the checkpoint and BFM model from [checkpoint.zip](ftp://opendrdata.csd.auth.gr/data_generation/synthetic_multi-view-facial-generator/ckpt_and_bfm.zip), put it in ```3ddfa``` and unzip it:
+```bash
+wget ftp://opendrdata.csd.auth.gr/data_generation/synthetic_multi-view-facial-generator/checkpoints.zip
+unzip checkpoints.zip
+unzip checkpoints/ckpt_and_bfm.zip -d 3ddfa
+```
+The 3D models are borrowed from [3DDFA](https://github.com/cleardusk/3DDFA).
+
+* Compile the cython code and download the remaining models:
+```bash
+cd algorithm/DDFA/utils/cython/
+python3 setup.py build_ext -i
+cd ../../../..
+mkdir algorithm/DDFA/models
+mkdir algorithm/DDFA/example
+wget https://github.com/cleardusk/3DDFA/blob/master/models/phase1_wpdc_vdc.pth.tar?raw=true -O algorithm/DDFA/models/phase1_wpdc_vdc.pth.tar
+```
+
+## Usage Example
+
+1. Execute the one-step OPENDR function ```tool_synthetic_facial_generation.py``` specifying the input images folder, the output folder, and the desired degrees (range -90 to 90) for generating the facial images in multiple view angles (pitch and yaw), as indicated in the command line:
+```sh
+python3 tool_synthetic_facial_generation.py -path_in ./demos/imgs_input/ -path_3ddfa ./algorithm/DDFA/ -save_path ./results -val_yaw 10, 40 -val_pitch 10, 30 -device cuda
+```
+
+2. The results can be found in ```results/rs_model/example/```, where multi-view facial images are generated for every person in a respective folder.
+
+## License
+Rotate-and-Render is provided under the [CC-BY-4.0](https://github.com/Hangz-nju-cuhk/Rotate-and-Render/blob/master/LICENSE) license.
+SPADE, SyncBN, 3DDFA are under the [MIT License](https://github.com/tasostefas/opendr_internal/blob/synthetic-multi-view-facial-generator/projects/data_generation/synthetic-multi-view-facial-image-generation/3ddfa/LICENSE)
+
+## Acknowledgement
+Large parts of the code are taken from:
+* The structure of this codebase is borrowed from [SPADE](https://github.com/NVlabs/SPADE).
+* The [SyncBN](https://github.com/vacancy/Synchronized-BatchNorm-PyTorch) module is used in the current code.
+* The [3DDFA](https://github.com/cleardusk/3DDFA) implementation for 3D reconstruction.
+* The code of [Rotate-and-Render](https://github.com/Hangz-nju-cuhk/Rotate-and-Render/)
+
+with the following modifications to make them compatible with the OpenDR specifications:
+## Minor Modifications
+1. All scripts: PEP8 changes
+2. ```3ddfa/preprocessing_1.py, 3ddfa/preprocessing_2.py, test_multipose.py``` Modified to work as callable functions
+3. ```options/base_options.py, options/test_options.py``` Commented out/changed several parameters for easier execution
+4. ```models/networks/render.py``` Minor functional changes
+5. The OPENDR-created functions are ```SyntheticDataGeneration.py, tool_synthetic_facial_generation.py```
+6. The rest are taken from the aforementioned repositories
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/SyntheticDataGeneration.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/SyntheticDataGeneration.py
new file mode 100644
index 0000000000..4df8e55fff
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/SyntheticDataGeneration.py
@@ -0,0 +1,141 @@
+# Copyright 2020-2022 OpenDR European Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# MIT License
+#
+# Copyright (c) 2019 Jian Zhao
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+#!/usr/bin/env python3.7
+# coding: utf-8
+from tqdm import tqdm
+from shutil import copyfile
+import cv2
+import os
+from algorithm.DDFA import preprocessing_1
+from algorithm.DDFA import preprocessing_2
+from algorithm.Rotate_and_Render import test_multipose
+
+
+class MultiviewDataGeneration():
+
+    def __init__(self, args):
+
+        self.path_in = args.path_in
+        self.key = str(args.path_3ddfa + "/example/Images/")
+        self.key1 = str(args.path_3ddfa + "/example/")
+        self.key2 = str(args.path_3ddfa + "/results/")
+        self.save_path = args.save_path
+        self.val_yaw = args.val_yaw
+        self.val_pitch = args.val_pitch
+        self.args = args
+
+    def eval(self):
+
+        # STAGE 1: detect faces and fit the 3D mesh via preprocessing_1.py execution
+        list_im = []
+
+        print("START")
+
+        # Collect the input .jpg images, log their names in file_list.txt and
+        # copy them to the 3DDFA example folder for processing.
+        a = open("file_list.txt", "w")
+        for subdir, dirs, files in os.walk(self.path_in):
+            current_directory_path = os.path.abspath(subdir)
+            for file in files:
+                name, ext = os.path.splitext(file)
+                if ext == ".jpg":
+                    current_image_path = os.path.join(current_directory_path, file)
+                    current_image = cv2.imread(current_image_path)
+                    list_im.append(current_image_path)
+                    a.write(str(file) + os.linesep)
+                    cv2.imwrite(os.path.join(self.key, file), current_image)
+        self.args.files = list_im.copy()
+        list_im.clear()
+        preprocessing_1.main(self.args)
+        a.close()
+
+        # STAGE 2: landmark output via preprocessing_2.py execution
+
+        im_list2 = []
+        d = open(os.path.join(self.key1, 'realign_lmk'), "w")
+        for subdir, dirs, files in os.walk(self.path_in):
+            current_directory_path = os.path.abspath(subdir)
+            self.args.img_prefix = current_directory_path
+            self.args.save_dir = os.path.abspath(self.key2)
+            self.args.save_lmk_dir = os.path.abspath(self.key1)
+            if not os.path.exists(self.args.save_dir):
+                os.mkdir(self.args.save_dir)
+            if not os.path.exists(self.args.save_lmk_dir):
+                os.mkdir(self.args.save_lmk_dir)
+
+            list_lfw_batch = './file_list.txt'
+            dst = os.path.join(self.args.save_lmk_dir, "file_list.txt")
+            copyfile(list_lfw_batch, dst)
+            b = open("txt_name_batch.txt", "w")
+            for file in files:
+
+                with open(list_lfw_batch) as f:
+                    img_list = [x.strip() for x in f.readlines()]
+
+                    for img_idx, img_fp in enumerate(tqdm(img_list)):
+                        if img_fp == str(file):
+                            im_list2.append(str(file))
+                            b.write(str(file) + os.linesep)
+            self.args.img_list = './txt_name_batch.txt'
+            b.close()
+            self.args.dump_lmk = 'true'
+            im_list2.clear()
+            preprocessing_2.main(self.args)
+            # Aggregate the realigned landmarks produced by preprocessing_2.
+            with open(os.path.join(self.args.save_lmk_dir, 'realign_lmk_')) as f:
+                img_list = [x.strip() for x in f.readlines()]
+                for img_idx, img_fp in enumerate(tqdm(img_list)):
+                    d.write(img_fp + os.linesep)
+        d.close()
+
+        # STAGE 3: generate the facial images at the specified pitch and yaw angles
+        test_multipose.main(self.save_path, self.val_yaw, self.val_pitch)
+
+    def fit(self):
+        raise NotImplementedError()
+
+    def infer(self):
+        raise NotImplementedError()
+
+    def load(self):
+        raise NotImplementedError()
+
+    def optimize(self):
+        raise NotImplementedError()
+
+    def reset(self):
+        raise NotImplementedError()
+
+    def save(self):
+        raise NotImplementedError()
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/bfm_show.m b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/bfm_show.m
new file mode 100644
index 0000000000..392dcdf62c
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/bfm_show.m
@@ -0,0 +1,14 @@
+model = load('model_refine.mat');
+model = model.model_refine;
+
+mu = model.mu_shape + model.mu_exp;
+mu = reshape(mu, 3, length(mu) / 3);
+tri = model.tri;
+keypoints = model.keypoints;
+pts68_3d = mu(:, keypoints);
+
+render_face_mesh(mu, tri, pts68_3d);
+
+A = getframe(gcf);
+mimg = A.cdata;
+imwrite(mimg, 'imgs/bfm_noneck.jpg', 'quality', 95);
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_noneck.jpg b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_noneck.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..e05cd6073db0c4e6a9628427fb10593cac1ac93d
GIT binary patch
literal 38340
[base85-encoded binary image data omitted]
z;j>0!%d97g|I@P|&s}-&BQNC)K~ns7nPDfIe#+Hmpt;TI>0*j@s3=z^s6U&87Su0A zScDM4POi7IYEFC~=T6WsSD=%YoowTSX){FJ{#Y;^u>5{4!Sp`DD{=iuH&IoM_FWRt z=DT;9gR}-305A#cxHm?v-Gkq=z&`1txD3-IMb&T6sN(30F2qB5L>^=f;I}a2S-o7` zl_pb`-LEsSE8XCjNBOz5fImLsTsz@PNEo*93SF1k9`cmf;oD<=rx${yK7*pOI-MfV z7st*1Yp zpBJ3lPN8~q`X;-zwC@x>svlXbj;xCjYC4E3XuFGhG7f{5F6~&q3=pa%<<49g-XiBt z=p8SOEq?yAM2(4?g76mMAO+qJn8CBh2(R(R>Tlm)Hnw???v3UY_zkl9SY`3-6N>DY zKb|e2eSxIP%jpb?MXVn#03>g0Ss`}dV5_UHU-oEWXa;4fITI?C!4)a;R-0ac$K^7e z%=VGR{3!xhjR9(TuA1kxU)N}IVe9h=`>gARu2jT2-vgH~x06rbMyeijH}BdTsVYn4@N(`4AdQfXtoj+-VvcSa?eHU<5w1z!mQ^k=zAPBn7<_%zQ?>0` zie@j*ZD-NMF`?BDkrM_Aoz^t%8wEud{ZCbk_`b|OUk!NgnjGEF^WoKJ?wf2&N zhEx4xxJYkI{ngiPcJ&vMFc8{l?*ipj1Ew}G>~}-U*PkcuV0w6l5_;*Huan0^hvG>* z=Qpd0u7Q}EnBUy1H7Z`#;m%I{Y5cAq+0R<4Wc!j?@viEN_eSYXE73JIFD?#7buJ_Q z7`GwSvjH`u6h=cw>W-zyODg`%mYza;a|dcH7hIjDx2rK!pOO^rSwEEDhjQD#%PXWi z+%ehm_q%Dj?O8d<-T~rJ^6K&aQs~R0vcacZYlcI(#0-_L9h;4m?~Vx?_l?e{ z$2&F~kk?my57Ydmfmi1_NSQO9vjJ6QpEv6`c%Qjkvil`2+I7KS_IRD>U@nWK5My2P^rDp`Lf5A^|5-lvt~_ z3ye^LeX*Bxjd&dC*uIPxBp(I{qFlws>1Fjb<YBbxW({j$I1iqKS-OG7((sPof;^yasE=7lapv_)7IB# zy@>azQ@X*sho3n>C3xxFROiFH4Vyo_Rw*d?J2zzp;8* zOP)jc!gTvAlLO1RwtuD5zKF9D+k+lhVHQT@H)SNq+U69BsEdjv$$^GqhxXp;C#oWf z_&1#vt?{bDp~$^D9MI*ST6}^iumK^4z>DT*dNTlvGb`pC(d~2vYv#IUqKj4)o6(JZ zJ>h7~#gxZ&OU!J;oR;|cikv1~qlEQ>Q3bt>+`fuqS5B3rFYq3G#IUM$N;-DB_wNv1`U_l7*N54 zMXe6TT$=k6;tpq}Z)%JXD;clnU`5vu5n!6~$tM%CL}j)YWrGu==wem)@ra7a!lwX&@xYa!vBETemPQrgKw2Kal|i56rhZL-c*>pPAx=cmz`D>Vd5+j>tP9GmooIW}me z9mg1%TEa_EhCg3ig7macz2@d<0V=(YcT?b=8|Dge$htAOMS+~ z`?qU^MpVVKTmscWR>txZNeO8IVonb+RW-O;-og?On%iiPF%q8(Su6|dzTX||dVzS< zoO$_us1nm_8};TV1tQH)Y2uZinNPI%ocT??u5Po*6h4ircX{|guh4FVf1<{N+*Rl; zBzWxVxnx)kHI-4nEoz@?r1OrWRYM9tzeR+pZGrb@d1?N{TdUjiIJvYp6b^rIb%^Hn?qr&*ZJ8l&h>%ruX`ss48jC}tOluEQ@7;a_ zFnD)`RbOcFr!S>eWOujUzDFjneqK|2aIgM)WJ3XTV3#xCnzp+9r z&;T+h9(Za@@>!HfKgBoKucL-1Hhx0=o?n&agxcr!Se7|Lk_-E8< zfVb!%Qd+!Y*VACh)zC1r)a1EgW2@ou&BkmB!MD4hV`E6Tle8n;aHsa7g?O{U;oY}N zCm`Zpn!*xQ%N4p)!OgEI-I_|5GVNnA0;~K0Dw5`P@c3fNjgbDEpQO`WX6j0WAqGBV zJR1*tWKDdUr^gGB1HPl`5}45y%$>gsd~HO98Me>Q9I>i=7y?zTRxh ze8fTiE7qg(RL-zhbUSyU8BT9IKF7%4<4d>!p7(;$V3kqRXp%44+*WpmDNR6qPh>P zISoQN36+UleIJ~izYwQ89B?Pjp+`9$t!vQR)!|9eauKe@_U}G*Qp!B94sS_pJR!^` zOw<5fQ3Y8cRp4fE^NXsNAKR$DbT!;kp1z71E?+?#`h2O6Sh8?`Vr{7@)jq0g_VVoI zWqb?oh1i^_y~vN4GOsUsF?u>Po57qa9bfAMr0@59V)#}Qo$~>u^>Rw=Uj*;shyS;l#0)who?!b`s%Wjm(~WpL{*4DAuUv*MtHkbOpL1g|T-SzkaAN#6hr@ zez$8Qx~w@l_*Ea>wevJweO^4XI;kAlz*NiTptQA!I}{BHW>O}V;Uu=5f8W2V8>aYq2Vy71Ljp}T~la9s)VOH zxqyFJ@+UjyOj~klK1DF4MU8}=E>`-1@K=dc49y>K<00`|N?od?kira2t>MIA-M zPVTQs=DrJx-IB2-HV;>qdcR~?^LDYFK3~LJ_`sA2<8Ya^R{>y(hLi5><~sxTx zXn4tOz|8udt^5&5+F0vlb_>KecX``zxTkd_%g3Il^)zFQnhxER?B?#s{4|kN_PV+w zrbaGTjqF9BanvPM1^5biB1MZ8PVFG5S!G>*3j)$$daX_lGLl(f0 z0h%6uKGA*_1CyF^+cn|WY-N!A$pWw}jyqGUN>!|E7UnG(5EdqKK%!Bw7XCu2i$mkA z5{tUf^n#vkjmfdYSi_gS#I_|V3l-q*f*IA)4HGOj zV~{fF-LqhGqN+TgOO*ptT-s@(oWnj)&a*@UegqxS~U-aUpj_mIuDJ&)ftEE`@p&9hw}bQN$^ zRf^?ezWCbEKlSBvDt~MJ{HdQ2NA3DDM76Og(L@gQG)3m^{T`S4GfK9wSZVPt>IHC@ zUZG{>j9D^a6+@(EyT~V+KIcbQJ#H25X0ba=Dm6DvLMF<&>Js+%Cv9zw@7&E6<>UTD zWuRQdZol=OHA`r8=-gVW&{a_P+|EPL%{+c9yCj8idx-eQFY`0yR%{=ttK1a!YIc8F z*qwFfR!wC8hEBpM`A7EL+plt1lryz1qN_B>Xp=Rg_Rrz54$s;JHj>(jn_Xq;Ws&T% zvUiIkA$BsNL3jJKhbpdjkGwCE_WkG>_p;`r0l}zU?9q#`ApJHlhC)&W1fQvOj7z6N z8Iye0L^j`aP3xFKXh80lJ=CsHFrLYmzM3S@w8Gi+^$p9qd&#HD%_it|6X!1RUb~;n zZ$;&q&Ogmxo#k3*;1+jhiLzkx(;4uJXmGS49EBM=m;1UJWL5Qbbz?g&>|i2T&%2@> zo*OcE4!NEPn69j7)I4k95M_HVx+uM`Cu=LE?K&BGB8=s>4rzg|gRy+XK^tK5Z5q=a z)Euw+tVZhh z77bgDmiL?L6&u>g+2kEnZ>Kz*kFn%GIn|p!iQ(;jpgsfSy&gYjBxeqtd@Q}M3{rLr 
z<(o<>5t$gM5~*+oKFX#BU6#ULmt*VCWYE^JI7*_}>S8-E4W4<$_P(87oVkO~+ZExl z6>$LR(W{SH@*k=As!99AQU2vAW3an$L$!qV9UdTU24(?A+ZVT-iHo3Ne~a| zZ&d;L#^#&%Z=N9tFKYzL_V83#(}{_zk$eF2R9xkA%;RD{OkFq#g+I`U#|~R>&&u7wk55?d)G>#JAwandd5 znfx5XAJ>0Kh%Bran6y_Z%8K04Q>71}D9sD}5=b^|BB`!z{PTNT-O94VrlOA4WNLN2 zcm|^O9QqYkJ_|^U?!YXw7qHbp%}b7Z>22;^T9DDet(nBH_T3?dMg92ts3TMHo>F6k z$@nsT6LK$(W7+1&?#52ovqole%M#Oq^_9l@<5Z`%y$e00SAFLSeYJhF!wvcRy3k$C zhSp>nPb=H^3zxX=W7C(P zVb=8xm-(fBS8P}48s!{fmL`{%^+%oJ1bXTeMQ=IAeQz@E<<^sXWB{^WJgE-qezJ3tN{Jzn?sh7)xm1xQNIqs*H^%R$94G-hBH+NIMnW#Juhg_iR|d{Sawf-T(Q8o}GVJBS<@_yW>XG}q_O zooU$QX!67RT<;08dgQ&oA2w-~%6@tX8`@?IrPeKdv)y}lr+F~7#PQ6xW^(9Aq@1t* zNC8ypWVKikRdfP^78SZ0HI7!XO4kcta=Am%T5*A3Y9TyzPlqGF?gd$L_(O=YFG2((49nan^<6)bopKZdUc5KqLea z;`ydrMM7K#wNf%gyn}b79VXM4;j<1Dh3FnBba$0qlCF!(W;C7W?%<*DZtfJ8@XQ|T zf<|g#DZeO};TWgg)V76gM?`1xXWG`4ynaOd14SQCz*_zl?oAiAhjCnZgI#RrpsrXH z4kz6fW@)0Sj6E2;H_>X`QEyZp-(@`iwRZqU;}eQ&AaX;s(1Xz zsk3`-eBXW<-+5}f&-Y_5CBd*+cnmW$wtrh7$av~zy7wa6%+tz^Iui@C`=jX0={}eV zW;L>n%m#w~m|r`*ANgaZ&P!!J?=}CGkt>dxIXY7euV7u*uPM;B*6;0OZbRab)F_Pa zVQ4``!*zwNpw}*RqSDc1EVmOR9{eidBjAoNzAB`XZgXNH*?Df#p(6YyH%&ZE`pQ%P z@)PInxrfuq`W)SaMt8~ihFeo!t9^W#+Dg8F*HaR{eq5TqXgS?zqOx+Mc7Mz2H692S zG%fBIT05Tg=gR)>QlBSVdJ-{iCCIAgynF-QZ=IY1G$M)ue2(x zn>OcpMflBIUMp8N6|onXse?kn4?eIsX52oB^0Yrc^($ypWE-STefcG>uvkZJB~e%X zH^@|N3`6NS^||6!L#qoe+kRYDi_P3c%hZ0o@rh-q{pdrkI1JH>N#lB>ppx&zCzkoy zgQLF8(~+gY6u~r5e&JV%%2B&-Ff}RCzK11w)4T+v!!D+GS_t%$7J(U@Rq!h5di+); zq7Atg0HJWtAYZGG^%L*b>Cjj8YEcn+*tQrY$|HdwXB1RlqI-i%jB(;(`rBO@QVLK z^1tkTV!nUYrm5k}rT_*quqAkZz$AlW^*1Pb#xKToJ!_xch~*Cfv71DrxM!>QKkEGg z@v*-OV-lQY(Zk1Bfl>Dkfc1(=%Ux%#ot=za-I+;HZm~qQTEDrZu*dDb`xo&xe`&ja z(KP&{DE+_rl8ySCVU}O%e{&G@SG9Tj$A4BI75hW*m48bEdpB+m1=;7nVDU?fF8U9VUAad9iuaL{ zeOIez(OqdpUQmS+C+*|9ds<>5%e3#eMJU!qf9WQc zNH-ieFDiPR?eA<`vuLm7e{J-x^-%M}J57Cqn`JHj2Aw4~VD!RqcQCmf49d?Z4};%1 zexAL-JrFoE^N5wRF6M2&*h|{=oV)xgj|D)I}EQK`dDuCETpIVNfTlOU%L^vn9qsM%qiS9Ws5X$DC4jQ9=vU_khgz1 zq4I6bnVvbK9h7AH&=#%tbaFFuvr8qCS;&IcxJ5I>*7z2u6(_9E!lqFcMdx;2TZC1?Yw&Vx9Ur>@9jfH2sZ`qu`#PKsnnlV%C1to%*l*T*Rv8wz9bO713|y$6TIG z-xuu0YFnqP)L1XKziAG5k18A*-a+-7ZwxwhzeH9rQJG)f6{Ro{uoBTJdZC@PaKAOV zRc+AOy+Vp_tV~zOqW*oECtM*IUGFa|x#oL|n%Yb4N#wx93TaNP)RC&nXO-NgAvI~5 z-l)b1ZodAqW=csQ!<*UIxgwgF434j4TW`FpU8L*i>QLYOuDUMcf!nJbb?%gYo7Arp zV~&&s_vZRq-NwB_Rw!e5nNG{^$|Nzj@93IMebh_2v?CY9#^O{;J@r0d2hf&yyWP`a zjr;Y(3oW}nv-$IFcA#(WAAf#TvZwuF(zsdUE-i2MW`-fxwNm3@A)%x}D;e`<%WqIG z%ZFIWU(IZ@>g$5?x9%;Q8eqt~?>IPdiDfqSXH%;HPl5-G0>0pM_6>~)Jv{l|I4&!B zA446k5d9^=PP;qU!jal}>?ZV}n(aC%V(BjK82AffEQ!*DAFCH2(LIvjm$n+=m@nw< zn!ECd+;&{d)-gdtjGtm_nDIG)?IfyyRtZjL1>r+=YYTO{rrdt?PUw#RD#0s>@sD>; z=-g3#Ds)~}M|AD8K&~!$g}rom zJKL2Bo|biDYpifF7oSOYa&Y2bSKtH*jQ4RV{h}!RL9hq@o8ghJRmPLJaF$uzjV_dR zSc?fm{#x7(V{AQ2y$oHI;fN5E=~^!%BIkmVCnwruC7cCF2_RdJ{Tx_qfgmobGv9z9s~17|k9wOB90T}_ za!QU8TL)dFMv147X75VEF~$acEWC40Y=uS@g`lJF($xME{h`vwWR&&19z0Av23*~; zyrlx+xxOAvCSTpzW+S|A^tc$`sv5W_&y{uxrn1`=p-#^#yO|mA**Mu{*mH$(N`-&a z?wFF}^f{Gx`CwO5IFa)S!A_mzFL_Uol7yv{hxQ*QPC?t!Wl?JJF2kTVi*3g9 zG(BeX17vJ7_}HBUz$3ycVJrU1X*uFC*4n33B#)+C-=C0*U$pC|#8_2zfN2m48D95J z*2qkoJD{sj3GN!Wv}cu7i)H%dMuK>@e)Lr-$ zaT#(~_lwWXl_9dT^yOYk>e7)-ZaY2idh;;>0r#r<0%idPY~hF&W&7g$@t=S$>ljIf zo$f7xnA)&+>8j-=Mv1rd%lT8L@&PId!DFS4VFjLft(n9h&Mh+z-`6ys+gxFH(!3=) z?Ci(gLbX5aw43(fSLl!R+UEU(rJvapzE5FENyanHewE)+w)tP`BDE=#jCrGA@(F-K z5XcORgzD#-S@dlf!Yhz_t;K$etF^>gYd(X@a59_cX{j+n3TxzwPKit4!)I&C0bil_ z!dXBjB(A2o&vV>HILyMf2U0p4G+b0VwJVgywPqS5GNVd8fk+ssxmye z(N9CzNl)&oeDPAlE1HPnJJ5hP)|DDQ!e75Zmt}ae`aQ^n&hBpzK~hg$T6wC2b1Dma zx8As_3w_?#svgP-E{0G`WlhhJV-PLh!g>vkdV}xF6U$W2jaI)V{30B!5f`{xFBz6y 
ziB0w_F|;#$yQMN!Dja?xN?NWZAJcWW$R{M>jL4Q{VM#{h2`!Sr?;eZ`J9t44d)k;~TfdgfID2HqYrM6aD z!A~%ErF0GWqYobDD$E3coXV3lrgYE>dI{U6x1sNuA%=Ty8rKCr5Npi%Kcwx%a=02T z=$uVZ{Xo^UDtp{J9k)&W*h>lWIQRS92a*I7CXK6&9~5$bng!<>J91hz!->0-OtQ`8 zIXD+k&8XB}n?kmRRKhsx~CA-%o?*pqH`<@Md)Nn}aRVU*$_YrOgO6}pet-|HB z;_Hpzz(!%N$mo9%NY99!QUqLoKHBUKNhu81+-ZNl^iV%(-j4GCaw!FP$3t^0$yx*478-46sJ^jE zx3hCQ-QM-h0G^{V-nyUQ*Nw0F&tm+w{q|g1kc-Kqusc|5ltvJsgop_BBbzAQJ6XQH zI1tLpF0wKrVb=34slP5(Dj`$J(;+;&y0TAvY(_^%T=!{ui{rlU{@6@Z@pjL&ZgTR( z!llEp=mGqj9Wabo;X8^CBeKy2;#iC>^o+U@NEOz~t_@B7qs07+>)F56q5S_-#oYe+ zf3rR=m)Q{R8x6?<)kFc=kYBUAU0U+wqV3s&W0Ys%vhmmWe%W-vz4a6;?Z`Xa9%Qxj zB0;kB8^UBi&~_}kWf&h+$iYXrJ*QW>!*@((C}tq~ zK?y%GNyDwS!v9;pw|`&_Wbm&g1@JjJ@Fhq$#2WkOFa$#IURJ`*vx1>@$W8-1159`F zqoIVTP17)-8mT%_?h+uT%y}dB+NTmKQiT3ze@GU1!Sl=v3#8XqFR{fy+s~wy(a>_- zR`%h#506uoT|9;5`^*=LU#VGP1Mrr7_M~<&(@B}>OG`@ZaW1pXplh7g9H72)9C@0b{uDH)_MTjOaVp4(C^=xw>DdUIpju}ygW$r{Pp3SF?zf+L2}omJ``uxJS!46 z(x!p->~X$uIN-Ve^+Ab?X%dhZ6+f-WIxXjFIY)G5a;XO5kIOvLBnsn#+S**u%H1~k zbCi7$!&4|Zc7QoS<-MO9!CsT#`8T#%CzQi$hkU)|5ibPmxU4z{F%LMyeL*rZ;eG_M z4dD**3QV)QzDC~(Tt_`ns5KZFX~u1L^;o*>KY`Y=?_s8{hd1ShVr$7B8% zKq7pqi^OK-8a*)}(6w*_LPJA zT0tId+?YLv1lcNMqH7|mjzr$dk#Yic&9d)2!x)Sfql$F-^#yaSz4P7>X={pfCxQr( z?DiTca0en5aHdL)HA3mNBZ!7emPEW$XDK>m?4`owERK--< zw05|s4D>^`x}6->k>M|v=E+(fJuJUDsiw|=Egi3}u;v<(N_d}e#7|d#?sl}5`rI}B zY}UB4zG-8+ezgjpnxR{6RsFy@MCyV{_dEZ^fC;ThGjd?zR~U}=HwaHg0V9!1zj*F9 z+oZ7|tIh7%BDpr`Uq%%z&Hr%ho2u(i1M?j!IfoG!#_eP}J%a5Jz+H|R@VFE=09M-; zP_+cs@XP(#5%V8DU4Ka`{@*#>!6q*>@qJ*1w!0mu`f)rD6gxx7W!l!(JdV@ZAOi^7 zeJ-!Xiv{bxDA3?8YHFJ_ zxeU@Mjft8V26B6Qo8ydx9u^|SepwKS_!@A-|Bd1>MhwT@@}mdr2CV_0vz;5569x=E zc~Rzo!6$kqtM?CsPlZ1WJ{Ow)+zSRlP&WYkVe$uXJOM9O>o*!@f(18FWq@fZ@IeOC z;Lbj}VSl|QS)SjZTjd{rgI;0bCj^Fc1i)MV>4b5AJ{LXE26e^rptC|kGdN_$fkx}s z$CG&DNxUD|S;--an(S*nxbxhVpW^zPT+XFlOcvr?PXS(i_6}BdAz;#CBkHixF@$u6h0Jnl+`t^o5p+ z$J{;VqTkFYTHr=M4}Mj`6<8_y0a*>~)eC0kg6AJLQR4)wuzShgg3FG)$g4DGI8IZa6nky_!vMUn>e#tWs z(q5HY=5N#krgN;Do~Is4mw)lBtyOo}9w7+>M)Ga%4%8nszCl@HpsKvcVtnd&8w(a` zDQl;=wsl#&FP0VQ>|ks{J<>)v)^>Y0h0`}(>zMJP98i>naMU)-Gg!u(H`t+MMw|EW zcNQJMJ0QHcyWE{c|NG962iVxYv$zHjN!p`RdT*pHYu3$$1xwp2U_ox*;^9~VPBx04 z9p@~z5_`9&bmq@|a5_ObpZwf)57)~Yu3l#QxubuE!R-*NcJE5h&U;gBo!-PgR%P!> zCA4|np^<(lY?7hDMq`+oRb$x!P5d4=rU$4ybz9b6#@L(o@9^+g*A>+`f1gib73;dN zWBlH1*weG@l$U5d(}d{pasG12=jT2EloO0)TrYsE8h;9_EW-IsI}%*~sId{bSqqO` z;cEMOjH~cT*=&eC-i9;7T&hEi82vGqKhRkbV0N5pp}lSXb|qhs%w~f(=@Nl);jiZ; zL)o7=$aVsiAOSEgjQK`flsQ_jbCqP5gLj6PHKj|X+B-w9xo-T3^I$lH{VSpxZZV3z z3F!q#CK!0lp5mqA^7^xFMfw0KrQXLC6dP{3?M1S9OaK!ff3t9pW~ZL}-3cA~ zG}XF9C+mW<{mPCHsQXMy556c>=N+Wm-d@nqDW5&4mAhXxBWn&|gB~lCSU74}!DvSK zJI(w~lS#>_?8tMFw!A9j5%ev-Zx#I*9M18eS%Ve7$g;P<@TtKbPO_bv(7N2K%{Kbr zKt=mTU{vF%YyklTz?Q1x_Ixj-@%`sqHorlHa!mR*N5fYXeT$PtR#rl`eonTT^!hn8 zUL!*(KRp>h$&5&%JohkY$u&f<1C9(`jd~vTc}KVXOQoSO5q> z9yB_UCjvV_&T=&ir49RAp7zRre`r(avI54t0cV7<)x@E;f*e~Eindp;!)~q$=>w0H0%8&^p?(L_UPUzNrndz=yG1oe~=?$1GNQt-p<}x6t8b3k!gd`rA&R^T=fZ)bw z;ff$; zE6B1{WqV6=2cDkqL&L$5+%HD0P*fzj6FU9i2=|s5;?Nppv~C0|KKtFUYoVB zdY0LY?aQ&30Hc8xMidww_6X%`x@cONrH?jfN!Lo*P`77{!r!HBd)meDnR}41N#$Ot zB>h?BQPu+ESrb}v59NjS%I%Qjo8I4ZBda~6`N2XIEb=IO=e|l2&;FpAm_;^ETqo2Jx2w~MN^SNBs^yTkQ2sx8>w zQodKcpO2^NXX<*N^t+OAB|5UVbJz^M^%F@a8*`j?@_VwxoD**iJC3b4n;av^fI$ zqF22lgi$2Bs*@&ePVf#s>k z(ed1`KXbJnHEH9Ykis483IoYvpf$WhMevaXvPvo6HK;wPmjevMy|z;##56Die0)}b z&e|vU_RZT_xr$K^b!Qc1Ri81Rgy~sc6*aN-Tz9Hm>E%q*Ir=6sS>DGl;6P)*rQq-T zz#qaQ3#`Yh9ha4-{QFrn0Pk=${9LF0=ZQ3o3x0FWB?6}ENFvSLzd^!Yvb`FVRRL~? 
zg#Og}u_6szl7ruPn{=~$ULR1DdEMeybarigU5BBwh&!{eCGraa*gfiNkOfggtPyGs z$AuonlWAd$x)!vbn?96z(tzeEz19b+`-*$Q$Vj@+=5@-bwZVjsjpT1bwWF zOVT8I(LVte2(`0#Xhg4B+nD7!ZxGV0gh#n?7b8C}*{Sck6 z%+fsC3?RC{2^iylx2l^R7&Cic|sz03W5$UDY;s!zx^O?e-rS^ z@qn~(hS6EmWnlq!INQDn`p3N+@Or%1#nRf=Xf0H|hO8SbQgtb)jZQG{=7A-L3O-Ry zA#sa;b#F=U?*p;{UhuLQM~{NWq^uGO;@H<6%r!;ym5YBE3T4sFq^YgQ@KlX{)U_H} zK6o9gMPX_2p*x!C-1k4(X6XJldY7BGLu+UE6$`K>y|bBrJm=mpgDkZvSq=KWAhU`Y z7~|&RPOe%-#75)t`gPe2ANJ#z{IFzh*{!>6!=|ce^Rs@eD+amlz6GdTPNi3>cmof<=R!`ge|i}IC$HTYpe6={ zB4BE047x9(T->J2(DHp<)H?aFi0|Mhzw;6Oe8NmU4{B%o&K4lqxQYXWp=ty16PjMX zK~EFRFgzpWN%bKgu#u?Slj`9U2kstC<}L-j*9KDdK{rvsskde8C;sW$YpmCU@HZvE zPCALni8js7j!X#E%m5R%j5G0KqP>{53!4IcUWf8IkzVY^n4Y~N$P2m$zhLga5VC8^ zto_4i^~h+HV*8wep^YBxJT4ehmvNRQw7GuM1+pN_eW6Bnd*F`#xDFj*mfJtwoR6s^ zz*Tx(;D~BtQ#(8-CZ_bK;c`J4m!a3p;1tEl;Z);ssm^S6L*rUx-7j zoC2Zf5$F}1G+Glo@v|F@j+-NQ+P6QM=f-kxIU1QouL(T|5#DZY;sXJ4)K}Oa_fkIZ z^p)7jr!js!uJS5l-MN%h#)Q;DN>(KP(YNdN?QH`1o;E3rM#!Kxt{ z*v7eISRe6fN}NzC!Lj_duNj#T!@Xjz`cH$`B#edDLga$}IT$s7*$S%33eO~&@dNK} zYMGbsNV53ug#3?f+dtXFSNg4rjO?0%?63t_-aWhg<#zY)T(jzv3XMh{6a z_g>wKzqxqJmAT&jOLwH*|8_b@V=mK_-X)>;&hF4(aRGcyV#=I-AnHR4zQvD8@BGrtC2 zy0QM9Jn)!;_kZjc+`aL4=@PEE=*gA!)wWkpTclNYs_8sfq5SmZ7cFp{Fs#@5yV^cg zo~Q0=9e+Ik>pG)rTdZ&ES_d_|@4vLJ!@F|5&b^DzCJDFBSQF&2cxvvZQwyhfTz$1A zfX%<*k1lY?#J;JdpRGP8HcS3yRF2vFyuG=X?o?)oSI>VhlX7h;CQW$1A_}W_^BZ#OwEn zS+Y;lW<}YW%Y~mG)JjI|7qnCU!TwOaS^u9>jsM5KxxmAwV%Tz>!>+Eo#-ZSNcW#Fa z@OC#(hLkx0Y*$5}*;)PFWS`F8w#&c$>wSqoF4@`k!Y>X-TzaQ?aebbs$^AReemy_g zy6_D8XwEw)<1HeV=M4_oJ)T!m;K0na&c|`9>a3( z*t4vcj_SU@^`yt;Z0+ zn`?S?%|zD~5gli)tvTf|&E8bje5G~U*VGe|VUlcDfhR-RZ*G56{bBdF(7jil{_tG= z`ohiEWns;cm)?J6oyYywNmN^Hy<*0n!?guVT|TvQYxYVkdl2yp8q1)xiwm>G_m*Vt zaenCim+#iGOTZH-i)9M$?_Lw#@0Kio&TRgST*XanlKy8bPUkmOEv`}hXnr_e_*K|E zgvy`3M}|B3c@t6w+uULowmX3!qEMex?iZyQ*qOE&p$ey`2{Ui{hdtGjZ$GG~?U fn=RA#I-kj9z2a#DV4B`9YD(()Fax5V@&8Q#MfV?# literal 0 HcmV?d00001 diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_refine.jpg b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_refine.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e05cd6073db0c4e6a9628427fb10593cac1ac93d GIT binary patch literal 38340 zcmdSAcUTn7)-T#f7(kNb3O)VU&mkdoxcto?7wPU92{&MJX}1ye-sb_J|2hwgolTJ7oULeuLE;NbeE9m zuZzE4@?VeM!NtbLB?RGt{?X)bRkvNhJ$$UZJ5R8&9sqalVPW6Hy6pp4FmmGlLp+SI z|8-#9!N$SG17XCti)m0#h7lhd8zVF>Mq*5BU(Eji&OKc6`~1)FC^Sq!51c3k0^+jq z*`8N-Q)!MP*ac0U0|^MJX=v%_IUaIyJ>nJ;77=|aCNBFzPF_J#N%{4gw_4gdx_V~j z7M51lHnuLVZtfmXPp_bl!6Bhx;Suo(iAl*RpTDH$mtI%^_J65`ssE#1_b__h z`Rf~i{?ZHUjt8b--^0PZ&yPp`Oao-%MDai%0H5-CTy}Lg0h^#Eg38o+oRFGbXpIB; zmumme?0-$M!2e&G{ZGaIAHC)QB5W+o$HTq{K!EG6oCq%9|A2y(LPLGweqy*Z5+_Ga zldMb2of8nRBng1Le$4uKtU*Yu8&hxc%X0PSY(mwWsl#T;Bh?8`wYY+mSS9os-XybO zKdjtKsha!sg=5P8P*0cquBSun(Dpa@(-ejjCmT9SF4pRIx$)pklr}Na7D_+KB$+F>;F=4Nh4zNT)>R`RFuuf7R3AmUv znOz5+ce|)o)s^U+8%|4=eqKxa>n9oHQDz}eqs&MwOpU$5>KQKBP?>ASgiw|nD$ag8 zL&1J#ys7hFW%bdH-8II#hWfTO!|Y>2c1(+W&3Ajg!A(w|egCwnP=p4d9?^8g2HsGi zIJ1;|eeKrxNPq3}8!gF(JK&Jz`Pl{25}_7lR;kyc)Iln8Iyc{^Av+9Cw}78Mn(JRK z?zB^xU4h@+0$KKh*MA%lx&M5TMR@jRw_s0zHC}??myU(b3q`h*{J;13VZk1ksMN<* zbE40&nja)IZvhM2%roMqTcDHYh7!vtqWU_i1U4qJeuHjAY5dc3+@F7Ivt%>dDk^P`5vppx7yK*Xy7M>n=4E5$!9&2)0Z^Q_E?>b|R$;tfh zUZhcNEZqOTf8WL)l)t_SXk_`l+1V3U zw1hWZ=Km#AkPD zks0H$t6u&A)%{t1#fD+9LQ-J53`T%WTZN@%63@{gn173q|K@ z4ve^s4xY9`h*Ic1MFkziiNkF44IeDDcQHQrO1?viJJ>PSi-W!e%Kc`H>u91n>#{nL zXEPTqrG7Wx2RNeH49v{!1CbR^Sw)9O;Lg_AhqnL~`Z?mUG-LDhLjR>r!=#&p`4opr 
zpNT`ynm}dXhec*_q`^hzm7WjD)O%h>IPygi&({E=2F=F_M5~kr&ZHgqN4P$s`MUAX z9<=yA9Hphb=E7cSGrlr~#%jeK`hareOt3Vm_mD3zUD-x+#5Z=FkJvs`S>NZuv6&A^ zxAS#xQ-FQIe|ihp&|t;Ba+W%sX-iP4rDfE9R;U{k!-ZWk?Z<~!N3xmj_X~EV=T`Rt zevd*R;lhaxv)VO(e!ECk$o%-$AC6X4E8JZO!?N>G8&`=t8N<0@K!!$3LJFce6=xqt ztVqfQ--j)ICB)dvvJ|5@MaI z=v;?;vY#(++ydgCY(%k9c5O8XYi@|!<=bh5?K{8yvn*2K6s4~uNxFxQuTZ}5WSQi1s ze9fJG$o%$dU(4Nn`j~1LDIX?c#8Mh~i_%vU-W9^-Lls z;7HGPQXGUL6T7AWoz916O#LdxUprV(+sK5qkpR(PG}Zc#A9v~U$%XEHv@hQ)`>rb( z^ZXO3&ukqE`-afa#v*@rUb~+ZXOw3($#lxiP+Cssr?2EOm~LsYh?QA!kPwir&S}2B z3-9bDrFh0P8hUfzy3y|OU4bySr;+a+K2{u5!a@1g|7sbs?rh)4m-1bK7H@%_4_d)4 zzoj1|8qpjgRyX8^keg?}At$KJm7wK1$Uj?&F}z?Zj4k*VOA-5TMgl8a_fi!7(arG+ zpYJB2R{_nDL3Imcr>WfnT~oIJ{pffb#z++H{X+6)j$~oh;g@QEeupmmixd7tI-&zc zuU++E|NcxaoC)@PN@(5Qq+Cz!{)ZPI7E48>{m1Z#DK%tp|4xC<9tK=JRJeMhi!!A< zhY{8JF-$o)q{$*jgJ5f{<8GwlzgCx|Qs(V{t1zF8D{c9F?ELOw?#)Aqb-F#s63T;s zT_~U|5ZUP0z7;IQJ}*&k^V6+f`_9rVD?|y=w(XLo__?k1M4h)gM1wO@;-vTDMx4;C{8vA(!SJs^f z@CIbT?uL--79e)>h5&w};y3BuCEZR>k+@e~4`ar1Y5SW%fmgkPENl2pBd7kPMyUEL zZSFj02NsP!t`L8OvuYF8^4W9bx>>#!NM zmGT$X8MQ3a9HY#&Zqlgv{kgD>Rs4yx*wrT zAx~zda>ztx?bo_KRyh3`!|RA%+kEd+Kd!s;>3ME}LjA+AY8)IgRqHWqvtYKXx;?He z9M*e58fs=lw*b^HNthBVycFt=r@7PaTl#t0RoLKD{SI%yRH;XWhhby13&uT7-)qr% z8RcZKTxit@;c=gzFM144n2zP#m0>K%#}kaxp-;rlOl~n$D=y=adlTp+!`M*#o&;Lq z__hw`E8+e*nt1D6T?+`2f!>?7x+FoCr z2hLLxGqqoS`w+s@-%ur{r3q_!CU)PCdiJn70~|Y*YrVPrF_7dL)3YqnU!|FU=*v0} zSX?kOu3L}l&*s$=ShsS=1Di~WGkcngVB}+(45llJxgRA`sbv@Wfm9jt26ST4vJ?p+ zJVUgQENK=bUO2p+WU_CWVfudiHRJ%Qe|0*9|8>U8 z&{KVZul&KzyxgfT)Xm_*3~#YppdwO+vHtH&)&Hw4v)q63t%m=@&HqoaC^d}fZh>w? z%=#b!3CX-$euIyGiu!b6#5?1LpQc?uaiqMcZ4d+aI}`O}4rP>~n2mV}A{+qZx{ z4D~3p4~C7_n(Y9g$%B*w7hXQI@q}|m7c^bT%IBjqKR@ua_#*IqMIfF(ATyH3NNH1W zluhDWfBND^wUiQPafNYj=|@ewqDg;NY#9b|b6pcdtx#%RdGObXge|&?(&sWd>LA1H zz9;q5yG}$T;MB3%;EK|hT^ceB)cAN$z_z_Wkj~6|Jh0ZtOH+T*y5HD%jjp@M7`wEZ zltSTZ6A}Jlz-|8R)}qh!tTlbXgjgkH(#k{tIVJ6u_Ks+>u5&R*J&;7m%+UBa&P8!^df!d0JVE?kzw zfcd`1TJ5PePl@41TVbnSj}6Zf>`>Js-hf=ti9Zu*CbRLECXF#TN}a`TgwDW z?XP|ghstlPk`Y4-ikgO9^j3N3?)XF8&)Q%0>P_08r*j|j@;Iz2l@~^Bnnx!JR7rm% zgVASn@^`L&TB)yJ3Ah0p_>IEIC7~vWn?{>kfO5Dao9`%V>P1$VnW+;?Y9;57@pN?( zHAtZlpyd~R3*2YYzv;Pq3*;eYf~|H|PVP;29flP*6fscnAz)(rNcLIpn9bsTaFFz~qUNctAq;u${JJrXLHm76_wEnQGz zei)8!Ev3u;LbdL$rGBNu@|lqcz$}oQNYm}X&Z=9$Us_}*^#Lkm{Vnpi=h~8(uVv4g9TLv2S5XZ-F#LwkQ~KMBp0pFVQk zZmxTOJw!iqWUT^gW~-F4+g6)T>F%ILxm`&>8M>tL$5tnW>}lY~y?2?wuj&mH>>X*M z&V*uwnIlR-YsOmYc(99++*{yzM)oZL!`Ktlq1Dd(NbYrRDXgEf2=?t6Y%{+Tebsvl zsM=*=VxF7l;*HlIX>I|fwJajXuujTyX*%d#T~sLC{?6WVuiT_%drLBWZ;6cES|7Kq zN8Tw0OHA?(w6fb*d*Fr#^<`b+7GOr58Z}J|N9*lYxZS-f_%Y1pCiwv*LkUn~P&+K+ zQe%YfN{lh01Y*t*i}P#VvoS+_%4|Yos4Ecn`fPMI{)Gu}3lw}g4En>d=K3cfi{$E= zggX6h-S%#PK|?%>5S%w@F+}^oU)=Y8%`@Q{Yt@OS>xTxWE$QS=al?QIJsT0WA#Wxx zuy2qY{o#pNMZ&9;4*<)Izc_=KV8l!U` z-a^d%vl^QcE4x6ynGKj#%h^5Yz*jC`TcWC9dc&!AW=sT?QZ9?{9h~q7qg-U06L92G z59_?K?C>p(oHTW9+%J@K?nQaMO$E4eBK}bVFBc9qXpsvM>yip<3$b55`uKudiYne` zx4<|K&{5@6M+&G@d3ty(CDyhm@(E|e zK!@zGTWWcLCbR;kUeB7rh5fZ1$FjzeHezXtl}$LfD3@qe9Z&A839rh{3Rm^LEqjHc z@W@{6>b+E8%k6@Nzd}NHK-Ep}uxo?DAf%R8sUL9BtKNuJ-l~sRuDYlQ)%oOXmpYxd zr;rag8L{zEW|)d;y-u=t%6d6_poXTPxrm(Y5Wo4BidPB;mmYc>jpt{o%<>WcBz_bx zM%!n;xYr*=Oz>D6ieEL$aG?U>Gn7HzsTMB}$#?vp z_GJvjBS)!@^*0MB&Tj#7qU0lt!NVHY8gB~bIeZ0;1^>fM8Qg!dY}Wr~_{Kn+vuK)3 z_+e#uhZt04bta|!&(K;ly-K_CgvcM0mKFU-FB=h3)t>0Mc4-Y3@BC6kueEbE3OUm)128wZ>l z?`EGBjQR`i$}l;6amEv?NO{q=K|I~7XN&_lT-)U!&UfSGfH6?sjt6d6zgvrfgl}nY zUFJMRcNO||4NA4Js-v^n#tc@yIItzA0moI;e7>gV{;prw?`Id-@#WQM=wpk-NBDmA zvk_fzgfdARqvx!UNz7UkLci}qA;Cg!EDG2p_9gU|j> z2B7{-BJ`VSX+{G*ALc36`dOl}>r|z~)K?x+WLi*nT<4gd%kTJeiz5sdR9EIA*I==d 
zxK|P=I~7!%x06V@;&@NV=5#o2q@Zopn+zSKL&=t= z#>Q;gn-x*>y3?rvoxLBzF~k}v;Gl$zuPs8&+FIu)W9K{q-Jhd4-1P8bCZ6>8B~i!b zNRWwWC!^Rdf>G@lV5eK=>=yV{v#{2_)EJUIUa#vW@$2UMcgbq=u@OB2D=|7+1v`&l zC0*tpDP`GLYtZtLd2g_HT;kHao}A%A>xta@DWkOE!8K+`dp_;f6)j6oy5c=Nlg>s! zLhgR67?1LRx8M0b&SN$o#OTaTO!G3LXZWqm5R;t7j2}CSwD;!m1e%}>S~9|9rPoYm z(_)p@;%Tt3WWcN8tnpGy`#oJ%;lU9WyJ^!p!Enc0Z@Uxy+8Vd}s+_|lT@0rx6sf>4r$mBi&yF? zT%I(%wHY=3cb zJXy3ONBw@@kZ#fZWJ`<>uCxBQ=Ke39!C!|PMzRW${;TGgXqc$n$D-H=Jm$v2(id4e zTzJ0?Eb+}8(#uB2W3MB@J7e)gq5eza9U9IWPhUlgssWdW`J4>K%c|^0vsNx485XY! zY$Qq96LYD+3rjW)Wtm!K?`-v)wj(x<9fTHsl$L^- zPu}s8L$aIQ)pfOxNv;-eppv{4_nz17&8R1`K?^cFj%$3GI?HLR=h`e8?mefpZXR&1 zOSN0nTB<0y1w4kCLMkb^-r2>^G8Wj4waMj`BGx#;gMbzLU4 zw|7UY)Y(q5`aG0ZiXA5O_;90hKZwyJup7>gDl#bok2nn37mvpbteqUNBZZf;aovB9 zdl6JFo5Tta?WLH6Z()XHk1S`Hq=a3?4-1~U6tbh_WowS;T)!xhpJAwPPdVh}@mC|Z zj_VoYTU%#@AB{vSys{mZfV{C+E(2>uD@qizPo8*F3EP}z*nOQU*oc~GD0HTKMeNcN zg)$p{mK|G3Dj))H8J)9&*BRr$-mBe#Ufu!*hT=UuUSLD5qnGbvCVYDN9Y`qI;?>?; zHcMQbnhO80Rfqemw*xQ*#oY%4Y#+X3!YOC`^Ah@hLQ0IRu0TGzji}kH`%uzBr`}EF zc}jQDxGzC*QpF+@{A>kVVNIY=R9j@#_5K zUo1v;#tpDbt{Y$*@hH2SACA3_*Q#~OmzEu3*Cjp*@OH-wL4;ZaS}-dz-FX|%*~^d) zQAKhbBtQ4Ag&z{I`SosUCN(PGQK6`xc+q_>sA|@Yc^t)Hf!$=aL9a^_!vIbJG zo{&_NI;<10IvSKrRu?Lb@)=;4p28ZNjrW^jp~^vVTnV8yQJ)>H8Zy-;wp69PO}i&f z>~!8Wux1l=cE&?Klf7}7Z|6Id=EfwO{(`iFL@|rAFftQOtp*naH*pDjZ$dNB@4NtOh@uaSX2_ zU3v>!YCo$c%{C}?)73__>9_D$|HE&m8Bwn&|FH2-X?R z;8sh%Ib1NPxOSa7bw>Ldxb8n1Pl8y9QVlzL4M|%#r0!M}%B?~IZkW;r)zKP93(65toohE1{$RPCM7}n;hVjVNeWs^6OO>)0x2JyS>O=;u28Pu z*V?>@cTCbX&e7-CaG%JQ4@xMcY))TlX>9d#80bi|!r?9h%8P^-7}#+W3TaE|ee*Q2 zzWNC}8~9+H|R-A9Wsa<@thRW?j9nUM5Z^%eh?qlCe-Pb0u+1^G}*ziM{ltm>?Yi%uT)?9hgFw)!c{v4TP#Faki#Fks&9(C zF3(W#q;TYbRoIHZ5A}Z1PY>%GGV!gHmM&<0LKnivQ?|FZX(P0L5?`x8ZIZlts!SO3 zP;y*T(@;3F#f!z+CX=ZSyzw@%qMUv9aUF|+27F2Fiuubg| zeJciNM=O365b{t6y556J#Fi&bPYFC>J!74i{p|Au_4RiwQ_{vCEmrf4q}}Ro@dNC# zpL$uxu#0k=I`DM6AW%2b)P$$IY_N z#-g_2Zksb)gHF>_H|Nxe7spWbPIU-bNem9@N%!qJHRLgIOB~-nrZ{${Rm=C ze^0xLcnh3z8k!)mb~E-^-t>gyB;MzzT0GnNQ$NYQdEQ{FwFOn%BgOajY>g2rt$!RI zy2%j|#s55JuaMc?uyhxD+!t@ReQgKNE8F1dx&=B)1t7Gx^ilroQO@4l?rZ`Brd4%S z(;QGuRXO+8R!Ovoc0f0L-?aGoGbliWBjkVC2#h-H{| z#UyQ9QfC(L5z$4u7Z3YKaR<98CLqJeqEms8^yG<64atv-{;`2B{!CvmdmCK@tb0Es{0 ziImadVa2>C`@7jGWAeHk&OkW}Ch$h(yjm|aYs#xAsOj^BihPGbf~I%MdHl4JV0*J2 z%2^{2N%h>0M!Fn{dqQG^_TGr3AIBz&Fll$vWh2s-e`z7-VN>fVg;<%CmVMsW@wG`` zyl_XyVx<=IFpz|zbl2wMHZ9JP7@II&&D?XBiRS(1LVMruc_`KU-M|EMqBganthi1Z z5^9%>3%?bypQ=Y)l-X;td`<1NlHMiU`cmB6>=HBmrT7wqyyGyV`l*XCpyJA%dLBKi z<_Tf;>AD5oH%TfsQwprIa|G(-Z7#g>&U06d|GK!?ov_^CuOWGjftz{i45nx^6Zk0U zvW<0uvIbFU&92gYouEi}jX|x-nP~d$e#pFdgOEMs=K0-B)R+H?$X{AxXO)do-vZwl zZ-H64!t26*<1;*rbwgNUa+H=us1SR%GkD{rTb{--9n?{G0V0b5%*BE3zrS{IVSoEi zi2HxU;iG2JDr=*JI>fL0rCDnx?O&FXHBa$Q(Eh0Iso2I$O<>RZ&Y_<%##8o`^qPe3 zcV`muRQglb7}+)^cM)X@FesY1oShV6@41X=?;f`Iw=cCA&cBN9=LSXS%n7u_eR{@7 zrr?FfydbayEgQ5BI;>tNILk4wM||(ie!g9C<$8@+FW~eH9djQ6KqK=e%r;aH3qO$I z;uY(tZGP{LmDSkO_CC6UEyKhT5|Ds1tF)QG43v@Lzc3PJb*~s0RTJd;RDsq*Yb>Fz<)pZR4{rSd%TWFJm%2QX= zVrju*Ys5fa0{3Ggn^E4otwUzZq!kO_m3XO>n<+{Cj?qhP7C3{xk3bvUC65+_MQt88 zj~N#!x5KuXJlm5pA@3LSU>=5TfcKY!O>iA|Ac+|_BxqwK^UBNVoTm7@rFQ#Druw>Z z5t6SJwat0UXsMj>S%V^;%t=~`yn5>8oThx7^p_Y}rqy$giQZHTo8c^_2qijPf$J;d zJo-{a5}b9-W%AC3KV+~XgsfbX?PIemy;77AR$-0(?}vzjLX_~B)MVpgNbFKE8Oq)F zwNiOoFCn>$(W^Rb#WduU!p-uMfl}T6Ur#0U< z;$I()Dg&2;YlV@uLIRK52fZ$1yXVWi8p0{Z$)0C{sQz3df(G}UP4FDL&MT{KuzjA_ zHcwK>H>C>SJUE&|b74@yE?FlNMB)6(bqZYXZnzPyUZXY z@ljflzJilUk5O=a{ZC{4D{hzWx+M8{4fzWcRmS~QzaX1dELLZ4f z<#qs^G1IdDJ8?@lDio=*7Td!^;PV=31{Voh^W{IHUJ_=xXN&kHGtuBa!2Tskg^`ih z`GYmK|G$SS=o^fkPV!Mh1+Ql=koM1yWV=Y0=j^05PY4Y^sWz{TSb6VUrl^7|pcpSm 
zCdzNQO7)h-<7P#?Ro`#qQocR+XD#gw4_dsbKAw!7G?s3sEZ7auMQrhakP&|Va*=%L!@+eiBjFs+bg-g_*H&v zkCpv^voaWQH=HHVNZgnXN)I>JS9(#-Xtnh#A@g^Y0I9RoAVk*~x-EnHgQyoqk+U4K ze-!+PO`3rrEj;zHFyRcT|Mh;8wwJy?=J-$)%{W`&H2d|{|zoB_P zI2`@nh6n>*oq_Kj3d7skdRx@e{k4lKNV`7ay`Vj-G)BmOWw-d2MchdOeTV3go}Sqi zXUN6Girsr9uhFm1})V? zEEn>GN?Y_7qzJ_+1mNl9{)n>uAu+xo!qgIt@g@Q z*g-eKqq~#b0+pV%^ZTA{N}L?yiPV0TZ>)1)Ek%dwighoz_94$9BKJF9GbV}e!y760 zaQ}I21divtHkp5z?p$lSzZ=G#<{#qxI?IrQwnSkVS$qqeuMk?bSqqsUB@?d#Ylkh9 zSb=Wpcjb-*rqMhNG8B(t?=ye*n>m6z`LZMW-}K3Xq!mww>Nc&o%00bYr0NvXznDJx zEUsg-o;@lJ&70EKNSjQn7?qT6dj0F?pEczlb%igg$ybO}m&EI6Q?#EG5yN8%#py26k0=EwX@@S0;7 zO~Q4do)zAh8pCHZolR-$Pl>oh;f(LTu>n7&P4CV58Y33EV3ea>nWRv=LA^7_GQC|5 zIk%P(ov*7VTE-$LNOZqy$&po>cj*(h4;rhGI?@9{xMNs{@Pzb(y;MCl&$Zp34HVCW zjyEmuhV?$2jq2;J!09meHvZY*AOFG@?=#nCq6>)8Vq3|D&qaPI6RiD9)Y2+8+1{8p zs!AkS`}_5^rfP-xVXN}~RIytT)M~(wG^lT$H`~$4-i>@1Q;RDxR*Q!yXlcGp4Epqp zHK@BXqeHZtDbHR~;fQ6TedM%9Z!|>6vUZx!qsr2TW$49XUwq$zM!XVxrq`bz7yd|| zwkN}O3_(*O#Gdi>OAS&wr(C5+>L`hIPu8jUxu1Tzgy)rc<$h08RvodIp~XB76{#aD zK3X%$DHuBGhoV-VoMglNu*Dw?hX&QOuZ^kE_d8PU9Y2`;%$l8E2E|Amvi~ybNsxUk z#}VJX);{W`f(Z;mo zO1FIT#}8Q2%9iIX5PTj~32kcaSbeyj(bJKGk}@%4H&t3BF4DPqi}dK_@%aVK|5%Rp zy{SJR>2+{SC^^uXOtr%sJBC8ihagy=8IWaf+Jazw#&EqpZx$RP)4R{ zDbN_Jbo-j^LJ-N>Pa4Kdk1Qy~%|GxX?-14FCKk%_uUR}M`hJBaVDLqy%+F4V2={B5 zV(z|UupbUmEqF|+T-70~lHhJFbNUG$glN{CXY(kUrL_2w^VM%JF+;SpR`vGk`WNo= zn;Q7q@Kx)>)aaD{ZSPe2&Tf{E8TXMDX-JAbW74`|72R<&xp6*Wu#?mE=eJ)zCKo@{ zn%C6dN@lHCVCetp<_3mxRvpr&;q;*Uh7FS@r(_ZPlA0sqV!7)@XkA{Zm_cB5J z(aeKr3qqgr;!@C?)~NJGYCHa#jwE}X%HxhNNYNPAbaJu#k?Qq!!3Lk_kA&Iw?qch0 z(dw=hk2}=aK9b*kxWaufsXX#~oX(=D1zJ)+kzfzK8+m0niKb-Vq(Nm)7 zWwP8>2*(NGk^DBPe@Ew2NtseO-Un&{Dj9%}3KNTVD|86?m>^aH>q=pP^RC)0M}_cm zVachsKJ`An@+r19`#J)v_~oTsOWgHTO)UIP8x-Uj5*Bg8xJx4~_=)M29ge6zM_gm( zn!A9$zQ~1vAYTIW42OcutE3NebLMsX%7sVyVjK*6smdptyS|m1TUH8G{u$m{A8k7A zXyuwub|xLhxAyH1w>NzL*k9?`IQb~SK8jytTU|HRvV%mS;dw46Iki~L;oNtw*PEPX znTreVhOdL&DDIDlJ%++ z_-yQ=f?`bC6KNINhZQqA(Kgxd?tL#r9NXv`iLIbGjYxubJgr;ki~x|aWYvxBJjbsh z&?Ci!jXeFLgLgNT2dUK7+U((^l=Hj$@z@wz^UE<%iw_4%*Sp*%F3;WZev1LuvxL!? 
zgqM{UCP;-tIRy5F-n29>%5Ql^F+z>Z$XEd$;FK2W{pf}HAbk^o!$Pv!a{M5PY`F`p{uVf7sX%RQp!rZfI3lam(A@X9GSC0SGov#= z72@u|pY?UTmc~xGk zmpUqS?;7&v=ln6s+vUpqWe@i30(QW1UAm^a8jK6g7-$hF*@vj?PNh9fp5MOTrP{*A`RH~Kg&$D+}*-eYT1G|hmOwFK|eF=`TXDeU|rooQLK4`IP!-p363?f%s)%B&vnnKU8EEJn+>k3BRXp7>)mNd!Taz(RTJ zjRTfNZu*)M=wB86d>?Lh)i|IBDb$pz-Ti4yIvg4%&8x=qHG4_-C+Y^&gS-WN4k6KA zkU^G^Ohh7-8b0vWJc&}p&N+-^o%z1xUS0rqyAeR# z<^Jfv)V-sa&rdV`c~mQkJ#F7ar+(~yb|5)0xN8*c<3|6>?Xi&cfIv`!v;Oysm5F&~ zOBO7>7%P1$?ukUpoJ6K_X8pr`Uzp$m8kbq)=~<->f=F3}96aEICqppo%@SCMA3p3k zSl~?)!kZhyvGD=yR&P)-4N3aYB9ixAH)9d@A?&^f}BqgsLT z8WSvx#M|B@&pN@n1@_fJrO;QSYsY%D?Le9c=Jluiy(E}j?0ai=NJ3mX~ z1-i^9l^RV%awH5K$^LuJ|u zOKE*FvC0QCb%{-_HdLOAuyhBM^8A}ejrS7+@%Iflv;2zuMlz{{?2TVUzPx(A86rzD zdp2~ze7-;ZryW59&!}D-%4uk7{shRA*c}{y%w(`XZ+%*JE|wUb1zNp#<7N3OZtm19)?(0umuv!dcg8N@-G|6HMVpLVPUo-kSp{;3wZKu z$R7K@`|M8$!!lBX>oXFqfI6Olu@cSWm2w5&V)iO3 zp84DVrf=xeGZpBz5mi2Fdz&q_)Xv^_%c#XGqEEE~QC&NsEB$Ub@ zovDUd4V%5KMa8|ppl5F^Ikk^_{e$+7;w7DVw6jZI^w2&wfkZ#>bJ%;qD@r_wf4@HrQdenfXifb?j@X zt60LLi*U)!QA9W~{2Y*6v@N=^YWWB(a&B|Q( zH)&0O(V#$|{1(FE(GL(C&a4)W(JoXik5C{Jh_4X>NdGPyy}drET>rYO^K9dF)-$s7<78?fiytCr!FQIE&O4A&L4GWYo9@ zZhNqLQxDI;Z)M?>PTH5g*UZi*gtbRcHS|ubqF2t!0B zKo0W7D%#(r^T$zjOC-ZN4JJ(_S+hJvsCa8r{ytR2Yu(P4c2nJ<(RM%!xe~)C{Ln2% zHde;&8`{`TYU_`s*yU`l(cBSERfSr=mTq`pr7eKyq|*s@SFi7}-KB~b6cIE^UypR; zRj_0N5BRa}7u$<{lolHhlwkt-ANdTw5+K#Z^<_ZpZC~FLt4x)qEKBjw_z~zmJ0Xpw zpu$djD8r;x>MZ?p@uHF%)$a7!d^`KtsI&N3WczylgfvnlUWsVVC=0V!SJxreUM|~Z zI@-;Mq5f^UatyI&MPmt*sL}jkR3p`H#+`IGd^0yOc0%A>4bLX>0b9{_jGWg>fZLM7 z3)hAFT>~U`qmtOx{S@F`qm;bewYXQyQ&>kV#5Yfp!g(AEa6f>A=WRtVjau!tx<%Q& zW=H8w)vUX7O~I98l4{=Xspu`)_GK6{xF!n%7R;OsyPM|4(u*Ey-KogcofuwFvIpeV(qnZjG785EI|d6W}#Vd4Av6YBjW+!*u|M zBk9S?_hlFr!YexDab^>ta6d-ueWN6Jv2w^zB`0!tMD$dKSp`Yl9r4PWK6CBxrjaj5 zs1h6HTGctd#TCu6{DM6>A@iu1@v>&ds_$;)qROb5(dZSH4~w+$v~jqHMJxh6*x_0xX9 z!}agbx``Su$d=^_RlNxfUd4>bOaLd`>rQX?WEPv*3fA^tYe)Od8P~>!b=s)1$RY|t z^HC)1buFnjXeD?%p7*t^Jcs(@mUki6^b8>k0cj(KpN*4cl>JF9DnRR@hgi92@)X$# znl2+Yq){_mr8LYH!^72FURG%h4#dmwba9d7=a!EgvcY=16G4~$X2WlI1-Rk#0Z!3; zl{XU^+^C?nm4i}q3)jmq2t@l)31hul#Y*iLmsm)o0Z;|wj|-69jpvweu|2Q z!wAqyC=Dc^8lp2ol8xu&m*KuNqp#M8x7`mIw{>2lN~zT%-TgQNP(L856AXW;xUulcV3D^h<0+dFW(0>TU$a@}nafxs|KFXFOXy0S92v`4*_r zu?kW+B@!-sVQALtX!9WWsX)(hbNKm!kn9ewE2K`Lixk^O{5nLM<>seXd7?Bs;wWkT zIE0z0Bgn8e26}Gy7U>@9$kh}c5IysDsU4q7HZID^PytOTc;SxnTVrJ0y_7?as&i)~%%vRr|3%DaF!>okKrZ0}YcRRc@vb6DrhClS;@i(!` zhBk>+w{ancA4=?t?DAO8z76W3ByI>6;D!*DaUQnGBb`BqY&HR3WFyhx=bQvKfT|pIhL{N9KkG#j$>OU;l$)8T}l&^iKK=c`sAfr??}Dv&x|r>R!h} zABE5Ji)^_AMg0t{;^bvd*8`tmw&e`_W%#XRuyr6;4wm5M)ZpChMUHAM2|xQr^=9LA zop-_Af7C$ve%jE4p3Bi7v?{q>ioMX}4?)k0)f8q+S*PpO?#pv$d>qD0p(?2EW)eSn zS(0K?K1q$5$H`3#v&DM{-V8exVtSG{n{;AtcBvcGhA~_-H8q6Ph2fh_&+U}bQh5U*2W=^YF`5)*z~ z_9b?yJj``F5z%fZ|D$J8ExI}Z!$M!@JsY%uaJU$;kJDwF)z*9art}OesH+ay&Ydq9 z9QVbBdg$TB&nD=9$|76g0esw$89J_t>^8KIy7!GAK|A}@Dp2+B?TtT>j)12j_M?#@ zD+3swLeS1jBj@H%*uQ--3dQzh;usq3V{3G1{?ws}lVM&!nDtg(K!*&h04L)YD34z8 zyj03+>)9vW(3vtG&Xz`-oEXhQJ4xFe!)M*xOiGLnM!5kf;WgT11`F$!)w9a-8wwu= z`jL#3I5RByRRv|6x}%4QNnenB2b&1UMLwdj?@+(rn7Y1*B(_wEqfaYCMe_3%Pu-)f zd(dTr*87cWnTHD3%8z!o} zI;WsB_UTu8?4h>S6*J>6 ziAp4Ry=K39S?zT8wv!FpGgenT1Gd^3R42ZW5vP|%Tqm5XK*DZFCf$f1|LL>|AW@L~ zL~waxw0o-w>AY4Ga?GX0#oYh8oY_#=}NTg(LC!0GN{ z0+Wdbe6ax>Ip_)ZM3-jU4&E>H zTO1#MU1T%VVrNSg&B-&)#y;n%7{m?Vks?IUpI5FS$kf*~0_E7zzRRCGT)x;!RIDhK zv)%~rOY@{+N?lTAqg6!=L+5^WLeRh`eA7%xtMbDBQCU<}uvZuV;$q)sQ{CHpbCv*l zOR$2}#3^Lk82Wv3d?RM0!c7HAWVO`oo4WS@+r&@n;s!~u89-S=(0EB0@f}d*e_raK zdr`1valfuBPV+*gPx?v=@7ersP@)H0in{j?HvXx^6N`#wjgI0TTyRx zf|M=+AtuK1)96UBBjQk8E8 
zOqjW3V8|_C7MQ@ONy+le_d4%Zqaj#x{uUde(zm^J1Oy4De#RbncvA}U>L@qM>kIKF zk&GNnWBBP;J}LYTg7FdwBfLy-F<)1?c`d?&_T`um${%v0#*dERyBuxH{m~mYc1(BB zXNNGCPiOSUt6{u%&Ewx)J39>3>sXJzF-0*((u`HwvQbDoaiUrYs!u!0y0-lK`erLh z*X#%WAbF1D&n9l0O=WjC0*0L*P;T)b>%Go1-eMagu`E#kq>KMZk|y8tHtpH3KJj_% z>n9W0KeK_vPL3(@Tu1TQx7p7&ZB!K%5r#GaZ{FV5!I9%_h!s>Amt9 zAQ2OJOYW-|$8dG-yL^j@4!(EV^KRK2vhSVOUxbr#4-wIUN}XQ=;obl=O*vkiGsZL8 zop6(Se>Y0a?=6EATQ>jz=mJ>XY6$EmaSY4X1nShJ0AuQonYyuBIE}3*&6}nw%#`rDF2%P0qcz&=0%7C;Pjj=~hcFyd4vDI3rX^KTShbo1|)4bqR+FzCANoA!r z1f8sgWb#&3rICILsPOkjApCCNY|*JfIDXG!Y?4-8Wo2cxPgL{nU~(kWO~s`nnaXRC z#bg31peyRIFBgyf`b{5PW^uulFXNK7MjcI^@{<8;d{G)S1udzPm zXxZagc=@aF2$dFxHe2C0R?(zvsh}J`#UpX?0ax5S>$rmc4^26 z+zt9A$#>uT7Q{(Xeyt9}Z>`)i&a~q0*gSmS{3~gsBP^}6qF}bg9&_AR=|ztEMkd&< z#q)jWXg_r1mdmC}J1vPZh+Kk-paY>GnqnaE^gJ*zB(;0ebGPd0GGoq`rPi={2z-Gy zs>l<4JGsem5vjLZbsy&-E63U79`oqxbr(}D3^gjh?0fLuE0VWt(Rx$x8t|Sd#(Zt7 z{g<2o_C}wxSK9f)+^}zchQMMj$j>sSJJvONgKLuuEc8~+WxPetx7%vDR$_nY<#*^S?P+T* zTAuz(vl{FCtF1x69>v|V#F_?LlXY>8-x5bbZyZ+MWInK;Z*hIgAbHM$hJ??Cs?}+} zyCKivig-R+vQSLZu%blAz{TA_m;g2N8#K&c4ZykE7D1O}bqxzX%;dOsJuNNI+ZtDh zKChrZf(N_lVkEowS)V9ci9)Hu3dW3h7XzSPehEf3F;WGvBL&L^gG*uhr)JNWZft(5lRW4cfbNuWkq!n&}Xwi`dAC@j&VFZ6F~F!tVXO5 zPn`%pg@R#1GqMV(l818Z_+yPv*y_h=K2HKyB-7UQ7hz^n<9QyM#rfg%r;uPjUaV8^ z3QIM{G^PL>%4)Rzq(ZR-tE@G7pyL{?>g4XkHdYUIA2(X?mjkHnft6T}*>)tW8+Bw| zVF@_0Cbh0r)7y6cTFlliT_pl{rA=vKcrynv>R`_mZuKHhxRDJXc8r*WVEI-%7B%8l zt9v%Q;LoP=FeG96KUk#HZ*PdKndz(E@n`mXbKcup4NY4mF6V4(FtGZbT-T=Y6m+H$ zfG4d%+oY_IfHGYjWxV>!h0hMr&*bi4UlyAzc6@Z$ANo-!J!BzusoSdDv>I^&@v{XS8wWrFjBV z$(^59X|Q?)UROLKEt!AVS(7|>6n4+v8P*O=1EdnC2D!(y%znT=IE399 zwL5e(N(61@Rgbqo-l@J!+jlc6Ej~7NKwXR|i!W~L8C-LfXFrIMQ`u}DC4P2wTlLV? z;j>0!%d97g|I@P|&s}-&BQNC)K~ns7nPDfIe#+Hmpt;TI>0*j@s3=z^s6U&87Su0A zScDM4POi7IYEFC~=T6WsSD=%YoowTSX){FJ{#Y;^u>5{4!Sp`DD{=iuH&IoM_FWRt z=DT;9gR}-305A#cxHm?v-Gkq=z&`1txD3-IMb&T6sN(30F2qB5L>^=f;I}a2S-o7` zl_pb`-LEsSE8XCjNBOz5fImLsTsz@PNEo*93SF1k9`cmf;oD<=rx${yK7*pOI-MfV z7st*1Yp zpBJ3lPN8~q`X;-zwC@x>svlXbj;xCjYC4E3XuFGhG7f{5F6~&q3=pa%<<49g-XiBt z=p8SOEq?yAM2(4?g76mMAO+qJn8CBh2(R(R>Tlm)Hnw???v3UY_zkl9SY`3-6N>DY zKb|e2eSxIP%jpb?MXVn#03>g0Ss`}dV5_UHU-oEWXa;4fITI?C!4)a;R-0ac$K^7e z%=VGR{3!xhjR9(TuA1kxU)N}IVe9h=`>gARu2jT2-vgH~x06rbMyeijH}BdTsVYn4@N(`4AdQfXtoj+-VvcSa?eHU<5w1z!mQ^k=zAPBn7<_%zQ?>0` zie@j*ZD-NMF`?BDkrM_Aoz^t%8wEud{ZCbk_`b|OUk!NgnjGEF^WoKJ?wf2&N zhEx4xxJYkI{ngiPcJ&vMFc8{l?*ipj1Ew}G>~}-U*PkcuV0w6l5_;*Huan0^hvG>* z=Qpd0u7Q}EnBUy1H7Z`#;m%I{Y5cAq+0R<4Wc!j?@viEN_eSYXE73JIFD?#7buJ_Q z7`GwSvjH`u6h=cw>W-zyODg`%mYza;a|dcH7hIjDx2rK!pOO^rSwEEDhjQD#%PXWi z+%ehm_q%Dj?O8d<-T~rJ^6K&aQs~R0vcacZYlcI(#0-_L9h;4m?~Vx?_l?e{ z$2&F~kk?my57Ydmfmi1_NSQO9vjJ6QpEv6`c%Qjkvil`2+I7KS_IRD>U@nWK5My2P^rDp`Lf5A^|5-lvt~_ z3ye^LeX*Bxjd&dC*uIPxBp(I{qFlws>1Fjb<YBbxW({j$I1iqKS-OG7((sPof;^yasE=7lapv_)7IB# zy@>azQ@X*sho3n>C3xxFROiFH4Vyo_Rw*d?J2zzp;8* zOP)jc!gTvAlLO1RwtuD5zKF9D+k+lhVHQT@H)SNq+U69BsEdjv$$^GqhxXp;C#oWf z_&1#vt?{bDp~$^D9MI*ST6}^iumK^4z>DT*dNTlvGb`pC(d~2vYv#IUqKj4)o6(JZ zJ>h7~#gxZ&OU!J;oR;|cikv1~qlEQ>Q3bt>+`fuqS5B3rFYq3G#IUM$N;-DB_wNv1`U_l7*N54 zMXe6TT$=k6;tpq}Z)%JXD;clnU`5vu5n!6~$tM%CL}j)YWrGu==wem)@ra7a!lwX&@xYa!vBETemPQrgKw2Kal|i56rhZL-c*>pPAx=cmz`D>Vd5+j>tP9GmooIW}me z9mg1%TEa_EhCg3ig7macz2@d<0V=(YcT?b=8|Dge$htAOMS+~ z`?qU^MpVVKTmscWR>txZNeO8IVonb+RW-O;-og?On%iiPF%q8(Su6|dzTX||dVzS< zoO$_us1nm_8};TV1tQH)Y2uZinNPI%ocT??u5Po*6h4ircX{|guh4FVf1<{N+*Rl; zBzWxVxnx)kHI-4nEoz@?r1OrWRYM9tzeR+pZGrb@d1?N{TdUjiIJvYp6b^rIb%^Hn?qr&*ZJ8l&h>%ruX`ss48jC}tOluEQ@7;a_ zFnD)`RbOcFr!S>eWOujUzDFjneqK|2aIgM)WJ3XTV3#xCnzp+9r z&;T+h9(Za@@>!HfKgBoKucL-1Hhx0=o?n&agxcr!Se7|Lk_-E8< 
zfVb!%Qd+!Y*VACh)zC1r)a1EgW2@ou&BkmB!MD4hV`E6Tle8n;aHsa7g?O{U;oY}N zCm`Zpn!*xQ%N4p)!OgEI-I_|5GVNnA0;~K0Dw5`P@c3fNjgbDEpQO`WX6j0WAqGBV zJR1*tWKDdUr^gGB1HPl`5}45y%$>gsd~HO98Me>Q9I>i=7y?zTRxh ze8fTiE7qg(RL-zhbUSyU8BT9IKF7%4<4d>!p7(;$V3kqRXp%44+*WpmDNR6qPh>P zISoQN36+UleIJ~izYwQ89B?Pjp+`9$t!vQR)!|9eauKe@_U}G*Qp!B94sS_pJR!^` zOw<5fQ3Y8cRp4fE^NXsNAKR$DbT!;kp1z71E?+?#`h2O6Sh8?`Vr{7@)jq0g_VVoI zWqb?oh1i^_y~vN4GOsUsF?u>Po57qa9bfAMr0@59V)#}Qo$~>u^>Rw=Uj*;shyS;l#0)who?!b`s%Wjm(~WpL{*4DAuUv*MtHkbOpL1g|T-SzkaAN#6hr@ zez$8Qx~w@l_*Ea>wevJweO^4XI;kAlz*NiTptQA!I}{BHW>O}V;Uu=5f8W2V8>aYq2Vy71Ljp}T~la9s)VOH zxqyFJ@+UjyOj~klK1DF4MU8}=E>`-1@K=dc49y>K<00`|N?od?kira2t>MIA-M zPVTQs=DrJx-IB2-HV;>qdcR~?^LDYFK3~LJ_`sA2<8Ya^R{>y(hLi5><~sxTx zXn4tOz|8udt^5&5+F0vlb_>KecX``zxTkd_%g3Il^)zFQnhxER?B?#s{4|kN_PV+w zrbaGTjqF9BanvPM1^5biB1MZ8PVFG5S!G>*3j)$$daX_lGLl(f0 z0h%6uKGA*_1CyF^+cn|WY-N!A$pWw}jyqGUN>!|E7UnG(5EdqKK%!Bw7XCu2i$mkA z5{tUf^n#vkjmfdYSi_gS#I_|V3l-q*f*IA)4HGOj zV~{fF-LqhGqN+TgOO*ptT-s@(oWnj)&a*@UegqxS~U-aUpj_mIuDJ&)ftEE`@p&9hw}bQN$^ zRf^?ezWCbEKlSBvDt~MJ{HdQ2NA3DDM76Og(L@gQG)3m^{T`S4GfK9wSZVPt>IHC@ zUZG{>j9D^a6+@(EyT~V+KIcbQJ#H25X0ba=Dm6DvLMF<&>Js+%Cv9zw@7&E6<>UTD zWuRQdZol=OHA`r8=-gVW&{a_P+|EPL%{+c9yCj8idx-eQFY`0yR%{=ttK1a!YIc8F z*qwFfR!wC8hEBpM`A7EL+plt1lryz1qN_B>Xp=Rg_Rrz54$s;JHj>(jn_Xq;Ws&T% zvUiIkA$BsNL3jJKhbpdjkGwCE_WkG>_p;`r0l}zU?9q#`ApJHlhC)&W1fQvOj7z6N z8Iye0L^j`aP3xFKXh80lJ=CsHFrLYmzM3S@w8Gi+^$p9qd&#HD%_it|6X!1RUb~;n zZ$;&q&Ogmxo#k3*;1+jhiLzkx(;4uJXmGS49EBM=m;1UJWL5Qbbz?g&>|i2T&%2@> zo*OcE4!NEPn69j7)I4k95M_HVx+uM`Cu=LE?K&BGB8=s>4rzg|gRy+XK^tK5Z5q=a z)Euw+tVZhh z77bgDmiL?L6&u>g+2kEnZ>Kz*kFn%GIn|p!iQ(;jpgsfSy&gYjBxeqtd@Q}M3{rLr z<(o<>5t$gM5~*+oKFX#BU6#ULmt*VCWYE^JI7*_}>S8-E4W4<$_P(87oVkO~+ZExl z6>$LR(W{SH@*k=As!99AQU2vAW3an$L$!qV9UdTU24(?A+ZVT-iHo3Ne~a| zZ&d;L#^#&%Z=N9tFKYzL_V83#(}{_zk$eF2R9xkA%;RD{OkFq#g+I`U#|~R>&&u7wk55?d)G>#JAwandd5 znfx5XAJ>0Kh%Bran6y_Z%8K04Q>71}D9sD}5=b^|BB`!z{PTNT-O94VrlOA4WNLN2 zcm|^O9QqYkJ_|^U?!YXw7qHbp%}b7Z>22;^T9DDet(nBH_T3?dMg92ts3TMHo>F6k z$@nsT6LK$(W7+1&?#52ovqole%M#Oq^_9l@<5Z`%y$e00SAFLSeYJhF!wvcRy3k$C zhSp>nPb=H^3zxX=W7C(P zVb=8xm-(fBS8P}48s!{fmL`{%^+%oJ1bXTeMQ=IAeQz@E<<^sXWB{^WJgE-qezJ3tN{Jzn?sh7)xm1xQNIqs*H^%R$94G-hBH+NIMnW#Juhg_iR|d{Sawf-T(Q8o}GVJBS<@_yW>XG}q_O zooU$QX!67RT<;08dgQ&oA2w-~%6@tX8`@?IrPeKdv)y}lr+F~7#PQ6xW^(9Aq@1t* zNC8ypWVKikRdfP^78SZ0HI7!XO4kcta=Am%T5*A3Y9TyzPlqGF?gd$L_(O=YFG2((49nan^<6)bopKZdUc5KqLea z;`ydrMM7K#wNf%gyn}b79VXM4;j<1Dh3FnBba$0qlCF!(W;C7W?%<*DZtfJ8@XQ|T zf<|g#DZeO};TWgg)V76gM?`1xXWG`4ynaOd14SQCz*_zl?oAiAhjCnZgI#RrpsrXH z4kz6fW@)0Sj6E2;H_>X`QEyZp-(@`iwRZqU;}eQ&AaX;s(1Xz zsk3`-eBXW<-+5}f&-Y_5CBd*+cnmW$wtrh7$av~zy7wa6%+tz^Iui@C`=jX0={}eV zW;L>n%m#w~m|r`*ANgaZ&P!!J?=}CGkt>dxIXY7euV7u*uPM;B*6;0OZbRab)F_Pa zVQ4``!*zwNpw}*RqSDc1EVmOR9{eidBjAoNzAB`XZgXNH*?Df#p(6YyH%&ZE`pQ%P z@)PInxrfuq`W)SaMt8~ihFeo!t9^W#+Dg8F*HaR{eq5TqXgS?zqOx+Mc7Mz2H692S zG%fBIT05Tg=gR)>QlBSVdJ-{iCCIAgynF-QZ=IY1G$M)ue2(x zn>OcpMflBIUMp8N6|onXse?kn4?eIsX52oB^0Yrc^($ypWE-STefcG>uvkZJB~e%X zH^@|N3`6NS^||6!L#qoe+kRYDi_P3c%hZ0o@rh-q{pdrkI1JH>N#lB>ppx&zCzkoy zgQLF8(~+gY6u~r5e&JV%%2B&-Ff}RCzK11w)4T+v!!D+GS_t%$7J(U@Rq!h5di+); zq7Atg0HJWtAYZGG^%L*b>Cjj8YEcn+*tQrY$|HdwXB1RlqI-i%jB(;(`rBO@QVLK z^1tkTV!nUYrm5k}rT_*quqAkZz$AlW^*1Pb#xKToJ!_xch~*Cfv71DrxM!>QKkEGg z@v*-OV-lQY(Zk1Bfl>Dkfc1(=%Ux%#ot=za-I+;HZm~qQTEDrZu*dDb`xo&xe`&ja z(KP&{DE+_rl8ySCVU}O%e{&G@SG9Tj$A4BI75hW*m48bEdpB+m1=;7nVDU?fF8U9VUAad9iuaL{ zeOIez(OqdpUQmS+C+*|9ds<>5%e3#eMJU!qf9WQc zNH-ieFDiPR?eA<`vuLm7e{J-x^-%M}J57Cqn`JHj2Aw4~VD!RqcQCmf49d?Z4};%1 
zexAL-JrFoE^N5wRF6M2&*h|{=oV)xgj|D)I}EQK`dDuCETpIVNfTlOU%L^vn9qsM%qiS9Ws5X$DC4jQ9=vU_khgz1 zq4I6bnVvbK9h7AH&=#%tbaFFuvr8qCS;&IcxJ5I>*7z2u6(_9E!lqFcMdx;2TZC1?Yw&Vx9Ur>@9jfH2sZ`qu`#PKsnnlV%C1to%*l*T*Rv8wz9bO713|y$6TIG z-xuu0YFnqP)L1XKziAG5k18A*-a+-7ZwxwhzeH9rQJG)f6{Ro{uoBTJdZC@PaKAOV zRc+AOy+Vp_tV~zOqW*oECtM*IUGFa|x#oL|n%Yb4N#wx93TaNP)RC&nXO-NgAvI~5 z-l)b1ZodAqW=csQ!<*UIxgwgF434j4TW`FpU8L*i>QLYOuDUMcf!nJbb?%gYo7Arp zV~&&s_vZRq-NwB_Rw!e5nNG{^$|Nzj@93IMebh_2v?CY9#^O{;J@r0d2hf&yyWP`a zjr;Y(3oW}nv-$IFcA#(WAAf#TvZwuF(zsdUE-i2MW`-fxwNm3@A)%x}D;e`<%WqIG z%ZFIWU(IZ@>g$5?x9%;Q8eqt~?>IPdiDfqSXH%;HPl5-G0>0pM_6>~)Jv{l|I4&!B zA446k5d9^=PP;qU!jal}>?ZV}n(aC%V(BjK82AffEQ!*DAFCH2(LIvjm$n+=m@nw< zn!ECd+;&{d)-gdtjGtm_nDIG)?IfyyRtZjL1>r+=YYTO{rrdt?PUw#RD#0s>@sD>; z=-g3#Ds)~}M|AD8K&~!$g}rom zJKL2Bo|biDYpifF7oSOYa&Y2bSKtH*jQ4RV{h}!RL9hq@o8ghJRmPLJaF$uzjV_dR zSc?fm{#x7(V{AQ2y$oHI;fN5E=~^!%BIkmVCnwruC7cCF2_RdJ{Tx_qfgmobGv9z9s~17|k9wOB90T}_ za!QU8TL)dFMv147X75VEF~$acEWC40Y=uS@g`lJF($xME{h`vwWR&&19z0Av23*~; zyrlx+xxOAvCSTpzW+S|A^tc$`sv5W_&y{uxrn1`=p-#^#yO|mA**Mu{*mH$(N`-&a z?wFF}^f{Gx`CwO5IFa)S!A_mzFL_Uol7yv{hxQ*QPC?t!Wl?JJF2kTVi*3g9 zG(BeX17vJ7_}HBUz$3ycVJrU1X*uFC*4n33B#)+C-=C0*U$pC|#8_2zfN2m48D95J z*2qkoJD{sj3GN!Wv}cu7i)H%dMuK>@e)Lr-$ zaT#(~_lwWXl_9dT^yOYk>e7)-ZaY2idh;;>0r#r<0%idPY~hF&W&7g$@t=S$>ljIf zo$f7xnA)&+>8j-=Mv1rd%lT8L@&PId!DFS4VFjLft(n9h&Mh+z-`6ys+gxFH(!3=) z?Ci(gLbX5aw43(fSLl!R+UEU(rJvapzE5FENyanHewE)+w)tP`BDE=#jCrGA@(F-K z5XcORgzD#-S@dlf!Yhz_t;K$etF^>gYd(X@a59_cX{j+n3TxzwPKit4!)I&C0bil_ z!dXBjB(A2o&vV>HILyMf2U0p4G+b0VwJVgywPqS5GNVd8fk+ssxmye z(N9CzNl)&oeDPAlE1HPnJJ5hP)|DDQ!e75Zmt}ae`aQ^n&hBpzK~hg$T6wC2b1Dma zx8As_3w_?#svgP-E{0G`WlhhJV-PLh!g>vkdV}xF6U$W2jaI)V{30B!5f`{xFBz6y ziB0w_F|;#$yQMN!Dja?xN?NWZAJcWW$R{M>jL4Q{VM#{h2`!Sr?;eZ`J9t44d)k;~TfdgfID2HqYrM6aD z!A~%ErF0GWqYobDD$E3coXV3lrgYE>dI{U6x1sNuA%=Ty8rKCr5Npi%Kcwx%a=02T z=$uVZ{Xo^UDtp{J9k)&W*h>lWIQRS92a*I7CXK6&9~5$bng!<>J91hz!->0-OtQ`8 zIXD+k&8XB}n?kmRRKhsx~CA-%o?*pqH`<@Md)Nn}aRVU*$_YrOgO6}pet-|HB z;_Hpzz(!%N$mo9%NY99!QUqLoKHBUKNhu81+-ZNl^iV%(-j4GCaw!FP$3t^0$yx*478-46sJ^jE zx3hCQ-QM-h0G^{V-nyUQ*Nw0F&tm+w{q|g1kc-Kqusc|5ltvJsgop_BBbzAQJ6XQH zI1tLpF0wKrVb=34slP5(Dj`$J(;+;&y0TAvY(_^%T=!{ui{rlU{@6@Z@pjL&ZgTR( z!llEp=mGqj9Wabo;X8^CBeKy2;#iC>^o+U@NEOz~t_@B7qs07+>)F56q5S_-#oYe+ zf3rR=m)Q{R8x6?<)kFc=kYBUAU0U+wqV3s&W0Ys%vhmmWe%W-vz4a6;?Z`Xa9%Qxj zB0;kB8^UBi&~_}kWf&h+$iYXrJ*QW>!*@((C}tq~ zK?y%GNyDwS!v9;pw|`&_Wbm&g1@JjJ@Fhq$#2WkOFa$#IURJ`*vx1>@$W8-1159`F zqoIVTP17)-8mT%_?h+uT%y}dB+NTmKQiT3ze@GU1!Sl=v3#8XqFR{fy+s~wy(a>_- zR`%h#506uoT|9;5`^*=LU#VGP1Mrr7_M~<&(@B}>OG`@ZaW1pXplh7g9H72)9C@0b{uDH)_MTjOaVp4(C^=xw>DdUIpju}ygW$r{Pp3SF?zf+L2}omJ``uxJS!46 z(x!p->~X$uIN-Ve^+Ab?X%dhZ6+f-WIxXjFIY)G5a;XO5kIOvLBnsn#+S**u%H1~k zbCi7$!&4|Zc7QoS<-MO9!CsT#`8T#%CzQi$hkU)|5ibPmxU4z{F%LMyeL*rZ;eG_M z4dD**3QV)QzDC~(Tt_`ns5KZFX~u1L^;o*>KY`Y=?_s8{hd1ShVr$7B8% zKq7pqi^OK-8a*)}(6w*_LPJA zT0tId+?YLv1lcNMqH7|mjzr$dk#Yic&9d)2!x)Sfql$F-^#yaSz4P7>X={pfCxQr( z?DiTca0en5aHdL)HA3mNBZ!7emPEW$XDK>m?4`owERK--< zw05|s4D>^`x}6->k>M|v=E+(fJuJUDsiw|=Egi3}u;v<(N_d}e#7|d#?sl}5`rI}B zY}UB4zG-8+ezgjpnxR{6RsFy@MCyV{_dEZ^fC;ThGjd?zR~U}=HwaHg0V9!1zj*F9 z+oZ7|tIh7%BDpr`Uq%%z&Hr%ho2u(i1M?j!IfoG!#_eP}J%a5Jz+H|R@VFE=09M-; zP_+cs@XP(#5%V8DU4Ka`{@*#>!6q*>@qJ*1w!0mu`f)rD6gxx7W!l!(JdV@ZAOi^7 zeJ-!Xiv{bxDA3?8YHFJ_ zxeU@Mjft8V26B6Qo8ydx9u^|SepwKS_!@A-|Bd1>MhwT@@}mdr2CV_0vz;5569x=E zc~Rzo!6$kqtM?CsPlZ1WJ{Ow)+zSRlP&WYkVe$uXJOM9O>o*!@f(18FWq@fZ@IeOC z;Lbj}VSl|QS)SjZTjd{rgI;0bCj^Fc1i)MV>4b5AJ{LXE26e^rptC|kGdN_$fkx}s z$CG&DNxUD|S;--an(S*nxbxhVpW^zPT+XFlOcvr?PXS(i_6}BdAz;#CBkHixF@$u6h0Jnl+`t^o5p+ 
z$J{;VqTkFYTHr=M4}Mj`6<8_y0a*>~)eC0kg6AJLQR4)wuzShgg3FG)$g4DGI8IZa6nky_!vMUn>e#tWs z(q5HY=5N#krgN;Do~Is4mw)lBtyOo}9w7+>M)Ga%4%8nszCl@HpsKvcVtnd&8w(a` zDQl;=wsl#&FP0VQ>|ks{J<>)v)^>Y0h0`}(>zMJP98i>naMU)-Gg!u(H`t+MMw|EW zcNQJMJ0QHcyWE{c|NG962iVxYv$zHjN!p`RdT*pHYu3$$1xwp2U_ox*;^9~VPBx04 z9p@~z5_`9&bmq@|a5_ObpZwf)57)~Yu3l#QxubuE!R-*NcJE5h&U;gBo!-PgR%P!> zCA4|np^<(lY?7hDMq`+oRb$x!P5d4=rU$4ybz9b6#@L(o@9^+g*A>+`f1gib73;dN zWBlH1*weG@l$U5d(}d{pasG12=jT2EloO0)TrYsE8h;9_EW-IsI}%*~sId{bSqqO` z;cEMOjH~cT*=&eC-i9;7T&hEi82vGqKhRkbV0N5pp}lSXb|qhs%w~f(=@Nl);jiZ; zL)o7=$aVsiAOSEgjQK`flsQ_jbCqP5gLj6PHKj|X+B-w9xo-T3^I$lH{VSpxZZV3z z3F!q#CK!0lp5mqA^7^xFMfw0KrQXLC6dP{3?M1S9OaK!ff3t9pW~ZL}-3cA~ zG}XF9C+mW<{mPCHsQXMy556c>=N+Wm-d@nqDW5&4mAhXxBWn&|gB~lCSU74}!DvSK zJI(w~lS#>_?8tMFw!A9j5%ev-Zx#I*9M18eS%Ve7$g;P<@TtKbPO_bv(7N2K%{Kbr zKt=mTU{vF%YyklTz?Q1x_Ixj-@%`sqHorlHa!mR*N5fYXeT$PtR#rl`eonTT^!hn8 zUL!*(KRp>h$&5&%JohkY$u&f<1C9(`jd~vTc}KVXOQoSO5q> z9yB_UCjvV_&T=&ir49RAp7zRre`r(avI54t0cV7<)x@E;f*e~Eindp;!)~q$=>w0H0%8&^p?(L_UPUzNrndz=yG1oe~=?$1GNQt-p<}x6t8b3k!gd`rA&R^T=fZ)bw z;ff$; zE6B1{WqV6=2cDkqL&L$5+%HD0P*fzj6FU9i2=|s5;?Nppv~C0|KKtFUYoVB zdY0LY?aQ&30Hc8xMidww_6X%`x@cONrH?jfN!Lo*P`77{!r!HBd)meDnR}41N#$Ot zB>h?BQPu+ESrb}v59NjS%I%Qjo8I4ZBda~6`N2XIEb=IO=e|l2&;FpAm_;^ETqo2Jx2w~MN^SNBs^yTkQ2sx8>w zQodKcpO2^NXX<*N^t+OAB|5UVbJz^M^%F@a8*`j?@_VwxoD**iJC3b4n;av^fI$ zqF22lgi$2Bs*@&ePVf#s>k z(ed1`KXbJnHEH9Ykis483IoYvpf$WhMevaXvPvo6HK;wPmjevMy|z;##56Die0)}b z&e|vU_RZT_xr$K^b!Qc1Ri81Rgy~sc6*aN-Tz9Hm>E%q*Ir=6sS>DGl;6P)*rQq-T zz#qaQ3#`Yh9ha4-{QFrn0Pk=${9LF0=ZQ3o3x0FWB?6}ENFvSLzd^!Yvb`FVRRL~? zg#Og}u_6szl7ruPn{=~$ULR1DdEMeybarigU5BBwh&!{eCGraa*gfiNkOfggtPyGs z$AuonlWAd$x)!vbn?96z(tzeEz19b+`-*$Q$Vj@+=5@-bwZVjsjpT1bwWF zOVT8I(LVte2(`0#Xhg4B+nD7!ZxGV0gh#n?7b8C}*{Sck6 z%+fsC3?RC{2^iylx2l^R7&Cic|sz03W5$UDY;s!zx^O?e-rS^ z@qn~(hS6EmWnlq!INQDn`p3N+@Or%1#nRf=Xf0H|hO8SbQgtb)jZQG{=7A-L3O-Ry zA#sa;b#F=U?*p;{UhuLQM~{NWq^uGO;@H<6%r!;ym5YBE3T4sFq^YgQ@KlX{)U_H} zK6o9gMPX_2p*x!C-1k4(X6XJldY7BGLu+UE6$`K>y|bBrJm=mpgDkZvSq=KWAhU`Y z7~|&RPOe%-#75)t`gPe2ANJ#z{IFzh*{!>6!=|ce^Rs@eD+amlz6GdTPNi3>cmof<=R!`ge|i}IC$HTYpe6={ zB4BE047x9(T->J2(DHp<)H?aFi0|Mhzw;6Oe8NmU4{B%o&K4lqxQYXWp=ty16PjMX zK~EFRFgzpWN%bKgu#u?Slj`9U2kstC<}L-j*9KDdK{rvsskde8C;sW$YpmCU@HZvE zPCALni8js7j!X#E%m5R%j5G0KqP>{53!4IcUWf8IkzVY^n4Y~N$P2m$zhLga5VC8^ zto_4i^~h+HV*8wep^YBxJT4ehmvNRQw7GuM1+pN_eW6Bnd*F`#xDFj*mfJtwoR6s^ zz*Tx(;D~BtQ#(8-CZ_bK;c`J4m!a3p;1tEl;Z);ssm^S6L*rUx-7j zoC2Zf5$F}1G+Glo@v|F@j+-NQ+P6QM=f-kxIU1QouL(T|5#DZY;sXJ4)K}Oa_fkIZ z^p)7jr!js!uJS5l-MN%h#)Q;DN>(KP(YNdN?QH`1o;E3rM#!Kxt{ z*v7eISRe6fN}NzC!Lj_duNj#T!@Xjz`cH$`B#edDLga$}IT$s7*$S%33eO~&@dNK} zYMGbsNV53ug#3?f+dtXFSNg4rjO?0%?63t_-aWhg<#zY)T(jzv3XMh{6a z_g>wKzqxqJmAT&jOLwH*|8_b@V=mK_-X)>;&hF4(aRGcyV#=I-AnHR4zQvD8@BGrtC2 zy0QM9Jn)!;_kZjc+`aL4=@PEE=*gA!)wWkpTclNYs_8sfq5SmZ7cFp{Fs#@5yV^cg zo~Q0=9e+Ik>pG)rTdZ&ES_d_|@4vLJ!@F|5&b^DzCJDFBSQF&2cxvvZQwyhfTz$1A zfX%<*k1lY?#J;JdpRGP8HcS3yRF2vFyuG=X?o?)oSI>VhlX7h;CQW$1A_}W_^BZ#OwEn zS+Y;lW<}YW%Y~mG)JjI|7qnCU!TwOaS^u9>jsM5KxxmAwV%Tz>!>+Eo#-ZSNcW#Fa z@OC#(hLkx0Y*$5}*;)PFWS`F8w#&c$>wSqoF4@`k!Y>X-TzaQ?aebbs$^AReemy_g zy6_D8XwEw)<1HeV=M4_oJ)T!m;K0na&c|`9>a3( z*t4vcj_SU@^`yt;Z0+ zn`?S?%|zD~5gli)tvTf|&E8bje5G~U*VGe|VUlcDfhR-RZ*G56{bBdF(7jil{_tG= z`ohiEWns;cm)?J6oyYywNmN^Hy<*0n!?guVT|TvQYxYVkdl2yp8q1)xiwm>G_m*Vt zaenCim+#iGOTZH-i)9M$?_Lw#@0Kio&TRgST*XanlKy8bPUkmOEv`}hXnr_e_*K|E zgvy`3M}|B3c@t6w+uULowmX3!qEMex?iZyQ*qOE&p$ey`2{Ui{hdtGjZ$GG~?U fn=RA#I-kj9z2a#DV4B`9YD(()Fax5V@&8Q#MfV?# literal 0 HcmV?d00001 diff --git 
a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/readme.md b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/readme.md new file mode 100644 index 0000000000..5fb8557645 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/readme.md @@ -0,0 +1,16 @@ +The original version with neck: +

+ neck +

+
+[bfm.ply](https://github.com/Hangz-nju-cuhk/Rotate-and-Render/blob/master/3ddfa/BFM_Remove_Neck/bfm.ply)
+
+The image is rendered by MeshLab.
+
+`bfm_show.m` shows how to render it with 68 keypoints in Matlab.
+
+[image: no neck]
+
+Attention: Do not use the `ply` file in training.
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/render_face_mesh.m b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/render_face_mesh.m
new file mode 100644
index 0000000000..623e19cce7
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/render_face_mesh.m
@@ -0,0 +1,22 @@
+function render_face_mesh(vertex, tri, pts68_3d)
+    trisurf(tri', vertex(1, :), vertex(2, :), vertex(3, :), ones(size(vertex, 2), 1), 'edgecolor', 'none');
+
+    re = [1 1 1];
+    colormap(re);
+
+    light('Position', [0 0 1], 'Style', 'infinite');
+    lighting gouraud
+    axis equal
+    view([0 90]);
+
+    if nargin == 3
+        hold on; plot3(pts68_3d(1,:), pts68_3d(2,:), pts68_3d(3,:)+1, '*');
+    end
+
+    xlabel('x');
+    ylabel('y');
+    zlabel('z');
+
+    axis on;
+    grid on;
+end
\ No newline at end of file
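For readers following along without MATLAB, a rough Python equivalent of `render_face_mesh.m` is sketched below (an editor's illustration, not part of the patch; it assumes the same layout as the MATLAB code: `vertex` as a 3xN numpy array, `tri` as a 3xM numpy array of 1-based triangle indices, and optionally `pts68_3d` as a 3x68 landmark array):

# Illustrative sketch (not part of the original patch): a matplotlib analogue of
# render_face_mesh.m, under the array-layout assumptions stated above.
import matplotlib.pyplot as plt


def render_face_mesh(vertex, tri, pts68_3d=None):
    ax = plt.figure().add_subplot(projection='3d')
    # MATLAB triangle indices are 1-based; matplotlib expects 0-based (M, 3) rows
    ax.plot_trisurf(vertex[0], vertex[1], vertex[2],
                    triangles=tri.T - 1, color='white', edgecolor='none')
    if pts68_3d is not None:
        # lift the landmarks slightly above the surface, as the MATLAB code does
        ax.scatter(pts68_3d[0], pts68_3d[1], pts68_3d[2] + 1, marker='*')
    ax.set_xlabel('x')
    ax.set_ylabel('y')
    ax.set_zlabel('z')
    ax.view_init(elev=90, azim=-90)  # roughly MATLAB's view([0 90])
    plt.show()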
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/LICENSE b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/LICENSE
new file mode 100644
index 0000000000..4695f23b74
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018 Jianzhu Guo
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/__init__.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/example/Images/.keep b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/example/Images/.keep
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/mobilenet_v1.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/mobilenet_v1.py
new file mode 100644
index 0000000000..e48dd7a60d
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/mobilenet_v1.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+from __future__ import division
+
+"""
+Creates a MobileNet Model as defined in:
+Andrew G. Howard, Menglong Zhu, Bo Chen, et al. (2017).
+MobileNets: Efficient Convolutional Neural Networks for Mobile Vision Applications.
+Copyright (c) Yang Lu, 2017
+
+Modified by cleardusk
+"""
+import math
+import torch.nn as nn
+
+__all__ = ['mobilenet_2', 'mobilenet_1', 'mobilenet_075', 'mobilenet_05', 'mobilenet_025']
+
+
+class DepthWiseBlock(nn.Module):
+    def __init__(self, inplanes, planes, stride=1, prelu=False):
+        super(DepthWiseBlock, self).__init__()
+        inplanes, planes = int(inplanes), int(planes)
+        self.conv_dw = nn.Conv2d(inplanes, inplanes, kernel_size=3, padding=1, stride=stride, groups=inplanes,
+                                 bias=False)
+        self.bn_dw = nn.BatchNorm2d(inplanes)
+        self.conv_sep = nn.Conv2d(inplanes, planes, kernel_size=1, stride=1, padding=0, bias=False)
+        self.bn_sep = nn.BatchNorm2d(planes)
+        if prelu:
+            self.relu = nn.PReLU()
+        else:
+            self.relu = nn.ReLU(inplace=True)
+
+    def forward(self, x):
+        out = self.conv_dw(x)
+        out = self.bn_dw(out)
+        out = self.relu(out)
+
+        out = self.conv_sep(out)
+        out = self.bn_sep(out)
+        out = self.relu(out)
+
+        return out
+
+
+class MobileNet(nn.Module):
+    def __init__(self, widen_factor=1.0, num_classes=1000, prelu=False, input_channel=3):
+        """ Constructor
+        Args:
+            widen_factor: config of widen_factor
+            num_classes: number of classes
+        """
+        super(MobileNet, self).__init__()
+
+        block = DepthWiseBlock
+        self.conv1 = nn.Conv2d(input_channel, int(32 * widen_factor), kernel_size=3, stride=2, padding=1,
+                               bias=False)
+
+        self.bn1 = nn.BatchNorm2d(int(32 * widen_factor))
+        if prelu:
+            self.relu = nn.PReLU()
+        else:
+            self.relu = nn.ReLU(inplace=True)
+
+        self.dw2_1 = block(32 * widen_factor, 64 * widen_factor, prelu=prelu)
+        self.dw2_2 = block(64 * widen_factor, 128 * widen_factor, stride=2, prelu=prelu)
+
+        self.dw3_1 = block(128 * widen_factor, 128 * widen_factor, prelu=prelu)
+        self.dw3_2 = block(128 * widen_factor, 256 * widen_factor, stride=2, prelu=prelu)
+
+        self.dw4_1 = block(256 * widen_factor, 256 * widen_factor, prelu=prelu)
+        self.dw4_2 = block(256 * widen_factor, 512 * widen_factor, stride=2, prelu=prelu)
+
+        self.dw5_1 = block(512 * widen_factor, 512 * widen_factor, prelu=prelu)
+        self.dw5_2 = block(512 * widen_factor, 512 * widen_factor, prelu=prelu)
+        self.dw5_3 = block(512 * widen_factor, 512 * widen_factor, prelu=prelu)
+        self.dw5_4 = block(512 * widen_factor, 512 * widen_factor, prelu=prelu)
+        self.dw5_5 = block(512 * widen_factor, 512 * widen_factor, prelu=prelu)
+        self.dw5_6 = block(512 * widen_factor, 1024 * widen_factor, stride=2, prelu=prelu)
+
+        self.dw6 = block(1024 * widen_factor, 1024 * widen_factor, prelu=prelu)
+
+        self.avgpool = nn.AdaptiveAvgPool2d(1)
+        self.fc = nn.Linear(int(1024 * widen_factor), num_classes)
+
+        for m in self.modules():
+            if isinstance(m, nn.Conv2d):
+                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
+                m.weight.data.normal_(0, math.sqrt(2. / n))
+            elif isinstance(m, nn.BatchNorm2d):
+                m.weight.data.fill_(1)
+                m.bias.data.zero_()
+
+    def forward(self, x):
+        x = self.conv1(x)
+        x = self.bn1(x)
+        x = self.relu(x)
+
+        x = self.dw2_1(x)
+        x = self.dw2_2(x)
+        x = self.dw3_1(x)
+        x = self.dw3_2(x)
+        x = self.dw4_1(x)
+        x = self.dw4_2(x)
+        x = self.dw5_1(x)
+        x = self.dw5_2(x)
+        x = self.dw5_3(x)
+        x = self.dw5_4(x)
+        x = self.dw5_5(x)
+        x = self.dw5_6(x)
+        x = self.dw6(x)
+
+        x = self.avgpool(x)
+        x = x.view(x.size(0), -1)
+        x = self.fc(x)
+
+        return x
+
+
+def mobilenet(widen_factor=1.0, num_classes=1000):
+    """
+    Construct MobileNet.
+    widen_factor=1.0  for mobilenet_1
+    widen_factor=0.75 for mobilenet_075
+    widen_factor=0.5  for mobilenet_05
+    widen_factor=0.25 for mobilenet_025
+    """
+    model = MobileNet(widen_factor=widen_factor, num_classes=num_classes)
+    return model
+
+
+def mobilenet_2(num_classes=62, input_channel=3):
+    model = MobileNet(widen_factor=2.0, num_classes=num_classes, input_channel=input_channel)
+    return model
+
+
+def mobilenet_1(num_classes=62, input_channel=3):
+    model = MobileNet(widen_factor=1.0, num_classes=num_classes, input_channel=input_channel)
+    return model
+
+
+def mobilenet_075(num_classes=62, input_channel=3):
+    model = MobileNet(widen_factor=0.75, num_classes=num_classes, input_channel=input_channel)
+    return model
+
+
+def mobilenet_05(num_classes=62, input_channel=3):
+    model = MobileNet(widen_factor=0.5, num_classes=num_classes, input_channel=input_channel)
+    return model
+
+
+def mobilenet_025(num_classes=62, input_channel=3):
+    model = MobileNet(widen_factor=0.25, num_classes=num_classes, input_channel=input_channel)
+    return model
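As a quick sanity check of the factory functions above (an editor's sketch, not part of the patch; it assumes `mobilenet_v1.py` is importable from the current directory), the 62-output variant used by 3DDFA can be built and run on a dummy 120x120 crop:

# Illustrative usage sketch (not part of the original patch).
import torch

import mobilenet_v1

model = mobilenet_v1.mobilenet_1(num_classes=62)  # widen_factor=1.0 backbone
model.eval()
dummy = torch.randn(1, 3, 120, 120)  # preprocessing_1.py below feeds STD_SIZE=120 crops
with torch.no_grad():
    out = model(dummy)
print(out.shape)  # torch.Size([1, 62]): 12 pose + 40 shape + 10 expression parameters

The five stride-2 stages reduce the 120x120 input to a small feature map that the AdaptiveAvgPool2d collapses to 1x1, so any reasonable crop size works.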
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_1.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_1.py
new file mode 100644
index 0000000000..43607ab14f
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_1.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+"""
+The pipeline of 3DDFA prediction: given one image, predict the 3D face vertices, 68 landmarks and visualization.
+
+[todo]
+1. CPU optimization: https://pmchojnacki.wordpress.com/2018/10/07/slow-pytorch-cpu-performance
+"""
+
+import torch
+import torchvision.transforms as transforms
+from . import mobilenet_v1
+import numpy as np
+import cv2
+from os import path
+import face_alignment
+from .utils.ddfa import ToTensorGjz, NormalizeGjz
+import scipy.io as sio
+from .utils.inference import get_suffix, parse_roi_box_from_landmark, crop_img, predict_68pts, dump_to_ply, \
+    dump_vertex, draw_landmarks, predict_dense, parse_roi_box_from_bbox, get_colors, write_obj_with_colors
+from .utils.cv_plot import plot_pose_box
+from .utils.estimate_pose import parse_pose
+from .utils.render import cget_depths_image, cpncc
+from .utils.paf import gen_img_paf
+import torch.backends.cudnn as cudnn
+import sys
+
+__author__ = 'cleardusk'
+STD_SIZE = 120
+
+
+def main(args):
+    # 1. load pre-trained model
+    checkpoint_fp = 'algorithm/DDFA/models/phase1_wpdc_vdc.pth.tar'
+    arch = 'mobilenet_1'
+
+    checkpoint = torch.load(checkpoint_fp, map_location=lambda storage, loc: storage)['state_dict']
+    model = getattr(mobilenet_v1, arch)(num_classes=62)  # 62 = 12 (pose) + 40 (shape) + 10 (expression)
+
+    model_dict = model.state_dict()
+    # because the model was trained on multiple GPUs, the 'module.' prefix has to be removed
+    for k in checkpoint.keys():
+        model_dict[k.replace('module.', '')] = checkpoint[k]
+    model.load_state_dict(model_dict)
+    if args.mode == 'gpu':
+        cudnn.benchmark = True
+        model = model.cuda()
+    model.eval()
+
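+    # [Editor's note: illustrative comment, not part of the original file.] The 62
+    # values predicted per crop are consumed below by predict_68pts/predict_dense;
+    # following the layout comment above, a manual split would look like:
+    #     pose_param = param[:12]     # flattened 3x4 pose/camera matrix
+    #     shape_param = param[12:52]  # 40 shape PCA coefficients
+    #     exp_param = param[52:62]    # 10 expression PCA coefficients
+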
load dlib model for face detection and landmark used for face cropping + if args.dlib_landmark: + dlib_landmark_model = 'models/shape_predictor_68_face_landmarks.dat' + face_regressor = dlib.shape_predictor(dlib_landmark_model) + if args.dlib_bbox: + face_detector = dlib.get_frontal_face_detector() + ''' + face_regressor = face_alignment.FaceAlignment(face_alignment.LandmarksType._2D, flip_input=False) + # face_detector = face_regressor.face_detector + + # 3. forward + tri = sio.loadmat('algorithm/DDFA/visualize/tri.mat')['tri'] + transform = transforms.Compose([ToTensorGjz(), NormalizeGjz(mean=127.5, std=128)]) + # print(args.files) + for img_fp in args.files: + print(img_fp) + suffix = get_suffix(img_fp) + wfp = '{}_{}.obj'.format(img_fp.replace(suffix, ''), 0) + if not path.exists(wfp): + img_ori = cv2.imread(img_fp) + if img_ori is None: + print("Can't load image, please check the path", file=sys.stderr) + sys.exit(1) + + try: + rect + except NameError: + rect = None + ''' + if args.dlib_bbox: + rects = face_detector(img_ori, 1) + else: + rects = [] + + if len(rects) == 0: + rects = dlib.rectangles() + rect_fp = img_fp + '.bbox' + lines = open(rect_fp).read().strip().split('\n')[1:] + for l in lines: + l, r, t, b = [int(_) for _ in l.split(' ')[1:]] + rect = dlib.rectangle(l, r, t, b) + rects.append(rect) + ''' + img_rgb = img_ori[:, :, ::-1] + ptss = face_regressor.get_landmarks(img_rgb) + pts_res = [] + Ps = [] # Camera matrix collection + poses = [] # pose collection, [todo: validate it] + vertices_lst = [] # store multiple face vertices + ind = 0 + + for pts in ptss: + # whether use dlib landmark to crop image, if not, use only face bbox to calc roi bbox for cropping + if args.dlib_landmark: + # - use landmark for cropping + # pts = face_regressor(img_ori, rect).parts() + # pts = np.array([[pt.x, pt.y] for pt in pts]).T + roi_box = parse_roi_box_from_landmark(pts.T) + else: + # - use detected face bbox + bbox = [rect.left(), rect.top(), rect.right(), rect.bottom()] + roi_box = parse_roi_box_from_bbox(bbox) + + img = crop_img(img_ori, roi_box) + + # forward: one step + img = cv2.resize(img, dsize=(STD_SIZE, STD_SIZE), interpolation=cv2.INTER_LINEAR) + input = transform(img).unsqueeze(0) + with torch.no_grad(): + if args.mode == 'gpu': + input = input.cuda() + param = model(input) + param = param.squeeze().cpu().numpy().flatten().astype(np.float32) + + # 68 pts + pts68 = predict_68pts(param, roi_box) + + # two-step for more accurate bbox to crop face + if args.bbox_init == 'two': + roi_box = parse_roi_box_from_landmark(pts68) + img_step2 = crop_img(img_ori, roi_box) + img_step2 = cv2.resize(img_step2, dsize=(STD_SIZE, STD_SIZE), interpolation=cv2.INTER_LINEAR) + input = transform(img_step2).unsqueeze(0) + with torch.no_grad(): + if args.mode == 'gpu': + input = input.cuda() + param = model(input) + param = param.squeeze().cpu().numpy().flatten().astype(np.float32) + + pts68 = predict_68pts(param, roi_box) + + pts_res.append(pts68) + P, pose = parse_pose(param) + Ps.append(P) + poses.append(pose) + + # dense face 3d vertices + if args.dump_ply or args.dump_vertex or args.dump_depth or args.dump_pncc or args.dump_obj: + vertices = predict_dense(param, roi_box) + vertices_lst.append(vertices) + if args.dump_ply: + dump_to_ply(vertices, tri, '{}_{}.ply'.format(img_fp.replace(suffix, ''), ind)) + if args.dump_vertex: + dump_vertex(vertices, '{}_{}.mat'.format(img_fp.replace(suffix, ''), ind)) + if args.dump_pts: + wfp = '{}_{}.txt'.format(img_fp.replace(suffix, ''), ind) + np.savetxt(wfp, 
pts68, fmt='%.3f') + print('Save 68 3d landmarks to {}'.format(wfp)) + if args.dump_roi_box: + wfp = '{}_{}.roibox'.format(img_fp.replace(suffix, ''), ind) + np.savetxt(wfp, roi_box, fmt='%.3f') + print('Save roi box to {}'.format(wfp)) + if args.dump_paf: + wfp_paf = '{}_{}_paf.jpg'.format(img_fp.replace(suffix, ''), ind) + wfp_crop = '{}_{}_crop.jpg'.format(img_fp.replace(suffix, ''), ind) + paf_feature = gen_img_paf(img_crop=img, param=param, kernel_size=args.paf_size) + + cv2.imwrite(wfp_paf, paf_feature) + cv2.imwrite(wfp_crop, img) + print('Dump to {} and {}'.format(wfp_crop, wfp_paf)) + if args.dump_obj: + wfp = '{}_{}.obj'.format(img_fp.replace(suffix, ''), ind) + colors = get_colors(img_ori, vertices) + write_obj_with_colors(wfp, vertices, tri, colors) + print('Dump obj with sampled texture to {}'.format(wfp)) + ind += 1 + + if args.dump_pose: + # P, pose = parse_pose(param) # Camera matrix (without scale), and pose (yaw, pitch, roll, to verify) + img_pose = plot_pose_box(img_ori, Ps, pts_res) + wfp = img_fp.replace(suffix, '_pose.jpg') + cv2.imwrite(wfp, img_pose) + print('Dump to {}'.format(wfp)) + if args.dump_depth: + wfp = img_fp.replace(suffix, '_depth.png') + # depths_img = get_depths_image(img_ori, vertices_lst, tri-1) # python version + depths_img = cget_depths_image(img_ori, vertices_lst, tri - 1) # cython version + cv2.imwrite(wfp, depths_img) + print('Dump to {}'.format(wfp)) + if args.dump_pncc: + wfp = img_fp.replace(suffix, '_pncc.png') + pncc_feature = cpncc(img_ori, vertices_lst, tri - 1) # cython version + cv2.imwrite(wfp, pncc_feature[:, :, ::-1]) # cv2.imwrite will swap RGB -> BGR + print('Dump to {}'.format(wfp)) + if args.dump_res: + draw_landmarks(img_ori, pts_res, wfp=img_fp.replace(suffix, '_3DDFA.jpg'), show_flg=args.show_flg) + else: + print("Main_Done") + + +''' +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='3DDFA inference pipeline') + parser.add_argument('-f', '--files', nargs='+', + help='image files paths fed into network, single or multiple images') + parser.add_argument('-m', '--mode', default='cpu', type=str, help='gpu or cpu mode') + parser.add_argument('--show_flg', default='true', type=str2bool, help='whether show the visualization result') + parser.add_argument('--bbox_init', default='one', type=str, + help='one|two: one-step bbox initialization or two-step') + parser.add_argument('--dump_res', default='true', type=str2bool, help='whether write out the visualization image') + parser.add_argument('--dump_vertex', default='false', type=str2bool, + help='whether write out the dense face vertices to mat') + parser.add_argument('--dump_ply', default='true', type=str2bool) + parser.add_argument('--dump_pts', default='true', type=str2bool) + parser.add_argument('--dump_roi_box', default='false', type=str2bool) + parser.add_argument('--dump_pose', default='true', type=str2bool) + parser.add_argument('--dump_depth', default='true', type=str2bool) + parser.add_argument('--dump_pncc', default='true', type=str2bool) + parser.add_argument('--dump_paf', default='true', type=str2bool) + parser.add_argument('--paf_size', default=3, type=int, help='PAF feature kernel size') + parser.add_argument('--dump_obj', default='true', type=str2bool) + parser.add_argument('--dlib_bbox', default='true', type=str2bool, help='whether use dlib to predict bbox') + parser.add_argument('--dlib_landmark', default='true', type=str2bool, + help='whether use dlib landmark to crop image') + + args = parser.parse_args() + main(args) +''' diff --git 
a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_2.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_2.py
new file mode 100644
index 0000000000..821cf06b5a
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_2.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+"""
+The pipeline of 3DDFA prediction: given one image, predict the 3d face vertices, 68 landmarks and visualization.
+
+[todo]
+1. CPU optimization: https://pmchojnacki.wordpress.com/2018/10/07/slow-pytorch-cpu-performance
+"""
+
+import torch
+import torchvision.transforms as transforms
+from . import mobilenet_v1
+import numpy as np
+import cv2
+import os
+from tqdm import tqdm
+import face_alignment
+from .utils.ddfa import ToTensorGjz, NormalizeGjz
+import scipy.io as sio
+from .utils.inference import parse_roi_box_from_landmark, crop_img, predict_68pts, predict_dense, get_colors, \
+    get_5lmk_from_68lmk
+from .utils.estimate_pose import parse_pose
+from .utils.params import param_mean, param_std
+from .utils.render import crender_colors
+import torch.backends.cudnn as cudnn
+__author__ = 'cleardusk'
+STD_SIZE = 120
+
+
+def main(args):
+    # 1. load the pre-trained model
+    checkpoint_fp = 'algorithm/DDFA/models/phase1_wpdc_vdc.pth.tar'
+    arch = 'mobilenet_1'
+
+    checkpoint = torch.load(checkpoint_fp, map_location=lambda storage, loc: storage)['state_dict']
+    model = getattr(mobilenet_v1, arch)(num_classes=62)  # 62 = 12 (pose) + 40 (shape) + 10 (expression)
+
+    model_dict = model.state_dict()
+    # the checkpoint was trained with multiple GPUs, so the 'module.' prefix has to be removed
+    for k in checkpoint.keys():
+        model_dict[k.replace('module.', '')] = checkpoint[k]
+    model.load_state_dict(model_dict)
+    if args.mode == 'gpu':
+        cudnn.benchmark = True
+        model = model.cuda()
+    model.eval()
+
+    tri = sio.loadmat('algorithm/DDFA/visualize/tri.mat')['tri']
+    transform = transforms.Compose([ToTensorGjz(), NormalizeGjz(mean=127.5, std=128)])
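+
+    # The transform above maps uint8 pixels into roughly [-1, 1]: assuming
+    # NormalizeGjz applies (x - mean) / std channel-wise, as its name and
+    # arguments suggest, 0 -> (0 - 127.5) / 128 = -0.996 and
+    # 255 -> (255 - 127.5) / 128 = 0.996.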
+
+    # 2. parse the image list
+    with open(args.img_list) as f:
+        img_list = [x.strip() for x in f.readlines()]
+    landmark_list = []
+
+    alignment_model = face_alignment.FaceAlignment(face_alignment.LandmarksType._2D, flip_input=False)
+
+    if not os.path.exists(args.save_dir):
+        os.mkdir(args.save_dir)
+    if not os.path.exists(args.save_lmk_dir):
+        os.mkdir(args.save_lmk_dir)
+
+    for img_idx, img_fp in enumerate(tqdm(img_list)):
+        img_ori = cv2.imread(os.path.join(args.img_prefix, img_fp))
+        print("Image", img_fp)
+        pts_res = []
+        Ps = []  # Camera matrix collection
+        poses = []  # pose collection, [todo: validate it]
+        # vertices_lst = []  # store multiple face vertices
+        # ind = 0
+        # suffix = get_suffix(img_fp)
+
+        # the face alignment model takes RGB input; the result is a tuple with landmarks and boxes
+        preds = alignment_model.get_landmarks(img_ori[:, :, ::-1])
+        pts_2d_68 = preds[0]
+        pts_2d_5 = get_5lmk_from_68lmk(pts_2d_68)
+        landmark_list.append(pts_2d_5)
+        roi_box = parse_roi_box_from_landmark(pts_2d_68.T)
+
+        img = crop_img(img_ori, roi_box)
+        # import pdb; pdb.set_trace()
+
+        # forward: one step
+        img = cv2.resize(img, dsize=(STD_SIZE, STD_SIZE), interpolation=cv2.INTER_LINEAR)
+        input = transform(img).unsqueeze(0)
+        with torch.no_grad():
+            if args.mode == 'gpu':
+                input = input.cuda()
+            param = model(input)
+            param = param.squeeze().cpu().numpy().flatten().astype(np.float32)
+
+        # 68 pts
+        pts68 = predict_68pts(param, roi_box)
+
+        # two-step for more accurate bbox to crop face
+        if args.bbox_init == 'two':
+            roi_box = parse_roi_box_from_landmark(pts68)
+            img_step2 = crop_img(img_ori, roi_box)
+            img_step2 = cv2.resize(img_step2, dsize=(STD_SIZE, STD_SIZE), interpolation=cv2.INTER_LINEAR)
+            input = transform(img_step2).unsqueeze(0)
+            with torch.no_grad():
+                if args.mode == 'gpu':
+                    input = input.cuda()
+                param = model(input)
+                param = param.squeeze().cpu().numpy().flatten().astype(np.float32)
+
+            pts68 = predict_68pts(param, roi_box)
+
+        pts_res.append(pts68)
+        P, pose = parse_pose(param)
+        Ps.append(P)
+        poses.append(pose)
+
+        # dense face 3d vertices
+        vertices = predict_dense(param, roi_box)
+
+        if args.dump_2d_img:
+            wfp_2d_img = os.path.join(args.save_dir, os.path.basename(img_fp))
+            colors = get_colors(img_ori, vertices)
+            # aligned_param = get_aligned_param(param)
+            # vertices_aligned = predict_dense(aligned_param, roi_box)
+            # h, w, c = 120, 120, 3
+            h, w, c = img_ori.shape
+            img_2d = crender_colors(vertices.T, (tri - 1).T, colors[:, ::-1], h, w)
+            cv2.imwrite(wfp_2d_img, img_2d[:, :, ::-1])
+        if args.dump_param:
+            split = img_fp.split('/')
+            save_name = os.path.join(args.save_dir, '{}.txt'.format(os.path.splitext(split[-1])[0]))
+            this_param = param * param_std + param_mean
+            this_param = np.concatenate((this_param, roi_box))
+            this_param.tofile(save_name, sep=' ')
+        if args.dump_lmk:
+            save_path = os.path.join(args.save_lmk_dir, 'realign_lmk_')
+            with open(save_path, 'w') as f:
+                for idx, (fname, land) in enumerate(zip(img_list, landmark_list)):
+                    # f.write('{} {} {} {}')
+                    land = land.astype(int)
+                    land_str = ' '.join([str(x) for x in land])
+                    msg = f'{fname} {idx} {land_str}\n'
+                    f.write(msg)
+
+
+if __name__ == '__main__':
+    '''
+    parser = argparse.ArgumentParser(description='3DDFA inference pipeline')
+    parser.add_argument('-m', '--mode', default='gpu', type=str, help='gpu or cpu mode')
+    parser.add_argument('--bbox_init', default='two', type=str,
+                        help='one|two: one-step bbox initialization or two-step')
+    parser.add_argument('--dump_2d_img', default='true', type=str2bool, 
help='whether to save 3d rendered image') + parser.add_argument('--dump_param', default='true', type=str2bool, help='whether to save param') + parser.add_argument('--dump_lmk', default='true', type=str2bool, help='whether to save landmarks') + parser.add_argument('--save_dir', default='results', type=str, help='dir to save result') + parser.add_argument('--save_lmk_dir', default='example', type=str, help='dir to save landmark result') + parser.add_argument('--img_list', default='example/file_list.txt', type=str, help='test image list file') + parser.add_argument('--img_prefix', default='example/Images/', type=str, help='test image prefix') + parser.add_argument('--rank', default=0, type=int, help='used when parallel run') + parser.add_argument('--world_size', default=1, type=int, help='used when parallel run') + parser.add_argument('--resume_idx', default=0, type=int) + + args = parser.parse_args() + ''' + # main(args) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/simple_dataset.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/simple_dataset.py new file mode 100644 index 0000000000..44ac859e5c --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/simple_dataset.py @@ -0,0 +1,39 @@ +from torch.utils.data import Dataset +import numpy as np +import cv2 +from utils.inference import crop_img, parse_roi_box_from_landmark + + +def cv2_loader(img_str): + img_array = np.frombuffer(img_str, dtype=np.uint8) + return cv2.imdecode(img_array, cv2.IMREAD_COLOR) + + +class McDataset(Dataset): + def __init__(self, img_list, landmarks, std_size=120, transform=None): + self.img_list = img_list + self.landmarks = landmarks + self.transform = transform + self.std_size = std_size + assert len(self.img_list) == len(self.landmarks) + self.num = len(self.img_list) + + self.initialized = False + + def __len__(self): + return self.num + + def __getitem__(self, idx): + filename = self.img_list[idx] + ori_img = cv2.imread(filename) + + landmark = self.landmarks[idx] + + # preprocess img + roi_box = parse_roi_box_from_landmark(landmark.T) + img = crop_img(ori_img, roi_box) + img = cv2.resize(img, dsize=(self.std_size, self.std_size), interpolation=cv2.INTER_LINEAR) + if self.transform is not None: + img = self.transform(img) + + return img, ori_img, filename, np.array(roi_box) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/test.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/test.py new file mode 100644 index 0000000000..59814bc726 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/test.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +""" +The pipeline of 3DDFA prediction: given one image, predict the 3d face vertices, 68 landmarks and visualization. + +[todo] +1. 
CPU optimization: https://pmchojnacki.wordpress.com/2018/10/07/slow-pytorch-cpu-performance
+"""
+
+import torch
+import torchvision.transforms as transforms
+import mobilenet_v1
+import numpy as np
+import cv2
+import os
+from tqdm import tqdm
+import time
+from utils.ddfa import ToTensorGjz, NormalizeGjz, str2bool
+from utils.inference import parse_roi_box_from_landmark, crop_img, predict_68pts, parse_quality_list_part
+from utils.params import param_mean, param_std
+import argparse
+import torch.backends.cudnn as cudnn
+from simple_dataset import McDataset
+from torch.utils.data import DataLoader
+__author__ = 'cleardusk'
+STD_SIZE = 120
+
+
+def main(args):
+    # 1. load the pre-trained model
+    checkpoint_fp = 'models/phase1_wpdc_vdc.pth.tar'
+    arch = 'mobilenet_1'
+
+    checkpoint = torch.load(checkpoint_fp, map_location=lambda storage, loc: storage)['state_dict']
+    model = getattr(mobilenet_v1, arch)(num_classes=62)  # 62 = 12 (pose) + 40 (shape) + 10 (expression)
+
+    model_dict = model.state_dict()
+    # the checkpoint was trained with multiple GPUs, so the 'module.' prefix has to be removed
+    for k in checkpoint.keys():
+        model_dict[k.replace('module.', '')] = checkpoint[k]
+    model.load_state_dict(model_dict)
+    if args.mode == 'gpu':
+        cudnn.benchmark = True
+        model = model.cuda()
+    model.eval()
+
+    # tri = sio.loadmat('visualize/tri.mat')['tri']
+    transform = transforms.Compose([ToTensorGjz(), NormalizeGjz(mean=127.5, std=128)])
+
+    if not os.path.exists(args.save_dir):
+        os.mkdir(args.save_dir)
+
+    # 2. parse the image list and landmarks
+    lmk_file = args.lmk_file
+    ts = time.time()
+    rank_land, rank_img_list, start, end = parse_quality_list_part(lmk_file, args.world_size, args.rank,
+                                                                   args.resume_idx)
+    print('parse land file in {:.3f} seconds'.format(time.time() - ts))
+
+    # for batch processing
+    print('World size {}, rank {}, start from {}, end with {}'.format(args.world_size, args.rank, start, end))
+    dataset = McDataset(rank_img_list, rank_land, transform=transform, std_size=STD_SIZE)
+    dataloader = DataLoader(dataset, batch_size=args.batch_size, shuffle=False, num_workers=2, pin_memory=True)
+
+    for img_idx, (inputs, ori_imgs, img_fps, roi_boxes) in enumerate(tqdm(dataloader)):
+
+        # forward: one step
+        with torch.no_grad():
+            if args.mode == 'gpu':
+                inputs = inputs.cuda()
+            params = model(inputs)
+            params = params.cpu().numpy()
+
+        roi_boxes = roi_boxes.numpy()
+        outputs_roi_boxes = roi_boxes
+        if args.bbox_init == 'two':
+            step_two_ori_imgs = []
+            step_two_roi_boxes = []
+            ori_imgs = ori_imgs.numpy()
+            for ii in range(params.shape[0]):
+                # 68 pts
+                pts68 = predict_68pts(params[ii], roi_boxes[ii])
+
+                # two-step for more accurate bbox to crop face
+                roi_box = parse_roi_box_from_landmark(pts68)
+                img_step2 = crop_img(ori_imgs[ii], roi_box)
+                img_step2 = cv2.resize(img_step2, dsize=(STD_SIZE, STD_SIZE), interpolation=cv2.INTER_LINEAR)
+                # input = transform(img_step2).unsqueeze(0)
+                step_two_ori_imgs.append(transform(img_step2))
+                step_two_roi_boxes.append(roi_box)
+            with torch.no_grad():
+                step_two_ori_imgs = torch.stack(step_two_ori_imgs, dim=0)
+                inputs = step_two_ori_imgs
+                if args.mode == 'gpu':
+                    inputs = inputs.cuda()
+                params = model(inputs)
+                params = params.cpu().numpy()
+            outputs_roi_boxes = step_two_roi_boxes
+
+        # dump results
+        if args.dump_param:
+            for img_fp, param, roi_box in zip(img_fps, params, outputs_roi_boxes):
+                split = img_fp.split('/')
+                save_name = os.path.join(args.save_dir, '{}.txt'.format(os.path.splitext(split[-1])[0]))
+                this_param = param * param_std + param_mean
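+                # de-standardize the 62-dim parameters: assuming param_mean/param_std
+                # are the training-time standardization constants, as their names suggest,
+                # this inverts the whitening; appending the 4-value roi_box then stores
+                # 62 + 4 = 66 numbers per face.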
+                this_param = np.concatenate((this_param, roi_box))
+                this_param.tofile(save_name, sep=' ')
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='3DDFA inference pipeline')
+    parser.add_argument('-m', '--mode', default='gpu', type=str, help='gpu or cpu mode')
+    parser.add_argument('--bbox_init', default='two', type=str,
+                        help='one|two: one-step bbox initialization or two-step')
+    parser.add_argument('--dump_2d_img', default='false', type=str2bool, help='whether to save 3d rendered image')
+    parser.add_argument('--dump_param', default='true', type=str2bool, help='whether to save param')
+    parser.add_argument('--save_dir', default='results', type=str, help='dir to save result')
+    parser.add_argument('--lmk_file', default='quality_list', type=str, help='landmarks file')
+    parser.add_argument('--rank', default=0, type=int, help='used when parallel run')
+    parser.add_argument('--world_size', default=1, type=int, help='used when parallel run')
+    parser.add_argument('--resume_idx', default=0, type=int)
+    parser.add_argument('--batch_size', default=80, type=int, help='batch size')
+
+    args = parser.parse_args()
+    main(args)
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/__init__.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cv_plot.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cv_plot.py
new file mode 100644
index 0000000000..cb75eb1d27
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cv_plot.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+
+"""
+Modified from: https://sourcegraph.com/github.com/YadiraF/PRNet@master/-/blob/utils/cv_plot.py
+"""
+
+import numpy as np
+import cv2
+
+from .inference import calc_hypotenuse
+
+end_list = np.array([17, 22, 27, 42, 48, 31, 36, 68], dtype=np.int32) - 1
+
+
+def plot_kpt(image, kpt):
+    ''' Draw the 68 key points
+    Args:
+        image: the input image
+        kpt: (68, 3).
+    '''
+    image = image.copy()
+    kpt = np.round(kpt).astype(np.int32)
+    for i in range(kpt.shape[0]):
+        st = kpt[i, :2]
+        image = cv2.circle(image, (st[0], st[1]), 1, (0, 0, 255), 2)
+        if i in end_list:
+            continue
+        ed = kpt[i + 1, :2]
+        image = cv2.line(image, (st[0], st[1]), (ed[0], ed[1]), (255, 255, 255), 1)
+    return image
+
+
+def build_camera_box(rear_size=90):
+    point_3d = []
+    rear_depth = 0
+    point_3d.append((-rear_size, -rear_size, rear_depth))
+    point_3d.append((-rear_size, rear_size, rear_depth))
+    point_3d.append((rear_size, rear_size, rear_depth))
+    point_3d.append((rear_size, -rear_size, rear_depth))
+    point_3d.append((-rear_size, -rear_size, rear_depth))
+
+    front_size = int(4 / 3 * rear_size)
+    front_depth = int(4 / 3 * rear_size)
+    point_3d.append((-front_size, -front_size, front_depth))
+    point_3d.append((-front_size, front_size, front_depth))
+    point_3d.append((front_size, front_size, front_depth))
+    point_3d.append((front_size, -front_size, front_depth))
+    point_3d.append((-front_size, -front_size, front_depth))
+    point_3d = np.array(point_3d, dtype=np.float64).reshape(-1, 3)
+
+    return point_3d
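+
+
+# build_camera_box returns 10 points tracing two closed squares (five points
+# each): a rear square of half-size rear_size at depth 0 and a front square
+# scaled by 4/3 at depth 4/3 * rear_size. plot_pose_box below projects them with
+# the affine camera matrix and joins the corners, so the drawn frustum tilts
+# with the estimated head pose.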
+
+
+def plot_pose_box(image, Ps, pts68s, color=(40, 255, 0), line_width=2):
+    ''' Draw a 3D box as an annotation of pose.
+    Ref: https://github.com/yinguobing/head-pose-estimation/blob/master/pose_estimator.py
+    Args:
+        image: the input image
+        Ps: a (3, 4) affine camera matrix, or a list of them
+        pts68s: (2, 68) or (3, 68) landmarks, or a list of them
+    '''
+    image = image.copy()
+    if not isinstance(pts68s, list):
+        pts68s = [pts68s]
+    if not isinstance(Ps, list):
+        Ps = [Ps]
+    for i in range(len(pts68s)):
+        pts68 = pts68s[i]
+        llength = calc_hypotenuse(pts68)
+        point_3d = build_camera_box(llength)
+        P = Ps[i]
+
+        # Map to 2d image points
+        point_3d_homo = np.hstack((point_3d, np.ones([point_3d.shape[0], 1])))  # n x 4
+        point_2d = point_3d_homo.dot(P.T)[:, :2]
+
+        point_2d[:, 1] = - point_2d[:, 1]
+        point_2d[:, :2] = point_2d[:, :2] - np.mean(point_2d[:4, :2], 0) + np.mean(pts68[:2, :27], 1)
+        point_2d = np.int32(point_2d.reshape(-1, 2))
+
+        # Draw all the lines
+        cv2.polylines(image, [point_2d], True, color, line_width, cv2.LINE_AA)
+        cv2.line(image, tuple(point_2d[1]), tuple(
+            point_2d[6]), color, line_width, cv2.LINE_AA)
+        cv2.line(image, tuple(point_2d[2]), tuple(
+            point_2d[7]), color, line_width, cv2.LINE_AA)
+        cv2.line(image, tuple(point_2d[3]), tuple(
+            point_2d[8]), color, line_width, cv2.LINE_AA)
+
+    return image
+
+
+def main():
+    pass
+
+
+if __name__ == '__main__':
+    main()
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/__init__.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.cpp b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.cpp
new file mode 100644
index 0000000000..4f51572d82
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.cpp
@@ -0,0 +1,215 @@
+/*
+Author: Yao Feng (https://github.com/YadiraF)
+Modified by cleardusk (https://github.com/cleardusk)
+*/
+
+#include "mesh_core.h"
+
+/* Judge whether a point lies inside a triangle
+Method:
+    http://blackpawn.com/texts/pointinpoly/
+Args:
+    p: the query point [x, y]
+    p0, p1, p2: the three 2D vertices of the triangle
+Returns:
+    bool: true if the point is inside the triangle
+*/
+bool is_point_in_tri(point p, point p0, point p1, point p2) {
+    // vectors
+    point v0, v1, v2;
+    v0 = p2 - p0;
+    v1 = p1 - p0;
+    v2 = p - p0;
+
+    // dot products
+    float dot00 = v0.dot(v0);  //v0.x * v0.x + v0.y * v0.y  //np.dot(v0.T, v0)
+    float dot01 = v0.dot(v1);  //v0.x * v1.x + v0.y * v1.y  //np.dot(v0.T, v1)
+    float dot02 = v0.dot(v2);  //v0.x * v2.x + v0.y * v2.y  //np.dot(v0.T, v2)
+    float dot11 = v1.dot(v1);  //v1.x * v1.x + v1.y * v1.y  //np.dot(v1.T, v1)
+    float dot12 = v1.dot(v2);  //v1.x * v2.x + v1.y * v2.y  //np.dot(v1.T, v2)
+
+    // barycentric coordinates
+    float inverDeno;
+    if(dot00*dot11 - dot01*dot01 == 0)
+        inverDeno = 0;
+    else
+        inverDeno = 1/(dot00*dot11 - dot01*dot01);
+
+    float u = (dot11*dot02 - dot01*dot12)*inverDeno;
+    float v = (dot00*dot12 - dot01*dot02)*inverDeno;
+
+    // check whether the point is in the triangle
+    return (u >= 0) && (v >= 0) && (u + v < 1);
+}
+
+void get_point_weight(float* weight, point p, point p0, point p1, point p2) {
+    // vectors
+    point v0, v1, v2;
+    v0 = p2 - p0;
+    v1 = p1 - p0;
+    v2 = p - p0;
+
+    // dot products
+    float dot00 = v0.dot(v0);
+    float dot01 = v0.dot(v1);
+    float dot02 = v0.dot(v2);
+    float dot11 = v1.dot(v1);
+    float dot12 = v1.dot(v2);
+
+    // barycentric coordinates
+    float inverDeno;
+    if(dot00*dot11 - dot01*dot01 == 0)
+        inverDeno = 0;
+    else
+        inverDeno = 1/(dot00*dot11 - dot01*dot01);
+
+    float u = (dot11*dot02 - dot01*dot12)*inverDeno;
+    float v = (dot00*dot12 - dot01*dot02)*inverDeno;
+
+    // weights
+    weight[0] = 1 - u - v;
+    weight[1] = v;
+    weight[2] = u;
+}
+
+void _get_normal_core(float* normal, float* tri_normal, int* triangles, int ntri) {
+    int i, j;
+    int tri_p0_ind, tri_p1_ind, tri_p2_ind;
+
+    for(i = 0; i < ntri; i++)
+    {
+        tri_p0_ind = triangles[3*i];
+        tri_p1_ind = triangles[3*i + 1];
+        tri_p2_ind = triangles[3*i + 2];
+
+        for(j = 0; j < 3; j++)
+        {
+            normal[3*tri_p0_ind + j] = normal[3*tri_p0_ind + j] + tri_normal[3*i + j];
+            normal[3*tri_p1_ind + j] = normal[3*tri_p1_ind + j] + tri_normal[3*i + j];
+            normal[3*tri_p2_ind + j] = normal[3*tri_p2_ind + j] + tri_normal[3*i + j];
+        }
+    }
+}
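+
+/*
+How the two helpers above work (an explanatory note, not from the original
+source): with v0 = p2 - p0, v1 = p1 - p0 and v2 = p - p0, solving
+v2 = u*v0 + v*v1 via dot products yields the barycentric coordinates (u, v).
+The point lies inside the triangle iff u >= 0, v >= 0 and u + v < 1, and the
+interpolation weights of (p0, p1, p2) are (1 - u - v, v, u), which is exactly
+what _render_colors_core uses below to blend per-vertex colors and depths.
+*/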
+
+void _get_normal(float *ver_normal, float *vertices, int *triangles, int nver, int ntri) {
+    int tri_p0_ind, tri_p1_ind, tri_p2_ind;
+    float v1x, v1y, v1z, v2x, v2y, v2z;
+
+    // get tri_normal
+    std::vector<float> tri_normal_vector(3 * ntri);
+    float* tri_normal = tri_normal_vector.data();
+    for (int i = 0; i < ntri; i++) {
+        tri_p0_ind = triangles[3 * i];
+        tri_p1_ind = triangles[3 * i + 1];
+        tri_p2_ind = triangles[3 * i + 2];
+
+        // counter clockwise order
+        v1x = vertices[3 * tri_p1_ind] - vertices[3 * tri_p0_ind];
+        v1y = vertices[3 * tri_p1_ind + 1] - vertices[3 * tri_p0_ind + 1];
+        v1z = vertices[3 * tri_p1_ind + 2] - vertices[3 * tri_p0_ind + 2];
+
+        v2x = vertices[3 * tri_p2_ind] - vertices[3 * tri_p0_ind];
+        v2y = vertices[3 * tri_p2_ind + 1] - vertices[3 * tri_p0_ind + 1];
+        v2z = vertices[3 * tri_p2_ind + 2] - vertices[3 * tri_p0_ind + 2];
+
+        tri_normal[3 * i] = v1y * v2z - v1z * v2y;
+        tri_normal[3 * i + 1] = v1z * v2x - v1x * v2z;
+        tri_normal[3 * i + 2] = v1x * v2y - v1y * v2x;
+    }
+
+    // get ver_normal
+    for (int i = 0; i < ntri; i++) {
+        tri_p0_ind = triangles[3 * i];
+        tri_p1_ind = triangles[3 * i + 1];
+        tri_p2_ind = triangles[3 * i + 2];
+
+        for (int j = 0; j < 3; j++) {
+            ver_normal[3 * tri_p0_ind + j] += tri_normal[3 * i + j];
+            ver_normal[3 * tri_p1_ind + j] += tri_normal[3 * i + j];
+            ver_normal[3 * tri_p2_ind + j] += tri_normal[3 * i + j];
+        }
+    }
+
+    // normalizing
+    float nx, ny, nz, det;
+    for (int i = 0; i < nver; ++i) {
+        nx = ver_normal[3 * i];
+        ny = ver_normal[3 * i + 1];
+        nz = ver_normal[3 * i + 2];
+
+        det = sqrt(nx * nx + ny * ny + nz * nz);
+        if (det <= 0) det = 1e-6;  // guard vertices with a zero normal against division by zero
+        ver_normal[3 * i] = nx / det;
+        ver_normal[3 * i + 1] = ny / det;
+        ver_normal[3 * i + 2] = nz / det;
+    }
+}
+
+void _render_colors_core(
+    float* image, float* vertices, int* triangles,
+    float* colors,
+    float* depth_buffer,
+    int nver, int ntri,
+    int h, int w, int c
+) {
+    int i;
+    int x, y, k;
+    int tri_p0_ind, tri_p1_ind, tri_p2_ind;
+    point p0, p1, p2, p;
+    int x_min, x_max, y_min, y_max;
+    float p_depth, p0_depth, p1_depth, p2_depth;
+    float p_color, p0_color, p1_color, p2_color;
+    float weight[3];
+
+    for(i = 0; i < ntri; i++)
+    {
+        tri_p0_ind = triangles[3*i];
+        tri_p1_ind = triangles[3*i + 1];
+        tri_p2_ind = triangles[3*i + 2];
+
+        p0.x = vertices[3*tri_p0_ind]; p0.y = vertices[3*tri_p0_ind + 1]; p0_depth = vertices[3*tri_p0_ind + 2];
+        p1.x = vertices[3*tri_p1_ind]; p1.y = vertices[3*tri_p1_ind + 1]; p1_depth = vertices[3*tri_p1_ind + 2];
+        p2.x = vertices[3*tri_p2_ind]; p2.y = vertices[3*tri_p2_ind + 1]; p2_depth = vertices[3*tri_p2_ind + 2];
+
+        x_min = max((int)ceil(min(p0.x, min(p1.x, p2.x))), 0);
+        x_max = min((int)floor(max(p0.x, max(p1.x, p2.x))), w - 1);
+
+        y_min = max((int)ceil(min(p0.y, min(p1.y, p2.y))), 0);
+        y_max = min((int)floor(max(p0.y, max(p1.y, p2.y))), h - 1);
+
+        if(x_max < x_min || y_max < y_min)
+        {
+            continue;
+        }
+
+        for(y = y_min; y <= y_max; y++) //h
+        {
+            for(x = x_min; x <= x_max; x++) //w
+            {
+                p.x = x; p.y = y;
+                if(is_point_in_tri(p, p0, p1, p2))
+                {
+                    get_point_weight(weight, p, p0, p1, p2);
+                    p_depth = weight[0]*p0_depth + weight[1]*p1_depth + weight[2]*p2_depth;
+
+                    if((p_depth > depth_buffer[y*w + x]))
+                    {
+                        for(k = 0; k < c; k++) // c
+                        {
+                            p0_color = colors[c*tri_p0_ind + k];
+                            p1_color = colors[c*tri_p1_ind + k];
+                            p2_color = colors[c*tri_p2_ind + k];
+
+                            p_color = weight[0]*p0_color + weight[1]*p1_color + weight[2]*p2_color;
+                            image[y*w*c + x*c + k] = p_color;
+                        }
+
+                        depth_buffer[y*w + x] = p_depth;
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.h b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.h
new file mode 100644
index 0000000000..8a47153d34
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.h
@@ -0,0 +1,65 @@
+/*
+Author: Yao Feng (https://github.com/YadiraF)
+Modified by cleardusk (https://github.com/cleardusk)
+*/
+
+#ifndef MESH_CORE_HPP_
+#define MESH_CORE_HPP_
+
+#include <stdio.h>
+#include <cmath>
+#include <algorithm>
+#include <string>
+#include <iostream>
+#include <fstream>
+#include <vector>
+
+using namespace std;
+
+class point {
+ public:
+    float x;
+    float y;
+
+    float dot(point p)
+    {
+        return this->x * p.x + this->y * p.y;
+    }
+
+    point operator-(const point& p)
+    {
+        point np;
+        np.x = this->x - p.x;
+        np.y = this->y - p.y;
+        return np;
+    }
+
+    point operator+(const point& p)
+    {
+        point np;
+        np.x = this->x + p.x;
+        np.y = this->y + p.y;
+        return np;
+    }
+
+    point operator*(float s)
+    {
+        point np;
+        np.x = s * this->x;
+        np.y = s * this->y;
+        return np;
+    }
+};
+
+bool 
is_point_in_tri(point p, point p0, point p1, point p2, int h, int w); +void get_point_weight(float* weight, point p, point p0, point p1, point p2); +void _get_normal_core(float* normal, float* tri_normal, int* triangles, int ntri); +void _get_normal(float *ver_normal, float *vertices, int *triangles, int nver, int ntri); +void _render_colors_core( + float* image, float* vertices, int* triangles, + float* colors, + float* depth_buffer, + int nver, int ntri, + int h, int w, int c); + +#endif diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cp37-win_amd64.pyd b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cp37-win_amd64.pyd new file mode 100644 index 0000000000000000000000000000000000000000..5a2658ff0655b645461f9edece23f9ad16d4a1cc GIT binary patch literal 67584 zcmd?Sdwf$x`uLyH9w?W1iV!U>XwjlY6sz?@EoeehIFVFPK&l{A5kv(=NdyJ7m{y3# zD6W@9*V}qo*nr zInO-v%ri63JkK+8CgqpRcVs#ojx0hv?r<#SE`M%y{r8vb42R>eV_rSXvAF$PJ(p(q z-|9JH(pA&)rrvPfl{Z{AC2#y?*Is*FAn)=Cc{c>F&AaN_Jnzuqc~h>tVnSc1vqOFY zb%#j(U(f&gxjo4+_`Y-YSUg`c_t|DY_hT0;)*Bm#sTjgS=`h9+zM{Oc)$G6S3tYWUXOToZD25_)wLx$aiV!lwd%iC ziSyKN8DYW0t-89LiM#9K@gIV1eyF$VR_8ReV<3|$MN`bQ?6uMBoeoD$ZJ>+9uOV*n z@TG2QEA(?b&^CIOLB8nQzsBS1nubU`g(G6@e32Y62`^LqMe0zq zDNr2xDkD%Jmm@-Z0bMGik#Y*^qpOf?F(dj1Zsw9&mFBZI%Du@1sJy1uyuVI8=1TXF zK4pai`8ig2R(_u6BGtX@AN=$)^X9wVuVjPe#hGAQAja&bv$@(w33)*Z?QuC9D#hxZj4^3$PDEA?NzUe z4D;FREj!b1I|qWy6Ms8ybl$~uECj~d6xAk%L?FMPWzWjD;6Ea5Mf6Gfg5peGXuu#LI(Sy}6{-4#yombR zI~<{2eMxsvjo}niw8?$PhUgSxySDu9gV1Lg(Pco}^|2>35qI-u{`_^8z0wL-CkX%yQ11AI;$fxFdk~om|kYt#vu))ZW%3l1EXgo(SBGTdt70V5R847yV#_Dw4>(3_ll027kiM=CPt znT4^gN!Yk6$U99#tZmT;@|ID7belnIxIJUXjTL#+(w~)}^x4gbNBesQNzjOX1{3^t zgZ+*r9Yo1R=>#Gfo9>nonHr1s)}qyM*O0$TM_U_rolPX}@~X$iSKDVfWL)KqMr8Jr zw6LEs6#BZ~vLoSH+;tLZjQe)Wq%XeJn@&IQNBY5Ec(QCS1%{2@G?^8Vc&L`)XDkDN z>K`u2VI#CtL%NC2FSt7QSF6*$asg==nvE*#X%8-&<(rxEe zO{9nYS$WCC49j-MU=lYq1r&BEKIL`12=OHgElQqp+or=99#!@=#E;TAC*>?)r2gd};{A+st}V z8gJZKH4|E)%W^f|V3ubuY9CnrDHhgeB{FkpoyA#9i}vvTLnVB_7c6 zNO_)q9wX0Xd1TA6>bB)sMZ}BBa;%~{YZ(Ob2d3kAC~7InGQD-QV$+vW$QXpp?u|Td zl1F;brKka47f9#Ku|flW{u>MqwvD^GP*96RUq|!2jr^jwwc!PPh&bzbZfZu>!mc-o z*Qf5_i+%|yRwz3^`h||Pp{#;>;a@@Y88XpeJttUpRUyg}63Btscgm=g`ExaC(f2fVBU-0GTk0t?T!AejqV)oEi7}YWHLG8fA!~Z;=NV`5 z9K8{d84UxL;6T^tEd9_PeqB#A_SGFN8luKkHDDj2z+rCGO0Sy1yq zFdr(v5E;{xyi{GN{#KqltLH#pxLh>Ryv}GZ3 zBZ6pRu_re;!ry0w`HH8j6}ceS%G^k_trh81<&Sh)V}-ls`%2ypIi^HsNjT9qHHA%tiO*K0^9z-1PuB(I{lL z0w3C+6)D?c-Umm#`IZ#gOA4hlcieS2QMz9zSf3rSGNa3tKJakIPcFShw8Jfl22}E(j!&Ir*4O2-s5!M?yd6r50Q5T z9aze45gR#0wMDDVC%krDS)2>M z;hFQpRo71Ng|qU@I<5gvmc0kYXNO1RcM1-(LeAfP3cq^oI5%z%sKq=E6bz_r+r|;- zIbf>XjxrYQ*@teHH$C54mcbk@?6u4%>!x+}+Qy{s(Nsf?8Q!pQDbZLv(nb(j%EadI zMy#JpcFa0nXr*+oE!t03vTXC!&)QGxP}Z@IygkX%V9edFbp&tNSxwY@^~@?swB}0;2o0X`s*8Xw`k1XLa0YQL8-BR`QOmKf69S$~MjhI@Flm zVDu)EDF}=chA93E+5UK~bjDNq}|$8GVJYzgKnO!?dTZwop@&hNI- z!NNWM#Z6kaK$PkLMrvqfj{ZR9kFp0f?zXRJ(QYUIYIm9QLZ~Aa%`E*UQy) zgt{(Q*G1}jwz@v7u6L! zS^X9XX^4lTnLY6YifY7I`r^$s0!Le+J{iF)ExR;Z^wVL=z)z>- zXR#R56PzYF{SoKqe-YZ;xnSQa(TM9X2$Hd!XW106g_0HW4wjAI0^}FALLPU02*;>O z@JJ|KD)7jCC{_7~$-Di^PI5D$TPt3oRi@SWWbjCP2~DrIuJO4PKv}0~6S#0R)pC{! 
zd+ar;9^z_oohQYo>d-Z<4oL>l$7rPe9WDvmJv897ZSdEuuHuSBI(Zup$9TCcDecyw zl)l?BaB9`eqZ|R8ER&!!MjeJ{`|Z*kE4~JMI?%C90SVwW~scD7ZsI&e(fH3c{;)EviK5UMY*LNl8k!8;Y5%vDR7KSUQ#_YpDc6l4f8 z!Pu5W$IfgL@);d|7s(6gz*J_ zDUC^U5yPHB>bU6!A)Se#8zp$ON{j8_7Iv*yd}%>`R~quf_(Om6+bjI`YO7>TFwMsy z!!fi}4Mj6j0yVqmB*vxNtnQ;&G%(v1RUXyrrrh1PCEo@*`9LPBTwWWh{Ob0j%#hgS zGSUmnm?qID`+4YD;tR3tn{%w1xOvwKfrt#u^_OIu)tI(u;y6&S+e$XfidpsK;PSsysu7e{lf&-Rym zV%~W(oha7UlaL?c-(k2QYteYc`h^ko35H-?lszhfFzrvtKVe0VWnYqmh9WId^MQHS zvUG;XgD=kTs-?_&VaR~Lfyoa;;HWSp=}+@%ek5A%Kz`iNnjb}q9p`9vxD`Ln|Ihew zLV_O~0zG{36+Xp}cQikwON-A7*H4a;k=eRms^_%YuUqFUX$-d2labZZQoWJLIYpan zM*iwsJVKkl*b&&0=o7w@)w8~{SJX?rEjujj1DZfr#(eO*qD}r1IF;J9+zgN7nq!sUzja;@Udfs-v^bshbocGh=HScx(u>*2bLTC2o$ zqAH`dUK5|f56b0b;nSg8h+78Gtxy(l0j#rlxVVxG7G5z z8J($QVxJ7VIk?~S1RxG>8Zb{`?n~Lp(4Dx8aC};tyi4*V{VpbD?nRyvo{^r5Jfo1d zRdj8?J>c~wxqa`od!0$lKI0^=ao4JcBsE&IE6~Y)PrQ&l(5f&&zqP^-=i_&D?Ec6K zzn+gEbkywfnBK2zzQIjm4`@qn#7f{~+qE-_N@l+)uKG$8rxN2+5@vU`7Fe0J#giMd zgY7N5dqdOv(uwR1ao0-^lGSdeZ=2OU5MY$u-D|H$-gZL-`5+_WdF#oFVY zi@R=7nRcj5TSQ#g&f$>2lqF`vRED)Td`12w5q{|ZOfF>*A^fh81f}v@5_8K1-xXp% z;mnZti?qyOM%M<>N*X2YQGJD?VL<##=>Nv7RuZfx z9z1)QERDroF9<5BAnzf=*v!4bSk2W>Rm`f3MtL-U`1uQmV_=vDJ}=z8!HVOlHXLA+Cb8e~*>A;N ze-SRImJXmvGz9y9$=}sh`q7qZgw{_>UY~39#``P?jxWxos z#mzI$GuCsdXN>Zi>QRu8_bq-EJpyo_dh96kguOTJDpU*+O*Y`6RJz{=y5B=Gh*kG1 zpy%UffzlYAm+lvTAX$@OpVo9UqeLFg7ixU=XlB1_a((u+JfHoUDAUFp|4OjL`6T=j zx1;peqD_p_Nzy+nh2H<7lYVqRl)yR5-m#zfNbqfx>a?Qmu4D&z)S$>v*F zYDskQ^=bV(z9!YhOE%y7wUrr#@En`(%JR+`6tHEiLtTiNv( zb$~)ThLFji=e&l$i7udy$ijkt_xh%_}|vHRZHk?4%gg zzs1mxT*&OQ)nnI})ohxPTRwhGdFJXtCEnH9w`7ZcgmxfrZs~)zVQ5q@`*q<{my$PoZ1wuaLr* zs_Lx{wx>+y!b>r2=u>WKUdxY29_F{lPx6yg;kTz%_({(5+cPVzhSEy7fSbRxB5|K2 z_qc2Ey}~5bjOST4QRfONqS(p}c7mO6@X!>NoVIK0cDXhB?SXi}KmWZvG}GD-3B27F zmoWX?SyG-zJ>RAqWfW%6M8RLuvCSU!d)4jnZ(_%>&cF+#|zYTvze8CQ49QGIBCODF}<8JG;j@S;m>##+1YBMBDebx zQkB+W%GM3Ge*;VZ(!M2WLoFOw06C-kHmF`*w5#ZYxT_$+Ymo}Isx6%FN?jP{?zrnj z#m1-%#96F2;j+;RQQ+FhMSYpvpZJT^cLdxQkp6yqgUrDh;PH#i{jQquN2(&E8wAgy z@g^!PnD%dyPA<4v4}wHqvZm+lmdLB-a!A^5Hm^C@)I&$N5Sb#ghRFc0fHj3Ir|!?1 zWdCPTDP_dIyPj&a7_pZ9mCweBzY#xv+?6YJOn6N22@tokL(3KlxwDR;R&F6Q;`;s> zA%DGJlqjlr6sfUW@C)QwB@NSh1Gh-Ztda>}HXv}b>%q?+2B`W`sRrJK+TKS`O;+_6=fh)2ihxtlzboz9|j}t0dQ~-pWJ5-Yzd$rR?pJyXE~2xgr`7qwky5 zizNs8VaiK#ISIQ%YW$#cnAI05fPpPuk{M}UlF61G5+hW)3hVstoTAmVl6hA*NmIUw zD(XbX$_bp6vU?lF9&PHN@M8XYg@A;AVg(Xb`zJPISvH}~>E+F)4~c0n%nD+Dx`>p7 zhe3H+bd3Z5Ock+q)_4BMg}4<~EBrp2?v(Nb&Av9xzjC=%vNiC>v}&BFs-gS|gVOv7 zX7x0&DO-FjvDgU=Nv+?c=&v)Y{Q^cEVmN)w>T`JV#p`^b71`w_Yt1`v;F*q?BmPp? zFLz4S2mCoA11pHZ&TO#?gZ*3c&P!Pjk!BUkmcE5DB=ZbXGpX(=gFW`K?q{lh=(P@e zj*KSp9ojPp6%3^u_Jw5o!xpSPa8CPAM~@~&X-)h0ZF%zB_eiOJyT=nY%XssBjAVMm zDZY~Wz==NlJcwxHvoAy*A+y@ct4xs_hz0I|u9kZS9;Wkh<>4*A{dVlNv|@0o=vui3 zD*Or8C~MJcf`=&`gj@zr_SnA;Hl4KI>%nfgq7N98*|zcqny+pKE7>4()6Ki*CdW+RTJc)WzeAc~hg3GGM(9R`ySF5x zL?jI0i)f?u;yFiRo$t4Y@c53WrF)t+*GPA=oi__f5!YwpnH3KPQ`jhyn#StaqxI{P zl&6$;JJH@+(V5Peggjv{SEAGOxMQni7%(2;udT&JRZ@3Mlw%a}@Mdi*gt%nJP)yu4 zmMX>FdA5~@E#~2&B>R@*WStq`QrtBR7N+6JPP0=b%|TBY?Dp%x)7w-HVT;) z4zMs#6jIeUr)=Ys{S|s=0F@bLX1>YVOn=JV;Fv^rFvVz^VQ;=W9JwR}}4HPn8uu zRWuI!ZShS+2K7g;rIxH^1nD7$_dWO&F0#U<{RoAm{QgZzDRo=nY1y&CUV8{@nAQ2U zbciyXqi7k0uK`7G> zglRYsl=I-u{EZ}%_i-_8QT!|s1T0xV!YsLnw#tG`_G zc23pJz+!g*5n~J5aIIEGe-d0`4|J0H*s|a>G>454flDkldcc$g%p_Fh31Nvjcm=C? z3nZ#MaS_)O^44zWlQnt@FMye^Q47C06wBPzDBeFH^AuP2!nUTTWl_F(nmfpzWTUxW z-j}+Wx?d!HzX2p&6P#+-@6>+^oQDSWNQ2g?Y##Ui{WXLmygP57l()qmcQBj1!)~f4 zdzXHNh5LagT@xnJ3OxxAFDeleD}Ms9i2v+P27jJWP&d$IV}edh9! 
zyj~I7JKLPQx;;KEb4m5#+sZ{R|J-BWdD}vwzRWf8?Y{V%esLSc))8A(S~fsGE$3<8 z@G|}IBIYCJ!L)uD*0RFo@Knvtm}%Cm05N?odKa7f=;3Dd!zNfmZ?^Hy8}BWOV_Fsx z`uXf{#FWG~c~_wGR9lKAXI5WB1}olRmHZmquFCpHwywxpb`*OfL(hv$FA+Vjx{qWE zPwUroBoVK(oFyueoIS z;Mhst;;9uL`#q+fLJx}#-g(0ZC-SlOTVB#=);!#S8Y+%@ZFfakHS5$CNo6cH;LFVL zusEnRy|nc#zxe`f&&qYEDI@p;)gMe_w4)AiOzkgar^NCyyFS!>>hyE01=1RAW}Qzl zG({zM)fAprtk@vWE54Y=^UBCre{b=4cbUD%YnJ^QJH_+L(AbHdSBAx)z`5%-xE67p zr7Th=zjNq6Wv4OVUH8@M_Ge0CtQ4|LJIJ#8_IC%a=|47bC44LO+QZ$Rh`TP-e7ZRo z3E)UjvoS%?PM7Aiu?m5bbOt#Icu^mwYox;x-PzbnN%#+S#3d$byy~q zi)H%kwe+Ao*|uA2?Z#EPDbT^{v&NU%XoZL5u>#%C5geXGxio?DsU%9Ro1U^%)u&*o zLfO-Y{o0+1SaVcsg^|T&;zNT^5$Yx)gjv95$7IQdU|Vm$*<%9dn@h^Z^e<&eKQuer z5jcs(ahma>94nL0ACw4{=Q1rpY2om&?53BYzrPje;HipRX3g0!K=kmMQzb%nzkPm= z)XbmhLm{s-Yc_UtIL?c7#S$H!$IuFxzQ`bQzRSXT&_=K8}hK{G0LH{GEugWR6ir3aWxHuj$w=+ys;Kz9Et&6;8;>NngQJjq)! z-W@z%3hGxnDA*%Wav}MW1u8++^x)nD1<}@&Ai~~ZYw3(!hgnVYokwm`(E+vVW($eM6s?x#2*txI&Tlv#zm>!NhR4-;KXeEC_Gu?F9 zgUO#8Z&5w^6*axmotNL5ls{c5Qm^m+xALblL5-Wc77o%#|AYMbX0{?^`CZc2Aw-@J zEq@j#idc{;;@`=ido=D%%47xiRw})}{Hce)R`O>Z7?I4**OdGbDZh`x5b{xO?Ed^; zNS_MuzWizmuXTaKdpb9T_uq=3H)kob+ESOK2yT>h*@lqa17fsA<-&gUcXW#81_1$Jv1n$yjh#5=qk_m1~52C$Ki!gq&77 z(qM#uAD76aCKvCkzACKR+uoe(644+6%@eUUf}wxmtnU4br|0rvQ>0UvgorO1>0~J? z`xnmsTnVkv9+b2XMD~tn>wRfK(?5R_I$dRI068DMnKv?yo+i??0XXR`5!ZUGyC&JX zZ7vz&g(2M-8mJM`8@rJ{oOI99OQew^uBQa@c2d0d>ZT)v$s&ERhR6ABHVJ(o%lz&g zf`kS-s6S6<5u@9j6<8))k<9-kXPaITGh*eRj0e#J{kHmipclUEmX zAQl`P?TSu3=ir`@DV_*pptVEN6$@-af{e)?&>ci!m1ep}(!vqE4o zgx#mIIlq}91dzMGUmwP4)oJ6&`%eOe!5uerxyuX5!WB) zCa8N<*LC;HGC5R{M|TUmpVm)LK|j41{dA;K98=2a(*mbx*`aCKn50E%{!Q|JIaPKv z{ZV0bellGZyGSfRVTwG*Zb~9ms^|!f&crI5H!Kj1=YVV9)D3hN;p3+Kp z#s)}Y3Px$dX4#I|@wDa@y6C4dB304P!hxb)@F0^qr|Yb*Ln;&vL1*=#vx*YFlVsp*=Lr0dYC>Tsp1u1Kn?{yv-hnQ!}1>9ndUlf}HnGFd3RTQuI4Nxc7dvRH7V zV&m2t8UA&uIv-KJ^vPnq=&KVBrLUr^W{zUvP)z;<_0<>QfSxS&p|9Stj}BL|-mIA; z5j9z$tKzBjXAaF#`l_tq`668lTkET<_S08?Zmq92psylq+8sn+El%jGC${LTnq;M~ zUTBs4+)7^!j#G1mS#ws4ek#*MIZH8!lOePPu2|ph-7RLr1~D6a_M1&&haOBDeNK2A zaZRhHk5(ikTC`VJF<)HEU^lJ|TV`z$g~0WpXZdC0dAXPn&L0Dbj#f%z#RNe4FPcz& z%gp6vGWxcL1jM-}3E2%u+7QoRr8wB9if?d+TER(dcNtedaekHMa6B}?;aJT5#KC-6 zM|g_ozZ3pE5L1wQ4foB2^UvXfdf?6j#^Fru+p_L( zC$!v4c!=;P!cu9AtStWd%V?98#XqRjW7>u953<^1Cf;Z9Ahdl#{x1mM5Pl@=A^cWc zFO)hQb%a%fzY?A%EGDca=mz-j+M~$pa6CX5Kv+!}On8Jag0P72TifGMcoojy&>kJi z9FER}T*5Jg|4n;bPX4P1Qwh@vvk1SnJ=Xagj_rgm2n~c4gtrMX!f$Dh^UrfQ9wSr` z{!Ca!c#&{9;UmItZI46YRXBe`dz>}I;W&p-M(`85;P(8jJiMFy4-g(9JVp2m;Wy#h z&y(D;p1^D=n{4%W?0Kw#A|7=x`aHP~Ap|#0EJ|`}?s(x?zb7 z#uOpF3|nLn#)!Ej6I0|fTy-eVF9{Xw(d)t{Se!`NIM*!n+>)%rgEyPYyHxExDUc6U z0cL5ZLAAWf6nU*GQPcCTL-9jBr~Kip1eD79V!VOXe_45^mNh5hC@%TMtR@4zp@@j< z`CA3e2DMfUyz}`f^6aiqtlL8wv2#dWK&pSdEIFQUdsj%lBf!D;KLUV1$T=MVQfDwd z!!0)Op=-XT7%w}j+%m;lGQ|#)l`!@7eN~Ag*o~Y6bk5_!r*g@2tWl_6HE3{#)i6kT z*L`8OnHfHL_dIg>x)Bb?hT+sg{xS~uJLrCG_i%P;^G&$9`~oja1Rmwg<(tk9XaJ#= zmS^1PdKS)bn0LUTo$ynw06qH$s`gF`oK>}VYTz^sm(syye1{u=^xGz@N)N$pkYd>? 
zNYqadMCh2g0r8-^!9AA{MMAqs-f6F z2jCLlJC3enTU_of`}479`nCAm#ov{*XJimnMg}gI#jL6H8QCs~J=p8!zsLdt8w+IH zV4=2mlpj8;8rzlLT|J4rFB1o`XNeUn^(^tl&hac687mg8f2qt{p5^GzV(;vT<$IP4 zjUD4zGAt$wq7$!Cym&-27PJnt2y$NZuDG$(NmtZPqasV zzQAa@wj}5h+gj13tdyp?3XAiHs{zD6I#Y1MKY5N#P9jsr&fo;1lSb=`j+ zifR+yuBQ8>cOAre&Y>XATPG<-)?%F`8MzYP#>OVimx=A9&dyQ5u|;sF^cgWk!dNDAhgrsVkpeMP-$Xay7!cceqL34DU6VA^ zZ9px5_PYn;HwD%!a~A*gj)!IC+L3p-}o_-t=MvJ)t=_uyTJ<&7C)BaLD$PP4}Prt zf6N00<^AAhL`+P!x;!Upz8{CCXt?L6COGsm9817WoPPy`T}zljSVLGqxQ*~E;h%)x zG6d*Hxs@jY$EI<{GRXx;qQdr zz^ve2%-zxDfBToA5VXnc(j}`)7o%;vE?u(sL)pGV$1a_mhnZcv+q~v2J&QIC~mJ%A4k{bj}CJAa9@iq;AAg<^3*{7p@D3kH{L+k2E6%JO1recopu#`na2_HRy={p>E5L!`E&)%H#t zGE%2+eelR<|C;(nKF6#+tBtUVk4Ht0rMI8mp5p?{>b*h%elJI$kL}3|ZL92GDPJpJ zoD*})7Sc&pAHGK3o5{gLo12;xwHLc%z4#s&9f`#sO^}_-{wJLm$I9rOSQb~ePtXP- zdv)A(GT;ivB>Q$?14AquCHJTwKHpM zc%pEyv7JqKL)oy^K)qL+HK)+;eI;yS>|ImZNoq~)h2`7%&{?y<|P`57AN)_02K zXFDURjOzS(stMunWD*i~pR8A<*Mf5tm>6Pw5AM83Y7%iB0}?;oHI=)sq)YXa&Z!g+ zl4F8%+SDmI&Mq3oQ(_A!3H$>zC!t#I$OB6bZ-mvx%1Wlzl8v$_mpz}Z-efH|%j?ii zegc_nHuiC{A&>0@sWO0Ns8S>(KPfDcK6HmfFk4#<)I^cU`#E3B`aF)n#s|2ie++0W zUnnoO!dec!D5(p|0jCqPCCyn1`yuMtq>=Lh#+%|olNSJe8jhU3AhHy_VJV_4c{q-I z$(J}4rv@x>*Jknpb076>bt=vT-nX2J(=l}_PN3otXWuMFhXmD|qDm^crO&?U0iIT8 z-)y2#8Cea+hktfBW_HEP`!Yg8J}^0jSA2g{^UynCw8_ z{!9w;ZXl}!a#9jadY0AOE?rr_EZ3eF<>j6mlI zv~3KRSDcF|W>B0Q<8eGdNq!4{1Bky!j*q2zBhIa8)p1vehP$E#^G8pk<_O#pX)4>; zt}wd>3TF1P^AvZLL%n9!3qU_F(DMXqXu!>M8b@FjXExnLY(Adc1I}w&O6EhH*YqZi zemSox2vuq4HGNAmKCNci``&V1)A@1N8$!LD*Tev?FTlAZ=QV`}yz!I}F#zI65aZkA zN7T*vE5!1>kS6}<2IB{kUl!um0^eYK#^WM+e4R(WmzE~dho8{ot9ZFgUOvW4diq~4 zm$!Ud<`LrP7AH{LBNV{9!H?qG*_Skrsc$a|L1ExPIw?lpWvRYMI zK7KK47D$=4^9V_bxHexU4WqwJdQCVG?vVw?2IE)?silwxqdPGggiMAHwU(Z62Pw4A zUT*9S4Q8n)0doX8(Hm^46TORVeiXA=KxJ~gUP1L{fx!5OK=NnS$X+4TBsry{X_$&T zm-CYPjC+DxxW^3jbf{|mFhXF}H$BHwR;fHjTv=&Vib^fRJ+22$Q-M|(duYsKNs2C8 z3z1>tQ(>Oy1cnCb07&!^{qA_7K;AVW?W0Sb$*UgPajF z8szY2<$RH&aaX=@NcRr1$vJ>MR5pv)SCXxdrl}Lpnx;#L#a&NbplBk`v8g;=iG3}3 zhGrZ{9Yt?rBvbTNiK71kik6~3fUe%E=(mXN5;Tj{P9fJ;vgx8f;Z5AtJ5}^EsXPx7 zYnD9!bR4wk3A*Ux@6omHyI;|jWG*3JgK;6THZIVdp^I*tD*6iE#9hx1Q?)*#!8j?E z=V)SC(CNNwB{q8`o`@JB7Wi!ahu6FKIB=@a8Ze8;q62;3im% z@5M3*8>a!QzubIOp-CQq({#5y%BeVP&rc@C_=fOQsVwsHNIKM|LJ;y2LqaJhPb&hA ziv%ZU<%qIi?cVacT28RSHfTAR?x)AVJ!hpTDy_~B9H}gf0;~t-ZN*kAhwmN~Ue^Om zNo6c=O<5f$4)9c7fB%0wy-E6+G|*piKb&ms8Pp}dQ@ z-WBalw5cC?){Yki#n*XmUOz9dS$;!x<9q@RV34)HytwPV^Cg=S>D4ugi`X{Xg*J5t z&i+SevB&EQ^&#s*MZEgHwG`x+s%eP>iRPuS1upjU^+?3^s3MCKeu6?KIKBIyt5a69 z>n0q=`yWaq8oQhk!FmStQUO`n$V*k+cIFB)sqB#u0$-N%yB1P(#PyX-K5{nFFXa-E ziUm&9q#Tzdk-{CF>qg2RiTK*lqEuT6mRI?f;QsXDJrWJ^S}YctW&S3 zap>O^(;->R`N?9+6UFR6^v7L;bukN*#hlVoOfg4-_9Iu|2wkl}vb-)Ndim;7IyiDR zc`j#JP5h?gnjVxH#ddz8@KCFd0hU_LQN?ZXOLZEWYIthV^mVM&`2&fMhB<}fdrskq zyY45eY|c=eKD~HW{;*(sC=|W~I93}O%?G|PDo8?tIEF%!IJ+Ts9AsHZvIZ-%0(|CM z243D8Dbt6w^VGNSxNcIvQX>9Q`mw?w1M74ZYh=`%eXq1LpPL>@hYCCUs7Ll&++WMk zRZe<*3_U6SDye?N^?OlirB}VkbJAWa&<6!^FnZ$8iZ~y%0gU(hbjZQ%qt@rcwti0k z{(8>WGOt7GcVTX6hE2pdg)tR(y&{z9$>#*X@~-4DY+Ue15qxVOVnKu<+hDvwY%5b+ z(+Pqta>f8P9{Fbby}0YA^8`aP5hi`f=QoxsDb zh-8bnj=f9(X`u^v8yldSC)8Xy?OHO(t~U5QS|F9Z9Wh4fT(U@@8%UKy_QQqnR-%^* zymW;C8uLWM=dZ!IoS3e}2x4DKM|%cA;aMi1g2EYj4|P9}0GJLy9~3H^d8Cffli)OM z@H9Yl!eAaHTtEp8Mn_Wqr3(1v51{r{k1|04E{)T19Nrf1m} z5@GjFY6o@ehPQADD&OQ$*!_llwh%VH04T}sPZ^yJ#={_cfKk_A+(qm=LAzaaxWsgk z?AG{Cz92a%?c{vzl;~55|Lt??{Nx{QhkmoVN@@}5@eNPP-+M57ya2Wy!X7_k5U;Su z|0Q=$bYmuw^OMK&c%VJb`N=0slZ!omu=A7Yi*LxV2phlNFPxddRl0A3aW%0&!=vOh z&~m~u-Y6{;U7y~I{Vm|=AyRzUXeO6%opY#-$r|y~3h^;qa{z5HIuko1g?P?N!Nnx$ zs<|(PY)zR$_EH+MV>Ge~g=`a`T@|vG#M}o#wy=e=*&5lSX~;HJD*PQ4GAZ{og={*p 
zAqPRWwgp+CM)p0B$!gYUWXnlvFyuV93f0Ht47Q67g6!KCWS@Bz89$~W>!*=juaLz6 zovM(1MC`JIAp6trQ#_L$(KNtgKqe`R$q2;Xq>w$zi^H;n#`(l19|YM`Ey#Qt*+Xf_ z9@WS;lhj~bz_qJFR!VGomt;3dwWe{mAOnI~(HXl(ZpIZP(ayc(CVoHUNvR_69dK|$ zhLDZ1_!IB7f%zq|+rZuQIP#F3^}xwuk!?u5#pgN|`7%j>1z{yG`X!K~IvY|SNgpN` zOAdW-D~jZ?AKzZdy-=_e6m1eY1*hrA9}kpV#tA}`CP?Vk1YJa|MhG$$LB`(sRFWw` zD%n<7@&#^?+AKF!wr!l0h@;6HOUWVWZsL;@>1>8E79`^6c*f@m7_>wqFOd$jUe<*D zDz!>6tC9D;A)vu{mDpWE+cf$z6g&W&X4dNpUxHZzECf`7yki2nFl%}up3W?(nZO>J zSuc77!gyaYYi6BAR1?&j*gZnfr-H^dW(%_n;xKFL-Kt02CY0H(;b|4VI}xYCD>d>r z1-Y&;j*xeF|Bzt4O<{EjzjZZdgEQ49N)n_?HRa$FsfJW@ej=V;O`+-ES96V9AdK;n zS%h1I;V1eCWHcCqh^f+SV;U<*fGs9POU=Y^K$AOm#@l^qpdjJE3>@j^FWp_o|RwD`PM|rXffyC z6~hAgm~R^ta^l~1Vra15*s^fS^W{WW?M`p2bhG2&xXGh7)cc3Z{ zde%ciX&Oy=nm{#_j>UE*_K}D1^XQg`kM5 zW0JrNRi*lJve-zKE9nt!8pNY~KZo<+OrcFYr(xrrJ25;ur#Vh-qn0G;Gkh@M9HX(n z>!x4tqx5*rfc9uH5b+RDffM36o~0^*bCZA}$BE?dB2q*tJ&bai)=Ree3)+fv^e{4B zM#gC_o#I(~n6NqG`kc(My7>J0fKfCj__%ti8adyW$_lbN5I2cOfrY<7djJXnhXP>mV9Q0U5>RY7Zn)sy+tj~Eg9v4G) z5D#>qKcU-2C$m@2AGtHJxPMc)mR zw-*Boi>__S&S^h8U~C?ccJ`b-jUeHWv#aEgvpsDhC;9MS2}TY%o1_joYsZs&-i&-X zgzZe$hoo>Yii2_;;uh9wza&i0Qtx(&oBZWF5X(}tAB6$-`3%8bX~*1Qyt6L>w++>~ zUFG82lNSUOtHi8%T4fr{2$NIUn=lf@>Ylm#_Jzdwa|f=IrN_lmzO9Dpjbf$qtD%Ch zT$1%jRZhc}ks6o7?brigZ{F{4yP)d(&PRlhh^y@d(g>?mH@camrb+tsE&BCP_^RF= zp^Dl69Nq>)&RqKw6&H`;)CS{49z`<3@h_Ma#F;q5kMp*QTF45+Jnjsv4zSCzZx<=1 zzJuYcczbFNr*Heim7JM43F&p^_y<$MT4@4`-W3obP+--jT=R@q{EtQX;(70=}HdK8}N7)8^4+ROVAE z!B5^H)%9-_L?-9Y$84FZ`n&OSF>f9lSu zQ}EgM=%&e|53{X@FT5XB)t^!evv&}pFua+cjU&}-M{4Dz{2Y?a7x5g$1iSO1cO5Gf z_PA0Ama z9N^*OFt%xKP6E`walnd-9zjYUm7QV9U-;k`LX$snU2( z%!##?;S?AN?=REkoI^R%in74)s31)6{We|3y$6)hO_gD%%9u^;C(4LTkufOaNk*C; zhgbL0W3qf(^w>Y}qeabXL5cOn(0p zl^J$bRgzIliTx~8Tn*k0AoMBwgIuqwTPGb@t}|)xr)hk zAGy%%qWj)aWgXvOTmmd(h+KYRzmSVOv;ExoeOOXB+xiHU4}{lxe(y$xmCycXbnnBjH!m z7a5AO8HEDw^pG6uPMJG^x5AM;c^i)8a`T6umaPWGB6k`CSvdX6h@i zB%l^eKv#KVzVHH#aF}R>LlWhtG{R@5NV)9$O%)tW6r9swJU`pvxPpRXGxhmTYr9DnqK#2?asoe#tM6^TM(yd$?_e(@IbBlI(vMRZ}F;(!sKdTy>H6frI zj8iG(Dhj8Xm;1PdjVDQIFnVz94I}t{uD*Rh{|% z{$w&1Pia(oVKxoW*rN7j$6d?RYt_$X=Y5UZw4!@w3Qi@)U3Y0r1q#z7Y04Bt*`&DZ zI*n-_gey#q(i?QIOTj7qj^7~xRO^~)zpo-ct-`OW{4aIPQ<725Odo#i*&4?U9(Wf3R&6Bk-?Q0 zJ-dh0q{llaaTA?1(GX>t`@WKu39WQt+%*)K{Trg?H4RayULUj}QgDfeFf^vLhDbq3 zLwwOk3Q}T98X_VM;VW6Em0b0dXco?205qZLiXcSOEkx5r+nwo7X}g{=Np)At({z0o zYjG<=v{sthHb!2>O=-M1A8yD&)t!if+kmdiy?k@I-7L>vTJ~Of-YpO9l;XP`HOC75 z+QFRrk;Ed-%&X)#|6bhj7*C@2GS@QT%S`SS+}m*<#+?abAj=hHyScl$f5%qW+tu?XVdn;|-~N-)3|S3Y+cc~HDB0p{x^Vp!i#P@L zoCYJ4)?H_(z}}gj26np!gUB69vHp7sOzi6vws9Kl69w~C3T9C{suB(JrGhzDrb0#4 z>FIcm)G*&Em`hSHThj6DEKn8XBrg)=qGyAKksrO_7mQOd{nGLLO~c6X zV`g<5rbdnD^>jRQH4Gng0CRQ%MtGBSu@9g^}Y){q%A%5s?5`2gV>Z zWw=I3l8E{aYBZp;e*|uP0I5Pe#MVe50ab|eE#WM`pDXzVH#`yDw{05kHX8TOqJi&+ zdyc}r6UOKY{EOH|!JVsdU$1eWpTtdV1ovMBQG)K}GPy)tj~#%!m%{yr6z(vw_XPLr znkO!e`v=J{bW@_>K0gij%^G*<0k~JwdiXz5xKAebq2QKXKJa^)v~I+BAcU)CjGd1onC22pNOi{Q$2_|{+HnC3ycb3L|sm6VF5;xvy!TmrQ?jK}kin#0paCcU? 
zLn+)rVo|}pP-*xPLib=)Nw6`!ZsC1^0d6P8Xa_Pk>bfJgTAXe2;8Y=Ovjl8I)p*bxAAkOG%Ho zvJWWkE>)Z(RooBL9FE_?i>6+xDq@X^^cJ~(LjmIUleF2uotPbxvqf_DNX{meyk~YP z|1LTythLg7_7(Pm66`h<)H^7PZbNlv5$htQz94n7%Xdf5B!{-*dh)w+i9Fa_7p3x8 z#JZA4eo3r69yKIG(m33fuM5FA>Wm0!I1Cp5d@HH>^X;9*f^V_m}l>0{*PV+IarYuii<`qZ+yjN z*?}TN+ipg>7^=suQidIjm;)*jK{}0CPtdF1Tmti4l~K%3$$1%=#r$k~GnsQR@CZ|K z785&Ga;k1&8|5metbmcxQW@7r7wL@f@i4$iB%_1aaZ>Tgly{_5!7l%V--XByoJ`IO zp(}b*D%eNmU5Cmn<_?a=noFZ^@Py`522WFDxo)_C!rgmGod|UDd7`B+Xc`|xPTBO zJV0bXH#<3#1diAYLdi_^UB|&-p_i#f=#tg=P${A~NA4b|z0$%@j z0%c1M> z1pNIw#oxax7Jq+_-h%(2{{9L0`+qj8FOa6e z6P2MMxgf&#xT$<{Y@D;Mm)NGP{Ik^||6 zjL#|=GTvX3HDvsVW3u_t^-g}ikKb8p=48#WB3Ztj9Ax}949 z7VtaBbf{!Imp7*jyySY&Hf5QnFJk;#(@wd~68^Hu`FUzhLx1l+mCihKG~|k9$95=L z%)qq6sw>A%vG=lm!C>dgZyzdaZe<%0{c8DOs#-MQTptz@&Qw>nRToBb2eOyL5h5?J4L%SHj$(TWp0 z5+og|$lzoFNE$7~1ZFoOCZWdl^3`h2(l%ROchab@J85QpDIGe_iC(TZ)8U$br{OHU z&}~+aPr>y_hufHdgSsusbooRIRrn(5a0?S~IMLL)#CpV@dDk{U8ZnMTGE*m^3(JFT zdMHYxE~g)e2Q^y$51DIa2g3(o6nWENyhY3<0(QL`kNa__=?<0VEE5JrjAM`ECb}!y zvgoe2FrOw$G9KzFAg*bWB>JVO{A!VX4{T~Mu1S@5DY4E{-dK&hN!S=MRzbMJO}h*3 zW7BZ=)wq?NxgYLF6mBzxJA+t9!QDpVj%eIhCvlT1xK|ZWmgbPLB2S8Uy(UTf;r1xp zFI|=7)6>KZ!To1Jf@o4}2U%+EpqDPE4do=)4lYkC<56A4r~}IQh^EB;OO@dvc06Uo z%B1V-{(E&EzhjEFs)hm6STuS|QaNNzp#CW6rXQfqWs{kLK)81j>Jw-fmwnFQ7B(jH z4sEU%@1yf>RodK2KmquHH!0`-gUDib3`9r1u5>a~tCv8f`^nS9P7>N=N#J$CCpw@^ z-|fI^p#vV1$~7BUq!XlEN9<(DB})Rgt6ZgeNnkr=$sY42PEBF?0mW)Cuq2YJm{?z3 zfvZmkENuLZ6cnq&{q|bv$$tAy+}F`^wH)AQ2PUdhEj|{1WK8~4?WDQ}jY}`bj%oQN zRVBalS4R=&t#Ci?`cpTdL$>h;&ZQ&VA)`x{IoszFVRHqeQMPBKR;l@j^aN~_Pvte| ziz#qS`87NrxJk|^5wK#aEc^U^)p>>{^E@EWvXzOgD#O7$cM@V`b#f<>)_9W}yGBWNs(Fwdg?x`yvD3_XO8pp~ z*&Bgv_GWF4q}TJS=qg^bH?oGSKOdElxraA&2d>`yD|VV=_7JxD!=3U;nVg@+^>m_R zqJ-~dgKIV^Bl0_O<#z{3+(_8!H4E15^qLp1GYcAanin_l{H8pwGz;G1UcrL=#h;o5 ztGK--?+HIkdM%N!K;?RrKC9mYQ?ZZao#v6|@$Hnk&FuTx z&20kh%mSAD$9||YZ;-d&Q#zHCsAwq{*YZr2$+O!mP^=gu^z0<8-D!5do;r?sn}|=U z^PX7%%Xng&(aMVY^p!6ihrg4At;2+^ZpF<)#m&3~)AEILv-0^FM9nO}VwqQ1Hbw}? 
zTkeHA;jUZ*`CUnDBJ7Y&;<+S^QP&aq-AUX^*hmF7m<20^qZ^=lCC{tmp}{OzpJeZP zVYzwn5AvSyS(3ebb-KKZY1ZcSDlBq1lDySy{z);)ENGIu5EC`~#%^vGG|W+(V(|E% z@c7+y9xHgcGy@Df$LywnHH06`Q`X*axOvLX8=MsJy?M$PH)bu$h_yMbRyaFMS^%!m zAaIRl%7e}O{h($_U&>LNsJ!qbuVphx=<8wr@r&~xrz7+8f#vKpkRKl0Q)ch;*guv= zvTn4-a~e+IT1$-_hVr?x;b`vL%&k_W`y*t-M*P{4<8a*2+lmZ1!QzL3Z-0)r(JkXK zBgOr>K*3Hu!3uq`%?b}{<_Ck>K-p0Q?_z<$P?LQu{fpFA(df~bpK6$u+aIfurUw`w z)|z+8l2&;{w&H!v`NFUvJKVqBIS_@qW3${wblfMmlRElBb)9^nZ&57=!%2VSd=_~) zOm|?jZ~S@teB;?Zx04zw3qB`%m$N(Q+k3dr3WL@FYv?#AAU zQJsEN#~S}^{sQR^p!dUFW{L5wK+5Q=~0)H zX%=jTeVdU?w8X`$gr7tzHlo3Ed)w^0VtRWOzhFg-c32S|-BHkt-9c!kMaKfxS>c?P zrN&g}nbb6ACL~|HDuEoZ&Y997N};@Bqbf_vIb<2TI~=48VcoQxL>a=7nFyQx$^g9- zlKY<(^814oB2sKx_e3G!&zYImx{zE|NdA9T$jJvQ1X8^r#hZ_r&KCx33EnJQJ?~!{fJk?Wgi(5w|>i%@$95U3qxgMlaiTW%af^ zJhRagU+*z{Pg~0FJMJ?VmYKav7nIR4i42)1E$!TZlaQx-XE4yX{}DY??oU?e``;{m ze@O-+>8}-V_i{Y?WG@ElBrB}9lbiB;z4pb;_D%cjQzKcUk$jP?TWQwQ@8IISLpt=V zJF-;rjWi113UU4a+WQ{xsIKeIJCYCr1TzQ$0^9WAj6A|dvJ8r?fd5EHB9bl1Qz3|O zaArm`Pnt2KneqGqiF#WZ+gSyHKiNyOZ0jZh1Slzv^`&l06MtP{v$pJ#teiSqoONTZ zO$~b^Rz&SJZBvsJ1qq1NC2{31w@76BmUNU8O`XOb)=08mt>%T&%V}^8yC)M*OyE{n`JK zMGZ{0x6%7wYx^e~5fiDkKJ#vM_DhZ4x2cVg*`O?LP9JG(FKCng1#!C_*6_7DYsoJt zLu<*q9d*{wB%VLOK!(9?7mlpS%Xn4-ek3D61D3e zkn&ALV#9+X-UKbR_fGoPb^+$wpoD?PgLUWg6ft3x2YWW)`L`gtQI4!{;^`+S#P>n@ z6Ke?+1=AwR3;5S4fZ?_c^T4(XKuF%;xeErqYT`mB6?KUx@Y>!HH3M!a6qO&{fZZ%N=*v5+5&j-V{+*2vBlHFe zgT|qy!>h9BOIshqa)M?%*FJ%N7NW$)rEE{4dR((dme;PX) zn|D1y|Eu#9EjAy|V(4qTK>VOTyB@=2=^e~2?uIdwpBZf0{Nsn_zt;)>vEO~r5XcrO z5WE|6&!L=av;Vs3rt?h@1>U?kl>czkO)u1|+M+OpUqOXFrk1gARr8-92~yc`(}D$+ zY`~UV;uV>j(Sy;Esb3=gE?6Q``H!*Q!fK-)pMgYjtT?pn%XOQ7X??s3@3k8poBCWY z8ln2*(faI1W%sPj4%!=$fNiO=a9Ew}LItTJo8Sk>! zV%N}*!J-!5%*P><@v6{admkwZre^hzoR)(U-nL@H|5&d4N!ya=?1Ly0dgQbA5fy-w zp$ZJy$5r4d`-BP{woj?RLAwBFqAAz}L2Rz#7h_&;?0>ZmUr2kL3e$9`EwAy=-Iy*{ zVZL00`SP|8o4nby*^YLuLg%&R9H>8((}EDD!uK31B;3-R0~CX)GO+pk2_yg8Fj)pB z=SjlI|29mPfvLqf1(X6d%$9*`+%Ito7<5l6ItgqGBpC)zByE(yR=b}k&cscIZH-Tw zrp`q9B=nu;&g-GwwDFLKwK0ABrKumkH;=_l|ErUWQFWRmpi%KrZ-}RATU|dUJwK{` zHyj=ah$bp#R%c}A1+a|9w=btX%rvhQ`8$a`EgZG8I`wMO41=e4W1yfDl`?Z#A zYn;C5XvuzLVyZEFtZ^fYHQ+)CB z7-y@npMFWhV)8&!6-uw&|Blmz$(0GMP2LI2fQ)=06`_c~e!oFnE*GR0ywQkJtpr;D z8g@@F;YGv7{<=v2c0Z0u-IEi$6sL0N60)ErtYzcN4_BfVqt?S``_BRoF@Ade1|(qr zKryXk*ut=b;eLjL44-8<#_%G;j~EtysQBH;u$G~l;U0!YRj&Nf2q}2|zbJT&>7HZw z8pBHrvzYD(KO0;QGd{x5$Iyt6exyPVFnoxi5x<6?a~ZzN>Bks;i{VoYzsztS!#f!o z{7zq0q30RC!SG#%A2H1TXGK@Sa1+CNhCYT5F&tocn&DIbq|*O@pWkBmKEphg^J<1S zGTg+lnW39uoZ&%+gA7L)exKof&NsyHYh0gtelBNtJ;Q|zKYU-6_g#knjp2(7zsc|q z81^!JfMFxUN`^Nuv@yK$3&nq&;TeWcF?^ii?=o~VY-V^n!|NILalMxC^G8<{{|MvV z3?~@x3;F$o$<473^j@w@uMiF?7DIo-^_79-rn32E$%YJP;1yw<~6$ z!%&xYg%Vwno-%JZs+YOC62W-D6ODR$T!}7E90xvd`LAwisov?Tb~eBfJ`^`SYoTyl z>j)=8zH~Y*5R*~Po*GXimI&%vxcvd$8^>=brtu@X=IxD*S|kj{I{bF%>I(ZPi8tI8 z2?TZ2snZh(#o}7^mIiH)9*v=LT6uSIx7MDBYjHn#7}#)!CM!dHiDC|Z&R?J37O>ok z7gQQIY14`s(-NWGq43_2riXk14_ZK7v(pCy@i=O#$k)sI`1Cjggkp?IK62W!{%t=pSxs9nE-Gi2%OJ9Cfyw>57Z5j`2 zfv!k63Y9R`LkLnhQ@v1ZQnBfJR6mU_U5{|cDZ2ES-$iQe^7h32;ZT`35&?QTKGGvK z#pMcw0&$nCEYdCYIo=b|jc`XWOn#Fh6pnUzg4pCjxKoe2)U(Xx6Mvpo{>%(fhCr96 zlXSrw4u+#}wL{TFJR0C~VMU^zJ)zE^j%WdAx#{(vf$wjF4$9}#qo^CPR#g-@d7KeX z0-K4xa~Ax4U^El`ePA>r9*kzf-#ZI`s_RVjsM4>6cg(_v_)lw}88Rd~Sf_ZvE5$G3 ziAa#z3+e>f;usfUv{$ly!qGry0G6W|5$LdSzpFh#b^v7mOZky~PBkY!jDmFLXK%)H z2tC3aPyNsQjK?{jr^z9M0qURBN8)Fe2R)nXPtSAlbN1XTM-$iQ_4pPkolV-FfWG&R zJERTpL^~5*WH(_v!tJqe5Ehl#vfT>#IG!iyoP-@oq-ZDr0 zhB@Ll&JkZRNBn1HyrVfB(pzDa96X#-4aq#hZbv=sQm=?`R=QZCJ?4!DB5_4r*Bwzq zO>x;Rx5n0MonTd5wxPV7loFS+%wY{=MIdy#uLRyFv?@x04m~v(V~jr 
zB0LO^>I`^>x~OuvV!9{l^@FXNTC#qqQPq{4kj&N&O=>>p$c%Sreh;QJ!6;@GR7FfT zRO9NtGOa}~rO`Ox2?kIg@*-Fna7a1@ha!@4<&x6|N+E156T#J~X4 zW$29Ru-sf2Qj((n-^7Gv>T0Vn$h)y1Dzp?I@N|zXR>QpS75py_k?`ZPoj`A$jzxpGFYW>J+%@? zvDdLqY){oKJwC}~lSY(Mc=+dt1v*2RadY;95Fu>&SAncg3_WJ8jMou6w(n4 zhdL3N!YhQtqd&_?;*rdcaH$9ft$>1hwKoY$N9MCZC)uVVRsN}G z56Leuq&k!Qu=v47m`+l2ewqcwd55d|!g=oun_O)f^`<##zjBXN)h2tz$lN z4MMDDEca#&T8_ncKo4S2kSoJ*M@LM@5Du$Cbth=ZM3$(=6QW*)5yyv?is})a7K5-T zSnbECwrRb^RK8zb-()$H_3_Z!y~{I=rK(u6>zVo~rsWzWnN|Q(i#pa(6Ew=za;aJ^ z8+L$dlpGQ;Flz0APJ@K>A0r=cD~K(r7&ao)5~~IIZ0VJ~@^~Li1e8VJq$TU@_#KDl z=^z8;Xk~rjcB4UVZrG5 z-V@i0W9!gG*kH<@kR+A*Y$lXifO!<=A<*pY4r54F9n#27XnXyTn%pBG^Y6j(o0iW> zgCJ+wQ62j_acyq^QBXEL8pR+59ZF8lYp}(jGK_ixF?|z_T-wIu(B#qLFz9l>K|@NK z+H&y(Z8Ug7UhIo>d$9zf?Vc`Aj~o&Fo;}iV=&(3YUNt}(={TQMDsRw(JriRvguO|$ zD-`6o#S_!(4E~CPDWNA3gGgmc3_%#8wT@`GDud*7L04Aa|?jUWl%9$lwFRNYLLM5E5UFM4ab=xf-4ny#v z9%CAktdd!0_d!|tnswcgu|An?UcvUs8xGM5k(VMGa^*#d0%Zyqx(hwZn4a)_$e@$f z8ACg!+2~)SNCE85(LNYVMaUDSXEGF+WVQ#;|9i?(otm0dHhhid$0Svc8YZ66hO#RG z%rKRd5$Warw=0iSM#wL$1=07QrD51*C0hhBN8xspW1O-Y|0Z(6*j!Q^+f;05j@J`% z#FIq}>78f_^f;sV$&wm~nd?K7995yY_!*S5wHFT?w9A^@Sw@-BHF;U)Ko7?%*^y~@ zc_m~@Bnw3Qir7}fh|3*56b@l=6W2MHeESEgyLei%2Ugw$AyKGQs?*%zkSW@|>= zFbObk@zSiBv>nTUsNNOc149Gz?9nAmsP|v3re{3lCs(eX}98f zCx$fS9n~=`Msd-IvROo2Ig(25Vk6ea=KhyI6zQqL1Vv^;*Kz1W7l3TM4*R0T3pj$jZ+b91w&bt zrtYBC7-vV_hFoc5$oBZNGMqe(TU=zFnc5b9LtXnU|Y##BcdWM!;+?;<`s{=*-72$HkQK{(o7 zc^mf+38St`I+XkkTzNvBGiI_LdqMTT3k);k#c!xQ)AE@EG0x?iU^vNe&^mLy=3w`) zjy8*9V1Y%HyaiTt&vFrcl_}n73R4o|VO(@jOW`Wqk}o~*m7)@&5@{--H#jq%c8vos za9@}qP7==&M?8(oTn@^|eAz%}P@3rmGU#T@pVF3?XboPJ2k|C8IP;$M%0|rK40X!_ ze$<3F^L2x7H+bTVeA-2El$I`YeTd$;K3~(?C~QJZ{4rjVjAJEk?+Q`e#CL_b70|%h z%ge=z=Y)7NiNjIjs1Rc)bXmaXiF@pFb02+9ln_mMxwixJm?=Ex(Q{1;oFr?i5foR` zarY5UoHNrkNV>dqoTRg*;hd7LC>(_wC!~Yo{q%@85Bmqj9gjp<3qAmxjAf<}qN#wJN$JX*fkU2%HnbD(hBd znfe*zhkVpCeH9z+@(=jw8N%s`#tDLh^sL%o1JS$zTww~gk#JMMncAv?aO5{~X+bhA zH8(B`bQL%j2kC7uu<{w=pgygop5=f)0^BH-yFI2R0GdTSnv3TMGby(SH>HWjqF#Hs zJcM!scO-?A<;LT$yezp{B6mYpxn*IFSXhxO7H-ImSaK{PXFp0!aEnTg<`YZtWt-*k z1!B2xzE}?0xM-U3uL{Z9r1ZNA}^W~$#z2?i*v-{iUnfvhFs?YlEd{v z7<}Jbjq{spV2i<*pan0pXE_1!fuKTsin6eEz9N{e&<&4NB8kslp zt(YgQ1<0F(g^QqS10@?SYq`-rt5#)+`IchQ2peJpvC%ebm!loV8Vnxh^cJb>i=gWZ(SFeRmZcL?R;}oi z$agdDGtA5UylPFZ$VWTPi{!Yegvs{82d9>`=8I(&i^Q@P>%`(nzT2{Tp;(Q4SEKB! 
zp);!?@6{VtOr-V2OkK@a_g za^2K56RXBojxLW_3i3oj>q5{k5Ct2SOyoxvxn+H?K(5<_h~XZ7SG7(0Gh~-RW68@F zdHZvwWcz$IUwqZKSloa&h1R|XyKyd0ERHWho9Bzjb|D_vf#l5B{4Dj8`XuY=oY1gi zL{I7Pb*&YsLFbD_c|eFmxDUUF zx)$V!f{S*M=etqjNL zEtdIYTb8o@;W2>pp(WX$BlDa&!f->t%b$0SUBQNoN?VN92L#NVW}5IQy2@z3r20oW7Um|#I+TgxYkl6(mAO1A0Zz0 z#ljxHRoH!>750h>VMjUBcoZ)bD_U1VrwYZ2AIKVvQXio*xGBB4@2y`T)*rEm8~U=u z%I<7Y;Li~`$3U|hef>z5DD2A?OS*H!0{=Xb^E~jfUlyQWE=9kziM3Cr`sLbXxnddk zEC!!>;4}C%?Ay15u>VZS$^2|tmMxZ{-1#Ke1J{cKR~L%?lZ(W@3;7~CwpcuXANOe= zw&FWuHc{WVRMd1Ah|T_GV&jz(;kdX?Xs_KU3eVjnZ1MFX-&ZPfD$2#gxA8{HpTH-H z^D{pu?J>FE*h(UA|i@u;Jhn&YlMHHk@H|#Ch4sRfxa)C3NpBWcKqEZp8FhY!3x!W8Bq1UT2}LI1JsU zoB>4*rP$_*g(7ecjCY8S@!aYspRR3j9I}yIoiNE34FLt{2Jgv=) zwh5(T>OC>Pc#>+C@*7Mx5dA#vC6tC5YlnKnLUg9m0;@FP`?4MBtR6W?kD;5W4&>7; zrF{%_tPObM)Oh5zV3gx{5)VmF zr!m9R^lOi<{13iv8%p{o*yX9+9AZ(42(>Be-3nC5}^2`KCXM{Z5pY$ zM+{1iGRilF{I~`54p=9c6tNMsTX29AbEWK5b;!4kbnih^4QlArADymLm032fIt2SM zaYgry9MqX|PL#X6?i)`J&tx)jwZQ$13(6yjQ{yYy2+;mKPSE!3Q9UD+vyXH08I zD2wvvApyds_HfcD5Rpzm8*eM~kzdc_S$DbG)X|nprxVz!Slm}twX?d-S+}LO3j4QB z+ji7AZ>cC(^gqmLi3cGrI(x3cu_yJ$Hwf1l4)0Dx4B6J<+!pTCZ&S>UKs2841hD&odKdlarI)LbzU&L+JkPw0L6HtNdt^cvPROuX8aHlGGOrS`INY`&pX+f3jP`Nq#!02iN6!4=9_%}0#(44W7wylx zTs5v3UJwX$1iUUKcyO`^S50>}3K>QpVDBt=ATt;8XTZUKN7&%+g7-Fh+1}jR&{UVyjL!+AfNMv<2lsOtw;@y@89nyruM(Qq@f|DByDxF-MDkC%k$f*dhM2fGcn~+JP^}TVfAJG3&}m2}eMG zTFUel%coDDqy)ZqzxIO;{1V%SV{exJr<+oH60}D^dm%S#oZD3e32(lPXY2>|8fhpV zdx`SdOiSUb?6(pbQt`B}V!kenDg3}!XO5@6jQ^eMBIj@9iCxQ2>}r{TlOdlOqS8>k z-t%iuKUQ?N8xahR@=h^b$=@ov&&)!1mHidE znJzzrPFNI28}l1`Q}MHB&>8toGQUwSANiG}UCghD`Au@Z*m+LBm~M>uoo4xvPN!W= z*T?*9%nwI==@-*Enazkt+zWBIi`o(&BmHCZwJ&Mz*46z75(-qB;?*P+{&ruJJ z>7JUS|7H|t*2hcdl)W_wKFRT}!C-2FhCGb=8uA|DdKr3dBr*Dt(S8PfCY>?nPcWZM z{!Yf{G0e=*u#1L#jd8&!2mP)MU6mQ~D!Z$C2Ye!3a?-BWiL=rrS15DPNq+Jx7g!Rw z)I!uqGE@G^l}S~V%=;7lhk;}xZt_EN10KgcjIe-Vrn9SO5hi#)Zb<|9M;smm{5J0A zKr;^bbKGMHUjZyHQ1tjtjPP>U2ly;*q9^zYI{kTs&jUIOaklgK3O)>D??d=ez_Yjy zBYY0PqcGt2v9`F1u-R{h{Ar3yvG4Pag0~`Efp8Vz0Pb3Z2LYFs3DJnK z+5g4tA47gJ!#Bf!6f^{1!+jWGf|qd*A#C=6A>WuE;yw>N!Ns?rTnKA`pWA@5LxigU z-`t3`JA6ONDWh5T49wIU7h1V6?-jxfPp+fh!0&3-84pW?p{^?OgjUwjez zg0R?yeu}#i;SsE^2-xKml`3U!GpeMK?3^hlX;9ormJwce@ zMcjGt;V}D}kk3hV4E_bc6C8{~PY@mfY)GJgAnXJ@gS#7Hu}6rL`%qqlrvU%rcZ7Hf z;ZeX3dZ8N#+a88|_Ct3OHv5W@56LUI&jU~JE!-0b6Kv`OKZMPGAmktN`v=gTKUMG# z`k^NXV@)P1o=!})5}UpQ`2bI_2KRY{oq%_L z7kw6C@*4@@COZ}cdrwiRBy+q2V9Hw#_enAdw35dzJ7pgt&y8W*1uAuQ! 
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cpp b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cpp new file mode 100644 index 0000000000..c64e963201 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cpp @@ -0,0 +1,8365 @@ +/* Generated by Cython 0.29.6 */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. +#else +#define CYTHON_ABI "0_29_6" +#define CYTHON_HEX_VERSION 0x001D06F0 +#define CYTHON_FUTURE_DIVISION 0 +#include <stddef.h> +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#elif
defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif + #ifndef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) + #endif + #ifndef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + #undef SHIFT + #undef BASE + 
#undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include <stdint.h> +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) && __cplusplus >= 201103L + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__ ) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif + +#ifndef __cplusplus + #error "Cython files generated with the C++ option must be compiled with a C++ compiler."
+#endif +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #else + #define CYTHON_INLINE inline + #endif +#endif +template<typename T> +void __Pyx_call_destructor(T& x) { + x.~T(); +} +template<typename T> +class __Pyx_FakeReference { + public: + __Pyx_FakeReference() : ptr(NULL) { } + __Pyx_FakeReference(const T& ref) : ptr(const_cast<T*>(&ref)) { } + T *operator->() { return ptr; } + T *operator&() { return ptr; } + operator T&() { return *ptr; } + template<typename U> bool operator ==(U other) { return *ptr == other; } + template<typename U> bool operator !=(U other) { return *ptr != other; } + private: + T *ptr; +}; + +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 + #define PyMem_RawMalloc(n) PyMem_Malloc(n) + #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) + #define PyMem_RawFree(p) PyMem_Free(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif
PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact + #define PyObject_Unicode PyObject_Str +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + 
#ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include <math.h> +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + + +#define __PYX_ERR(f_index, lineno, Ln_error) \ +{ \ + __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ +} + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__mesh_core_cython +#define __PYX_HAVE_API__mesh_core_cython +/* Early includes */ +#include <string.h> +#include <stdio.h> +#include "numpy/arrayobject.h" +#include "numpy/ufuncobject.h" +#include "ios" +#include "new" +#include "stdexcept" +#include "typeinfo" +#include <string> +#include "mesh_core.h" +#ifdef _OPENMP +#include <omp.h> +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include <cstdlib> +
#define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + 
(likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for 
GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime = NULL; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + +/* Header.proto */ +#if !defined(CYTHON_CCOMPLEX) + #if defined(__cplusplus) + #define CYTHON_CCOMPLEX 1 + #elif defined(_Complex_I) + #define CYTHON_CCOMPLEX 1 + #else + #define CYTHON_CCOMPLEX 0 + #endif +#endif +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #include <complex> + #else + #include <complex.h> + #endif +#endif +#if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__) + #undef _Complex_I + #define _Complex_I 1.0fj +#endif + + +static const char *__pyx_f[] = { + "mesh_core_cython.pyx", + "__init__.pxd", + "type.pxd", +}; +/* BufferFormatStructs.proto */ +#define IS_UNSIGNED(type) (((type) -1) > 0) +struct __Pyx_StructField_; +#define __PYX_BUF_FLAGS_PACKED_STRUCT (1 << 0) +typedef struct { + const char* name; + struct __Pyx_StructField_* fields; + size_t size; + size_t arraysize[8]; + int ndim; + char typegroup; + char is_unsigned; + int flags; +} __Pyx_TypeInfo; +typedef struct __Pyx_StructField_ { + __Pyx_TypeInfo* type; + const char* name; + size_t offset; +} __Pyx_StructField; +typedef struct { + __Pyx_StructField* field; + size_t parent_offset; +} __Pyx_BufFmt_StackElem; +typedef struct { + __Pyx_StructField root; + __Pyx_BufFmt_StackElem* head; + size_t fmt_offset; + size_t new_count, enc_count; + size_t struct_alignment; + int is_complex; + char enc_type; + char new_packmode; + char enc_packmode; + char is_valid_array; +} __Pyx_BufFmt_Context; + + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":776 + * # in Cython to enable them only on the right systems.
+ * + * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + */ +typedef npy_int8 __pyx_t_5numpy_int8_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":777 + * + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t + */ +typedef npy_int16 __pyx_t_5numpy_int16_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":778 + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< + * ctypedef npy_int64 int64_t + * #ctypedef npy_int96 int96_t + */ +typedef npy_int32 __pyx_t_5numpy_int32_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":779 + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< + * #ctypedef npy_int96 int96_t + * #ctypedef npy_int128 int128_t + */ +typedef npy_int64 __pyx_t_5numpy_int64_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":783 + * #ctypedef npy_int128 int128_t + * + * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + */ +typedef npy_uint8 __pyx_t_5numpy_uint8_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":784 + * + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t + */ +typedef npy_uint16 __pyx_t_5numpy_uint16_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":785 + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< + * ctypedef npy_uint64 uint64_t + * #ctypedef npy_uint96 uint96_t + */ +typedef npy_uint32 __pyx_t_5numpy_uint32_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":786 + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< + * #ctypedef npy_uint96 uint96_t + * #ctypedef npy_uint128 uint128_t + */ +typedef npy_uint64 __pyx_t_5numpy_uint64_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":790 + * #ctypedef npy_uint128 uint128_t + * + * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< + * ctypedef npy_float64 float64_t + * #ctypedef npy_float80 float80_t + */ +typedef npy_float32 __pyx_t_5numpy_float32_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":791 + * + * ctypedef npy_float32 float32_t + * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< + * #ctypedef npy_float80 float80_t + * #ctypedef npy_float128 float128_t + */ +typedef npy_float64 __pyx_t_5numpy_float64_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":800 + * # The int types are mapped a bit surprising -- + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t + */ +typedef npy_long __pyx_t_5numpy_int_t; + +/* 
"../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":801 + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong longlong_t + * + */ +typedef npy_longlong __pyx_t_5numpy_long_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":802 + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_ulong uint_t + */ +typedef npy_longlong __pyx_t_5numpy_longlong_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":804 + * ctypedef npy_longlong longlong_t + * + * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t + */ +typedef npy_ulong __pyx_t_5numpy_uint_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":805 + * + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulonglong_t + * + */ +typedef npy_ulonglong __pyx_t_5numpy_ulong_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":806 + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_intp intp_t + */ +typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":808 + * ctypedef npy_ulonglong ulonglong_t + * + * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< + * ctypedef npy_uintp uintp_t + * + */ +typedef npy_intp __pyx_t_5numpy_intp_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":809 + * + * ctypedef npy_intp intp_t + * ctypedef npy_uintp uintp_t # <<<<<<<<<<<<<< + * + * ctypedef npy_double float_t + */ +typedef npy_uintp __pyx_t_5numpy_uintp_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":811 + * ctypedef npy_uintp uintp_t + * + * ctypedef npy_double float_t # <<<<<<<<<<<<<< + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t + */ +typedef npy_double __pyx_t_5numpy_float_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":812 + * + * ctypedef npy_double float_t + * ctypedef npy_double double_t # <<<<<<<<<<<<<< + * ctypedef npy_longdouble longdouble_t + * + */ +typedef npy_double __pyx_t_5numpy_double_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":813 + * ctypedef npy_double float_t + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cfloat cfloat_t + */ +typedef npy_longdouble __pyx_t_5numpy_longdouble_t; +/* Declarations.proto */ +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< float > __pyx_t_float_complex; + #else + typedef float _Complex __pyx_t_float_complex; + #endif +#else + typedef struct { float real, imag; } __pyx_t_float_complex; +#endif +static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float, float); + +/* Declarations.proto */ +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + 
typedef ::std::complex< double > __pyx_t_double_complex; + #else + typedef double _Complex __pyx_t_double_complex; + #endif +#else + typedef struct { double real, imag; } __pyx_t_double_complex; +#endif +static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double, double); + + +/*--- Type declarations ---*/ + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":815 + * ctypedef npy_longdouble longdouble_t + * + * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t + */ +typedef npy_cfloat __pyx_t_5numpy_cfloat_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":816 + * + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< + * ctypedef npy_clongdouble clongdouble_t + * + */ +typedef npy_cdouble __pyx_t_5numpy_cdouble_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":817 + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cdouble complex_t + */ +typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":819 + * ctypedef npy_clongdouble clongdouble_t + * + * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew1(a): + */ +typedef npy_cdouble __pyx_t_5numpy_complex_t; + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) 
!= NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* ArgTypeTest.proto */ +#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ + ((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 1 :\ + __Pyx__ArgTypeTest(obj, type, name, exact)) +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); + +/* IsLittleEndian.proto */ +static CYTHON_INLINE int __Pyx_Is_Little_Endian(void); + +/* BufferFormatCheck.proto */ +static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts); +static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx, + __Pyx_BufFmt_StackElem* stack, + __Pyx_TypeInfo* type); + +/* BufferGetAndValidate.proto */ +#define __Pyx_GetBufferAndValidate(buf, obj, dtype, flags, nd, cast, stack)\ + ((obj == Py_None || obj == NULL) ?\ + (__Pyx_ZeroBuffer(buf), 0) :\ + __Pyx__GetBufferAndValidate(buf, obj, dtype, flags, nd, cast, stack)) +static int __Pyx__GetBufferAndValidate(Py_buffer* buf, PyObject* obj, + __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack); +static void __Pyx_ZeroBuffer(Py_buffer* buf); +static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info); +static Py_ssize_t __Pyx_minusones[] = { -1, -1, -1, -1, -1, -1, -1, -1 }; +static Py_ssize_t __Pyx_zeros[] = { 0, 0, 0, 0, 0, 0, 0, 0 }; + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, 
type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, 
PyObject *arg); + +/* DictGetItem.proto */ +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key); +#define __Pyx_PyObject_Dict_GetItem(obj, name)\ + (likely(PyDict_CheckExact(obj)) ?\ + __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name)) +#else +#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key) +#define __Pyx_PyObject_Dict_GetItem(obj, name) PyObject_GetItem(obj, name) +#endif + +/* RaiseTooManyValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/* RaiseNeedMoreValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/* RaiseNoneIterError.proto */ +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +/* ExtTypeTest.proto */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); + +/* GetTopmostException.proto */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); +#endif + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* TypeImport.proto */ +#ifndef __PYX_HAVE_RT_ImportType_proto +#define __PYX_HAVE_RT_ImportType_proto +enum __Pyx_ImportType_CheckSize { + __Pyx_ImportType_CheckSize_Error = 0, + __Pyx_ImportType_CheckSize_Warn = 1, + __Pyx_ImportType_CheckSize_Ignore = 2 +}; +static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); +#endif + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* PyDictVersioning.proto */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if 
(likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* BufferStructDeclare.proto */ +typedef struct { + Py_ssize_t shape, strides, suboffsets; +} __Pyx_Buf_DimInfo; +typedef struct { + size_t refcount; + Py_buffer pybuffer; +} __Pyx_Buffer; +typedef struct { + __Pyx_Buffer *rcbuffer; + char *data; + __Pyx_Buf_DimInfo diminfo[8]; +} __Pyx_LocalBuf_ND; + +#if PY_MAJOR_VERSION < 3 + static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags); + static void __Pyx_ReleaseBuffer(Py_buffer *view); +#else + #define __Pyx_GetBuffer PyObject_GetBuffer + #define __Pyx_ReleaseBuffer PyBuffer_Release +#endif + + +/* RealImag.proto */ +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #define __Pyx_CREAL(z) ((z).real()) + #define __Pyx_CIMAG(z) ((z).imag()) + #else + #define __Pyx_CREAL(z) (__real__(z)) + #define __Pyx_CIMAG(z) (__imag__(z)) + #endif +#else + #define __Pyx_CREAL(z) ((z).real) + #define __Pyx_CIMAG(z) ((z).imag) +#endif +#if defined(__cplusplus) && CYTHON_CCOMPLEX\ + && (defined(_WIN32) || defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5 || __GNUC__ == 4 && __GNUC_MINOR__ >= 4 )) || __cplusplus >= 201103) + #define __Pyx_SET_CREAL(z,x) ((z).real(x)) + #define __Pyx_SET_CIMAG(z,y) ((z).imag(y)) +#else + #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x) + #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y) +#endif + +/* Arithmetic.proto */ +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq_float(a, b) ((a)==(b)) + #define __Pyx_c_sum_float(a, b) ((a)+(b)) + #define __Pyx_c_diff_float(a, b) ((a)-(b)) + #define __Pyx_c_prod_float(a, b) ((a)*(b)) + #define __Pyx_c_quot_float(a, b) ((a)/(b)) + #define __Pyx_c_neg_float(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero_float(z) ((z)==(float)0) + #define __Pyx_c_conj_float(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_abs_float(z) (::std::abs(z)) + #define __Pyx_c_pow_float(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero_float(z) ((z)==0) + #define 
__Pyx_c_conj_float(z) (conjf(z)) + #if 1 + #define __Pyx_c_abs_float(z) (cabsf(z)) + #define __Pyx_c_pow_float(a, b) (cpowf(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex); + static CYTHON_INLINE int __Pyx_c_is_zero_float(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex); + #if 1 + static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_pow_float(__pyx_t_float_complex, __pyx_t_float_complex); + #endif +#endif + +/* Arithmetic.proto */ +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq_double(a, b) ((a)==(b)) + #define __Pyx_c_sum_double(a, b) ((a)+(b)) + #define __Pyx_c_diff_double(a, b) ((a)-(b)) + #define __Pyx_c_prod_double(a, b) ((a)*(b)) + #define __Pyx_c_quot_double(a, b) ((a)/(b)) + #define __Pyx_c_neg_double(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero_double(z) ((z)==(double)0) + #define __Pyx_c_conj_double(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_abs_double(z) (::std::abs(z)) + #define __Pyx_c_pow_double(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero_double(z) ((z)==0) + #define __Pyx_c_conj_double(z) (conj(z)) + #if 1 + #define __Pyx_c_abs_double(z) (cabs(z)) + #define __Pyx_c_pow_double(a, b) (cpow(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eq_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex); + static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex); + #if 1 + static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex, __pyx_t_double_complex); + #endif +#endif + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) 
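+/* Descriptive note on the FastTypeChecks helpers: on CPython builds,
+ * __Pyx_TypeCheck above bypasses the generic PyObject_TypeCheck and dispatches
+ * to the inline __Pyx_IsSubtype declared below, and the two GivenExceptionMatches
+ * helpers do the same for exception matching; the #else branch a few lines down
+ * falls back to the stock CPython API calls on other implementations. */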
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from 'cpython.mem' */ + +/* Module declarations from 'numpy' */ + +/* Module declarations from 'numpy' */ +static PyTypeObject *__pyx_ptype_5numpy_dtype = 0; +static PyTypeObject *__pyx_ptype_5numpy_flatiter = 0; +static PyTypeObject *__pyx_ptype_5numpy_broadcast = 0; +static PyTypeObject *__pyx_ptype_5numpy_ndarray = 0; +static PyTypeObject *__pyx_ptype_5numpy_ufunc = 0; +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/ +static CYTHON_INLINE int __pyx_f_5numpy_import_array(void); /*proto*/ + +/* Module declarations from 'libcpp.string' */ + +/* Module declarations from 'cython' */ + +/* Module declarations from 'mesh_core_cython' */ +static __Pyx_TypeInfo __Pyx_TypeInfo_float = { "float", NULL, sizeof(float), { 0 }, 0, 'R', 0, 0 }; +static __Pyx_TypeInfo __Pyx_TypeInfo_int = { "int", NULL, sizeof(int), { 0 }, 0, IS_UNSIGNED(int) ? 
'U' : 'I', IS_UNSIGNED(int), 0 }; +#define __Pyx_MODULE_NAME "mesh_core_cython" +extern int __pyx_module_is_main_mesh_core_cython; +int __pyx_module_is_main_mesh_core_cython = 0; + +/* Implementation of 'mesh_core_cython' */ +static PyObject *__pyx_builtin_ValueError; +static PyObject *__pyx_builtin_range; +static PyObject *__pyx_builtin_RuntimeError; +static PyObject *__pyx_builtin_ImportError; +static const char __pyx_k_c[] = "c"; +static const char __pyx_k_h[] = "h"; +static const char __pyx_k_w[] = "w"; +static const char __pyx_k_np[] = "np"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_ntri[] = "ntri"; +static const char __pyx_k_nver[] = "nver"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_image[] = "image"; +static const char __pyx_k_numpy[] = "numpy"; +static const char __pyx_k_range[] = "range"; +static const char __pyx_k_colors[] = "colors"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_normal[] = "normal"; +static const char __pyx_k_vertices[] = "vertices"; +static const char __pyx_k_triangles[] = "triangles"; +static const char __pyx_k_ValueError[] = "ValueError"; +static const char __pyx_k_get_normal[] = "get_normal"; +static const char __pyx_k_tri_normal[] = "tri_normal"; +static const char __pyx_k_ver_normal[] = "ver_normal"; +static const char __pyx_k_ImportError[] = "ImportError"; +static const char __pyx_k_RuntimeError[] = "RuntimeError"; +static const char __pyx_k_depth_buffer[] = "depth_buffer"; +static const char __pyx_k_get_normal_core[] = "get_normal_core"; +static const char __pyx_k_mesh_core_cython[] = "mesh_core_cython"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_render_colors_core[] = "render_colors_core"; +static const char __pyx_k_mesh_core_cython_pyx[] = "mesh_core_cython.pyx"; +static const char __pyx_k_ndarray_is_not_C_contiguous[] = "ndarray is not C contiguous"; +static const char __pyx_k_numpy_core_multiarray_failed_to[] = "numpy.core.multiarray failed to import"; +static const char __pyx_k_unknown_dtype_code_in_numpy_pxd[] = "unknown dtype code in numpy.pxd (%d)"; +static const char __pyx_k_Format_string_allocated_too_shor[] = "Format string allocated too short, see comment in numpy.pxd"; +static const char __pyx_k_Non_native_byte_order_not_suppor[] = "Non-native byte order not supported"; +static const char __pyx_k_ndarray_is_not_Fortran_contiguou[] = "ndarray is not Fortran contiguous"; +static const char __pyx_k_numpy_core_umath_failed_to_impor[] = "numpy.core.umath failed to import"; +static const char __pyx_k_Format_string_allocated_too_shor_2[] = "Format string allocated too short."; +static PyObject *__pyx_kp_u_Format_string_allocated_too_shor; +static PyObject *__pyx_kp_u_Format_string_allocated_too_shor_2; +static PyObject *__pyx_n_s_ImportError; +static PyObject *__pyx_kp_u_Non_native_byte_order_not_suppor; +static PyObject *__pyx_n_s_RuntimeError; +static PyObject *__pyx_n_s_ValueError; +static PyObject *__pyx_n_s_c; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_colors; +static PyObject *__pyx_n_s_depth_buffer; +static PyObject *__pyx_n_s_get_normal; +static PyObject *__pyx_n_s_get_normal_core; +static PyObject *__pyx_n_s_h; +static PyObject *__pyx_n_s_image; +static PyObject *__pyx_n_s_import; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_mesh_core_cython; +static PyObject 
*__pyx_kp_s_mesh_core_cython_pyx; +static PyObject *__pyx_n_s_name; +static PyObject *__pyx_kp_u_ndarray_is_not_C_contiguous; +static PyObject *__pyx_kp_u_ndarray_is_not_Fortran_contiguou; +static PyObject *__pyx_n_s_normal; +static PyObject *__pyx_n_s_np; +static PyObject *__pyx_n_s_ntri; +static PyObject *__pyx_n_s_numpy; +static PyObject *__pyx_kp_s_numpy_core_multiarray_failed_to; +static PyObject *__pyx_kp_s_numpy_core_umath_failed_to_impor; +static PyObject *__pyx_n_s_nver; +static PyObject *__pyx_n_s_range; +static PyObject *__pyx_n_s_render_colors_core; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_tri_normal; +static PyObject *__pyx_n_s_triangles; +static PyObject *__pyx_kp_u_unknown_dtype_code_in_numpy_pxd; +static PyObject *__pyx_n_s_ver_normal; +static PyObject *__pyx_n_s_vertices; +static PyObject *__pyx_n_s_w; +static PyObject *__pyx_pf_16mesh_core_cython_get_normal_core(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_normal, PyArrayObject *__pyx_v_tri_normal, PyArrayObject *__pyx_v_triangles, int __pyx_v_ntri); /* proto */ +static PyObject *__pyx_pf_16mesh_core_cython_2render_colors_core(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_image, PyArrayObject *__pyx_v_vertices, PyArrayObject *__pyx_v_triangles, PyArrayObject *__pyx_v_colors, PyArrayObject *__pyx_v_depth_buffer, int __pyx_v_nver, int __pyx_v_ntri, int __pyx_v_h, int __pyx_v_w, int __pyx_v_c); /* proto */ +static PyObject *__pyx_pf_16mesh_core_cython_4get_normal(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_ver_normal, PyArrayObject *__pyx_v_vertices, PyArrayObject *__pyx_v_triangles, int __pyx_v_nver, int __pyx_v_ntri); /* proto */ +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */ +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */ +static PyObject *__pyx_tuple_; +static PyObject *__pyx_tuple__2; +static PyObject *__pyx_tuple__3; +static PyObject *__pyx_tuple__4; +static PyObject *__pyx_tuple__5; +static PyObject *__pyx_tuple__6; +static PyObject *__pyx_tuple__7; +static PyObject *__pyx_tuple__8; +static PyObject *__pyx_tuple__10; +static PyObject *__pyx_tuple__12; +static PyObject *__pyx_codeobj__9; +static PyObject *__pyx_codeobj__11; +static PyObject *__pyx_codeobj__13; +/* Late includes */ + +/* "mesh_core_cython.pyx":29 + * @cython.boundscheck(False) + * @cython.wraparound(False) + * def get_normal_core(np.ndarray[float, ndim=2, mode = "c"] normal not None, # <<<<<<<<<<<<<< + * np.ndarray[float, ndim=2, mode = "c"] tri_normal not None, + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_16mesh_core_cython_1get_normal_core(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_16mesh_core_cython_1get_normal_core = {"get_normal_core", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_16mesh_core_cython_1get_normal_core, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_16mesh_core_cython_1get_normal_core(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_normal = 0; + PyArrayObject *__pyx_v_tri_normal = 0; + PyArrayObject *__pyx_v_triangles = 0; + int __pyx_v_ntri; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_normal_core (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = 
{&__pyx_n_s_normal,&__pyx_n_s_tri_normal,&__pyx_n_s_triangles,&__pyx_n_s_ntri,0}; + PyObject* values[4] = {0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_normal)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_tri_normal)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("get_normal_core", 1, 4, 4, 1); __PYX_ERR(0, 29, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_triangles)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("get_normal_core", 1, 4, 4, 2); __PYX_ERR(0, 29, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_ntri)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("get_normal_core", 1, 4, 4, 3); __PYX_ERR(0, 29, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "get_normal_core") < 0)) __PYX_ERR(0, 29, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 4) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + } + __pyx_v_normal = ((PyArrayObject *)values[0]); + __pyx_v_tri_normal = ((PyArrayObject *)values[1]); + __pyx_v_triangles = ((PyArrayObject *)values[2]); + __pyx_v_ntri = __Pyx_PyInt_As_int(values[3]); if (unlikely((__pyx_v_ntri == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 32, __pyx_L3_error) + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("get_normal_core", 1, 4, 4, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 29, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("mesh_core_cython.get_normal_core", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_normal), __pyx_ptype_5numpy_ndarray, 0, "normal", 0))) __PYX_ERR(0, 29, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_tri_normal), __pyx_ptype_5numpy_ndarray, 0, "tri_normal", 0))) __PYX_ERR(0, 30, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_triangles), __pyx_ptype_5numpy_ndarray, 0, "triangles", 0))) __PYX_ERR(0, 31, __pyx_L1_error) + __pyx_r = __pyx_pf_16mesh_core_cython_get_normal_core(__pyx_self, __pyx_v_normal, __pyx_v_tri_normal, __pyx_v_triangles, __pyx_v_ntri); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_16mesh_core_cython_get_normal_core(CYTHON_UNUSED PyObject *__pyx_self, 
PyArrayObject *__pyx_v_normal, PyArrayObject *__pyx_v_tri_normal, PyArrayObject *__pyx_v_triangles, int __pyx_v_ntri) { + __Pyx_LocalBuf_ND __pyx_pybuffernd_normal; + __Pyx_Buffer __pyx_pybuffer_normal; + __Pyx_LocalBuf_ND __pyx_pybuffernd_tri_normal; + __Pyx_Buffer __pyx_pybuffer_tri_normal; + __Pyx_LocalBuf_ND __pyx_pybuffernd_triangles; + __Pyx_Buffer __pyx_pybuffer_triangles; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_normal_core", 0); + __pyx_pybuffer_normal.pybuffer.buf = NULL; + __pyx_pybuffer_normal.refcount = 0; + __pyx_pybuffernd_normal.data = NULL; + __pyx_pybuffernd_normal.rcbuffer = &__pyx_pybuffer_normal; + __pyx_pybuffer_tri_normal.pybuffer.buf = NULL; + __pyx_pybuffer_tri_normal.refcount = 0; + __pyx_pybuffernd_tri_normal.data = NULL; + __pyx_pybuffernd_tri_normal.rcbuffer = &__pyx_pybuffer_tri_normal; + __pyx_pybuffer_triangles.pybuffer.buf = NULL; + __pyx_pybuffer_triangles.refcount = 0; + __pyx_pybuffernd_triangles.data = NULL; + __pyx_pybuffernd_triangles.rcbuffer = &__pyx_pybuffer_triangles; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_normal.rcbuffer->pybuffer, (PyObject*)__pyx_v_normal, &__Pyx_TypeInfo_float, PyBUF_FORMAT| PyBUF_C_CONTIGUOUS, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 29, __pyx_L1_error) + } + __pyx_pybuffernd_normal.diminfo[0].strides = __pyx_pybuffernd_normal.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_normal.diminfo[0].shape = __pyx_pybuffernd_normal.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_normal.diminfo[1].strides = __pyx_pybuffernd_normal.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_normal.diminfo[1].shape = __pyx_pybuffernd_normal.rcbuffer->pybuffer.shape[1]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_tri_normal.rcbuffer->pybuffer, (PyObject*)__pyx_v_tri_normal, &__Pyx_TypeInfo_float, PyBUF_FORMAT| PyBUF_C_CONTIGUOUS, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 29, __pyx_L1_error) + } + __pyx_pybuffernd_tri_normal.diminfo[0].strides = __pyx_pybuffernd_tri_normal.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_tri_normal.diminfo[0].shape = __pyx_pybuffernd_tri_normal.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_tri_normal.diminfo[1].strides = __pyx_pybuffernd_tri_normal.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_tri_normal.diminfo[1].shape = __pyx_pybuffernd_tri_normal.rcbuffer->pybuffer.shape[1]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_triangles.rcbuffer->pybuffer, (PyObject*)__pyx_v_triangles, &__Pyx_TypeInfo_int, PyBUF_FORMAT| PyBUF_C_CONTIGUOUS, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 29, __pyx_L1_error) + } + __pyx_pybuffernd_triangles.diminfo[0].strides = __pyx_pybuffernd_triangles.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_triangles.diminfo[0].shape = __pyx_pybuffernd_triangles.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_triangles.diminfo[1].strides = __pyx_pybuffernd_triangles.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_triangles.diminfo[1].shape = __pyx_pybuffernd_triangles.rcbuffer->pybuffer.shape[1]; + + /* "mesh_core_cython.pyx":34 + * int ntri + * ): + * _get_normal_core( # <<<<<<<<<<<<<< + * np.PyArray_DATA(normal), np.PyArray_DATA(tri_normal), np.PyArray_DATA(triangles), + * ntri + */ + _get_normal_core(((float *)PyArray_DATA(((PyArrayObject *)__pyx_v_normal))), ((float *)PyArray_DATA(((PyArrayObject *)__pyx_v_tri_normal))), ((int *)PyArray_DATA(((PyArrayObject 
*)__pyx_v_triangles))), __pyx_v_ntri); + + /* "mesh_core_cython.pyx":29 + * @cython.boundscheck(False) + * @cython.wraparound(False) + * def get_normal_core(np.ndarray[float, ndim=2, mode = "c"] normal not None, # <<<<<<<<<<<<<< + * np.ndarray[float, ndim=2, mode = "c"] tri_normal not None, + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_normal.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_tri_normal.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_triangles.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("mesh_core_cython.get_normal_core", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_normal.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_tri_normal.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_triangles.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "mesh_core_cython.pyx":41 + * @cython.boundscheck(False) + * @cython.wraparound(False) + * def render_colors_core(np.ndarray[float, ndim=3, mode = "c"] image not None, # <<<<<<<<<<<<<< + * np.ndarray[float, ndim=2, mode = "c"] vertices not None, + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_16mesh_core_cython_3render_colors_core(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_16mesh_core_cython_3render_colors_core = {"render_colors_core", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_16mesh_core_cython_3render_colors_core, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_16mesh_core_cython_3render_colors_core(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_image = 0; + PyArrayObject *__pyx_v_vertices = 0; + PyArrayObject *__pyx_v_triangles = 0; + PyArrayObject *__pyx_v_colors = 0; + PyArrayObject *__pyx_v_depth_buffer = 0; + int __pyx_v_nver; + int __pyx_v_ntri; + int __pyx_v_h; + int __pyx_v_w; + int __pyx_v_c; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("render_colors_core (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_image,&__pyx_n_s_vertices,&__pyx_n_s_triangles,&__pyx_n_s_colors,&__pyx_n_s_depth_buffer,&__pyx_n_s_nver,&__pyx_n_s_ntri,&__pyx_n_s_h,&__pyx_n_s_w,&__pyx_n_s_c,0}; + PyObject* values[10] = {0,0,0,0,0,0,0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + CYTHON_FALLTHROUGH; + case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + CYTHON_FALLTHROUGH; + case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + CYTHON_FALLTHROUGH; + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + 
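+      /* This switch falls through (CYTHON_FALLTHROUGH) from the highest
+       * positional-argument count down to 0, filling values[] back to front;
+       * any slots still NULL afterwards are resolved by keyword in the second
+       * switch below, which raises __Pyx_RaiseArgtupleInvalid when a required
+       * argument is missing. */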
CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_image)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_vertices)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("render_colors_core", 1, 10, 10, 1); __PYX_ERR(0, 41, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_triangles)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("render_colors_core", 1, 10, 10, 2); __PYX_ERR(0, 41, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_colors)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("render_colors_core", 1, 10, 10, 3); __PYX_ERR(0, 41, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_depth_buffer)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("render_colors_core", 1, 10, 10, 4); __PYX_ERR(0, 41, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 5: + if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_nver)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("render_colors_core", 1, 10, 10, 5); __PYX_ERR(0, 41, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 6: + if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_ntri)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("render_colors_core", 1, 10, 10, 6); __PYX_ERR(0, 41, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 7: + if (likely((values[7] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_h)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("render_colors_core", 1, 10, 10, 7); __PYX_ERR(0, 41, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 8: + if (likely((values[8] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_w)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("render_colors_core", 1, 10, 10, 8); __PYX_ERR(0, 41, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 9: + if (likely((values[9] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_c)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("render_colors_core", 1, 10, 10, 9); __PYX_ERR(0, 41, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "render_colors_core") < 0)) __PYX_ERR(0, 41, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 10) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + values[7] = PyTuple_GET_ITEM(__pyx_args, 7); + values[8] = PyTuple_GET_ITEM(__pyx_args, 8); + values[9] = PyTuple_GET_ITEM(__pyx_args, 9); + } + __pyx_v_image = ((PyArrayObject *)values[0]); + __pyx_v_vertices = ((PyArrayObject 
*)values[1]); + __pyx_v_triangles = ((PyArrayObject *)values[2]); + __pyx_v_colors = ((PyArrayObject *)values[3]); + __pyx_v_depth_buffer = ((PyArrayObject *)values[4]); + __pyx_v_nver = __Pyx_PyInt_As_int(values[5]); if (unlikely((__pyx_v_nver == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 46, __pyx_L3_error) + __pyx_v_ntri = __Pyx_PyInt_As_int(values[6]); if (unlikely((__pyx_v_ntri == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 46, __pyx_L3_error) + __pyx_v_h = __Pyx_PyInt_As_int(values[7]); if (unlikely((__pyx_v_h == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 47, __pyx_L3_error) + __pyx_v_w = __Pyx_PyInt_As_int(values[8]); if (unlikely((__pyx_v_w == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 47, __pyx_L3_error) + __pyx_v_c = __Pyx_PyInt_As_int(values[9]); if (unlikely((__pyx_v_c == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 47, __pyx_L3_error) + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("render_colors_core", 1, 10, 10, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 41, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("mesh_core_cython.render_colors_core", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_image), __pyx_ptype_5numpy_ndarray, 0, "image", 0))) __PYX_ERR(0, 41, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_vertices), __pyx_ptype_5numpy_ndarray, 0, "vertices", 0))) __PYX_ERR(0, 42, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_triangles), __pyx_ptype_5numpy_ndarray, 0, "triangles", 0))) __PYX_ERR(0, 43, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_colors), __pyx_ptype_5numpy_ndarray, 0, "colors", 0))) __PYX_ERR(0, 44, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_depth_buffer), __pyx_ptype_5numpy_ndarray, 0, "depth_buffer", 0))) __PYX_ERR(0, 45, __pyx_L1_error) + __pyx_r = __pyx_pf_16mesh_core_cython_2render_colors_core(__pyx_self, __pyx_v_image, __pyx_v_vertices, __pyx_v_triangles, __pyx_v_colors, __pyx_v_depth_buffer, __pyx_v_nver, __pyx_v_ntri, __pyx_v_h, __pyx_v_w, __pyx_v_c); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_16mesh_core_cython_2render_colors_core(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_image, PyArrayObject *__pyx_v_vertices, PyArrayObject *__pyx_v_triangles, PyArrayObject *__pyx_v_colors, PyArrayObject *__pyx_v_depth_buffer, int __pyx_v_nver, int __pyx_v_ntri, int __pyx_v_h, int __pyx_v_w, int __pyx_v_c) { + __Pyx_LocalBuf_ND __pyx_pybuffernd_colors; + __Pyx_Buffer __pyx_pybuffer_colors; + __Pyx_LocalBuf_ND __pyx_pybuffernd_depth_buffer; + __Pyx_Buffer __pyx_pybuffer_depth_buffer; + __Pyx_LocalBuf_ND __pyx_pybuffernd_image; + __Pyx_Buffer __pyx_pybuffer_image; + __Pyx_LocalBuf_ND __pyx_pybuffernd_triangles; + __Pyx_Buffer __pyx_pybuffer_triangles; + __Pyx_LocalBuf_ND __pyx_pybuffernd_vertices; + __Pyx_Buffer __pyx_pybuffer_vertices; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("render_colors_core", 0); + __pyx_pybuffer_image.pybuffer.buf = NULL; + __pyx_pybuffer_image.refcount = 0; + __pyx_pybuffernd_image.data = NULL; + __pyx_pybuffernd_image.rcbuffer = &__pyx_pybuffer_image; + __pyx_pybuffer_vertices.pybuffer.buf = NULL; + __pyx_pybuffer_vertices.refcount = 0; + 
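+  /* Each ndarray parameter gets a __Pyx_Buffer/__Pyx_LocalBuf_ND pair zeroed
+   * like this before __Pyx_GetBufferAndValidate below acquires its PEP 3118
+   * buffer and checks dtype, ndim and C-contiguity against the declarations
+   * in mesh_core_cython.pyx. */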
__pyx_pybuffernd_vertices.data = NULL; + __pyx_pybuffernd_vertices.rcbuffer = &__pyx_pybuffer_vertices; + __pyx_pybuffer_triangles.pybuffer.buf = NULL; + __pyx_pybuffer_triangles.refcount = 0; + __pyx_pybuffernd_triangles.data = NULL; + __pyx_pybuffernd_triangles.rcbuffer = &__pyx_pybuffer_triangles; + __pyx_pybuffer_colors.pybuffer.buf = NULL; + __pyx_pybuffer_colors.refcount = 0; + __pyx_pybuffernd_colors.data = NULL; + __pyx_pybuffernd_colors.rcbuffer = &__pyx_pybuffer_colors; + __pyx_pybuffer_depth_buffer.pybuffer.buf = NULL; + __pyx_pybuffer_depth_buffer.refcount = 0; + __pyx_pybuffernd_depth_buffer.data = NULL; + __pyx_pybuffernd_depth_buffer.rcbuffer = &__pyx_pybuffer_depth_buffer; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_image.rcbuffer->pybuffer, (PyObject*)__pyx_v_image, &__Pyx_TypeInfo_float, PyBUF_FORMAT| PyBUF_C_CONTIGUOUS, 3, 0, __pyx_stack) == -1)) __PYX_ERR(0, 41, __pyx_L1_error) + } + __pyx_pybuffernd_image.diminfo[0].strides = __pyx_pybuffernd_image.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_image.diminfo[0].shape = __pyx_pybuffernd_image.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_image.diminfo[1].strides = __pyx_pybuffernd_image.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_image.diminfo[1].shape = __pyx_pybuffernd_image.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_image.diminfo[2].strides = __pyx_pybuffernd_image.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_image.diminfo[2].shape = __pyx_pybuffernd_image.rcbuffer->pybuffer.shape[2]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_vertices.rcbuffer->pybuffer, (PyObject*)__pyx_v_vertices, &__Pyx_TypeInfo_float, PyBUF_FORMAT| PyBUF_C_CONTIGUOUS, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 41, __pyx_L1_error) + } + __pyx_pybuffernd_vertices.diminfo[0].strides = __pyx_pybuffernd_vertices.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_vertices.diminfo[0].shape = __pyx_pybuffernd_vertices.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_vertices.diminfo[1].strides = __pyx_pybuffernd_vertices.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_vertices.diminfo[1].shape = __pyx_pybuffernd_vertices.rcbuffer->pybuffer.shape[1]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_triangles.rcbuffer->pybuffer, (PyObject*)__pyx_v_triangles, &__Pyx_TypeInfo_int, PyBUF_FORMAT| PyBUF_C_CONTIGUOUS, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 41, __pyx_L1_error) + } + __pyx_pybuffernd_triangles.diminfo[0].strides = __pyx_pybuffernd_triangles.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_triangles.diminfo[0].shape = __pyx_pybuffernd_triangles.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_triangles.diminfo[1].strides = __pyx_pybuffernd_triangles.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_triangles.diminfo[1].shape = __pyx_pybuffernd_triangles.rcbuffer->pybuffer.shape[1]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_colors.rcbuffer->pybuffer, (PyObject*)__pyx_v_colors, &__Pyx_TypeInfo_float, PyBUF_FORMAT| PyBUF_C_CONTIGUOUS, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 41, __pyx_L1_error) + } + __pyx_pybuffernd_colors.diminfo[0].strides = __pyx_pybuffernd_colors.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_colors.diminfo[0].shape = __pyx_pybuffernd_colors.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_colors.diminfo[1].strides = __pyx_pybuffernd_colors.rcbuffer->pybuffer.strides[1]; 
__pyx_pybuffernd_colors.diminfo[1].shape = __pyx_pybuffernd_colors.rcbuffer->pybuffer.shape[1]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_depth_buffer.rcbuffer->pybuffer, (PyObject*)__pyx_v_depth_buffer, &__Pyx_TypeInfo_float, PyBUF_FORMAT| PyBUF_C_CONTIGUOUS, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 41, __pyx_L1_error) + } + __pyx_pybuffernd_depth_buffer.diminfo[0].strides = __pyx_pybuffernd_depth_buffer.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_depth_buffer.diminfo[0].shape = __pyx_pybuffernd_depth_buffer.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_depth_buffer.diminfo[1].strides = __pyx_pybuffernd_depth_buffer.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_depth_buffer.diminfo[1].shape = __pyx_pybuffernd_depth_buffer.rcbuffer->pybuffer.shape[1]; + + /* "mesh_core_cython.pyx":49 + * int h, int w, int c + * ): + * _render_colors_core( # <<<<<<<<<<<<<< + * np.PyArray_DATA(image), np.PyArray_DATA(vertices), np.PyArray_DATA(triangles), + * np.PyArray_DATA(colors), + */ + _render_colors_core(((float *)PyArray_DATA(((PyArrayObject *)__pyx_v_image))), ((float *)PyArray_DATA(((PyArrayObject *)__pyx_v_vertices))), ((int *)PyArray_DATA(((PyArrayObject *)__pyx_v_triangles))), ((float *)PyArray_DATA(((PyArrayObject *)__pyx_v_colors))), ((float *)PyArray_DATA(((PyArrayObject *)__pyx_v_depth_buffer))), __pyx_v_nver, __pyx_v_ntri, __pyx_v_h, __pyx_v_w, __pyx_v_c); + + /* "mesh_core_cython.pyx":41 + * @cython.boundscheck(False) + * @cython.wraparound(False) + * def render_colors_core(np.ndarray[float, ndim=3, mode = "c"] image not None, # <<<<<<<<<<<<<< + * np.ndarray[float, ndim=2, mode = "c"] vertices not None, + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_colors.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_depth_buffer.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_image.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_triangles.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_vertices.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("mesh_core_cython.render_colors_core", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_colors.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_depth_buffer.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_image.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_triangles.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_vertices.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "mesh_core_cython.pyx":59 + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * @cython.wraparound(False) # turn off negative index wrapping for entire function + * def get_normal(np.ndarray[float, ndim=2, mode = "c"] ver_normal not None, # <<<<<<<<<<<<<< + * np.ndarray[float, ndim=2, mode = "c"] vertices not None, + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + */ + +/* Python wrapper */ +static PyObject 
*__pyx_pw_16mesh_core_cython_5get_normal(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_16mesh_core_cython_5get_normal = {"get_normal", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_16mesh_core_cython_5get_normal, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_16mesh_core_cython_5get_normal(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_ver_normal = 0; + PyArrayObject *__pyx_v_vertices = 0; + PyArrayObject *__pyx_v_triangles = 0; + int __pyx_v_nver; + int __pyx_v_ntri; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_normal (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_ver_normal,&__pyx_n_s_vertices,&__pyx_n_s_triangles,&__pyx_n_s_nver,&__pyx_n_s_ntri,0}; + PyObject* values[5] = {0,0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_ver_normal)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_vertices)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("get_normal", 1, 5, 5, 1); __PYX_ERR(0, 59, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_triangles)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("get_normal", 1, 5, 5, 2); __PYX_ERR(0, 59, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_nver)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("get_normal", 1, 5, 5, 3); __PYX_ERR(0, 59, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_ntri)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("get_normal", 1, 5, 5, 4); __PYX_ERR(0, 59, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "get_normal") < 0)) __PYX_ERR(0, 59, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 5) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + } + __pyx_v_ver_normal = ((PyArrayObject *)values[0]); + __pyx_v_vertices = ((PyArrayObject *)values[1]); + __pyx_v_triangles = ((PyArrayObject *)values[2]); + __pyx_v_nver = __Pyx_PyInt_As_int(values[3]); if (unlikely((__pyx_v_nver == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 62, __pyx_L3_error) + __pyx_v_ntri = __Pyx_PyInt_As_int(values[4]); if (unlikely((__pyx_v_ntri == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 
62, __pyx_L3_error) + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("get_normal", 1, 5, 5, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 59, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("mesh_core_cython.get_normal", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_ver_normal), __pyx_ptype_5numpy_ndarray, 0, "ver_normal", 0))) __PYX_ERR(0, 59, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_vertices), __pyx_ptype_5numpy_ndarray, 0, "vertices", 0))) __PYX_ERR(0, 60, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_triangles), __pyx_ptype_5numpy_ndarray, 0, "triangles", 0))) __PYX_ERR(0, 61, __pyx_L1_error) + __pyx_r = __pyx_pf_16mesh_core_cython_4get_normal(__pyx_self, __pyx_v_ver_normal, __pyx_v_vertices, __pyx_v_triangles, __pyx_v_nver, __pyx_v_ntri); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_16mesh_core_cython_4get_normal(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_ver_normal, PyArrayObject *__pyx_v_vertices, PyArrayObject *__pyx_v_triangles, int __pyx_v_nver, int __pyx_v_ntri) { + __Pyx_LocalBuf_ND __pyx_pybuffernd_triangles; + __Pyx_Buffer __pyx_pybuffer_triangles; + __Pyx_LocalBuf_ND __pyx_pybuffernd_ver_normal; + __Pyx_Buffer __pyx_pybuffer_ver_normal; + __Pyx_LocalBuf_ND __pyx_pybuffernd_vertices; + __Pyx_Buffer __pyx_pybuffer_vertices; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_normal", 0); + __pyx_pybuffer_ver_normal.pybuffer.buf = NULL; + __pyx_pybuffer_ver_normal.refcount = 0; + __pyx_pybuffernd_ver_normal.data = NULL; + __pyx_pybuffernd_ver_normal.rcbuffer = &__pyx_pybuffer_ver_normal; + __pyx_pybuffer_vertices.pybuffer.buf = NULL; + __pyx_pybuffer_vertices.refcount = 0; + __pyx_pybuffernd_vertices.data = NULL; + __pyx_pybuffernd_vertices.rcbuffer = &__pyx_pybuffer_vertices; + __pyx_pybuffer_triangles.pybuffer.buf = NULL; + __pyx_pybuffer_triangles.refcount = 0; + __pyx_pybuffernd_triangles.data = NULL; + __pyx_pybuffernd_triangles.rcbuffer = &__pyx_pybuffer_triangles; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_ver_normal.rcbuffer->pybuffer, (PyObject*)__pyx_v_ver_normal, &__Pyx_TypeInfo_float, PyBUF_FORMAT| PyBUF_C_CONTIGUOUS, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 59, __pyx_L1_error) + } + __pyx_pybuffernd_ver_normal.diminfo[0].strides = __pyx_pybuffernd_ver_normal.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_ver_normal.diminfo[0].shape = __pyx_pybuffernd_ver_normal.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_ver_normal.diminfo[1].strides = __pyx_pybuffernd_ver_normal.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_ver_normal.diminfo[1].shape = __pyx_pybuffernd_ver_normal.rcbuffer->pybuffer.shape[1]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_vertices.rcbuffer->pybuffer, (PyObject*)__pyx_v_vertices, &__Pyx_TypeInfo_float, PyBUF_FORMAT| PyBUF_C_CONTIGUOUS, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 59, __pyx_L1_error) + } + __pyx_pybuffernd_vertices.diminfo[0].strides = __pyx_pybuffernd_vertices.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_vertices.diminfo[0].shape = 
__pyx_pybuffernd_vertices.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_vertices.diminfo[1].strides = __pyx_pybuffernd_vertices.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_vertices.diminfo[1].shape = __pyx_pybuffernd_vertices.rcbuffer->pybuffer.shape[1]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_triangles.rcbuffer->pybuffer, (PyObject*)__pyx_v_triangles, &__Pyx_TypeInfo_int, PyBUF_FORMAT| PyBUF_C_CONTIGUOUS, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 59, __pyx_L1_error) + } + __pyx_pybuffernd_triangles.diminfo[0].strides = __pyx_pybuffernd_triangles.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_triangles.diminfo[0].shape = __pyx_pybuffernd_triangles.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_triangles.diminfo[1].strides = __pyx_pybuffernd_triangles.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_triangles.diminfo[1].shape = __pyx_pybuffernd_triangles.rcbuffer->pybuffer.shape[1]; + + /* "mesh_core_cython.pyx":63 + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + * int nver, int ntri): + * _get_normal( # <<<<<<<<<<<<<< + * np.PyArray_DATA(ver_normal), np.PyArray_DATA(vertices), np.PyArray_DATA(triangles), + * nver, ntri) + */ + _get_normal(((float *)PyArray_DATA(((PyArrayObject *)__pyx_v_ver_normal))), ((float *)PyArray_DATA(((PyArrayObject *)__pyx_v_vertices))), ((int *)PyArray_DATA(((PyArrayObject *)__pyx_v_triangles))), __pyx_v_nver, __pyx_v_ntri); + + /* "mesh_core_cython.pyx":59 + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * @cython.wraparound(False) # turn off negative index wrapping for entire function + * def get_normal(np.ndarray[float, ndim=2, mode = "c"] ver_normal not None, # <<<<<<<<<<<<<< + * np.ndarray[float, ndim=2, mode = "c"] vertices not None, + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_triangles.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_ver_normal.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_vertices.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("mesh_core_cython.get_normal", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_triangles.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_ver_normal.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_vertices.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":258 + * # experimental exception made for __getbuffer__ and __releasebuffer__ + * # -- the details of this may change. + * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< + * # This implementation of getbuffer is geared towards Cython + * # requirements, and does not yet fulfill the PEP. 
+ */ + +/* Python wrapper */ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0); + __pyx_r = __pyx_pf_5numpy_7ndarray___getbuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_v_i; + int __pyx_v_ndim; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + int __pyx_v_t; + char *__pyx_v_f; + PyArray_Descr *__pyx_v_descr = 0; + int __pyx_v_offset; + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + int __pyx_t_5; + int __pyx_t_6; + PyArray_Descr *__pyx_t_7; + PyObject *__pyx_t_8 = NULL; + char *__pyx_t_9; + if (__pyx_v_info == NULL) { + PyErr_SetString(PyExc_BufferError, "PyObject_GetBuffer: view==NULL argument is obsolete"); + return -1; + } + __Pyx_RefNannySetupContext("__getbuffer__", 0); + __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(__pyx_v_info->obj); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":265 + * + * cdef int i, ndim + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + */ + __pyx_v_endian_detector = 1; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":266 + * cdef int i, ndim + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * + * ndim = PyArray_NDIM(self) + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":268 + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + */ + __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 + * ndim = PyArray_NDIM(self) + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + __pyx_t_2 = (((__pyx_v_flags & PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS) != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L4_bool_binop_done; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":271 + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not C contiguous") + * + */ + __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_ARRAY_C_CONTIGUOUS) != 0)) != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L4_bool_binop_done:; + + /* 
"../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 + * ndim = PyArray_NDIM(self) + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + if (unlikely(__pyx_t_1)) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 272, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 272, __pyx_L1_error) + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":270 + * ndim = PyArray_NDIM(self) + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + __pyx_t_2 = (((__pyx_v_flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS) != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L7_bool_binop_done; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":275 + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not Fortran contiguous") + * + */ + __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_ARRAY_F_CONTIGUOUS) != 0)) != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L7_bool_binop_done:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + if (unlikely(__pyx_t_1)) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 276, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 276, __pyx_L1_error) + + /* 
"../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":274 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":278 + * raise ValueError(u"ndarray is not Fortran contiguous") + * + * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< + * info.ndim = ndim + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":279 + * + * info.buf = PyArray_DATA(self) + * info.ndim = ndim # <<<<<<<<<<<<<< + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * # Allocate new buffer for strides and shape info. + */ + __pyx_v_info->ndim = __pyx_v_ndim; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 + * info.buf = PyArray_DATA(self) + * info.ndim = ndim + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + */ + __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); + if (__pyx_t_1) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":283 + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) # <<<<<<<<<<<<<< + * info.shape = info.strides + ndim + * for i in range(ndim): + */ + __pyx_v_info->strides = ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim)))); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":284 + * # This is allocated as one block, strides first. 
+ * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) + * info.shape = info.strides + ndim # <<<<<<<<<<<<<< + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + */ + __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":285 + * info.strides = PyObject_Malloc(sizeof(Py_ssize_t) * 2 * ndim) + * info.shape = info.strides + ndim + * for i in range(ndim): # <<<<<<<<<<<<<< + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] + */ + __pyx_t_4 = __pyx_v_ndim; + __pyx_t_5 = __pyx_t_4; + for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { + __pyx_v_i = __pyx_t_6; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":286 + * info.shape = info.strides + ndim + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + */ + (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":287 + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< + * else: + * info.strides = PyArray_STRIDES(self) + */ + (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":280 + * info.buf = PyArray_DATA(self) + * info.ndim = ndim + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. 
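
The branch above copies the exporter's per-dimension strides and shape into the Py_buffer (into one malloc'd block when npy_intp and Py_ssize_t differ in size). The same numbers are visible from Python; a small sketch, with byte counts assuming a 64-bit build:

import numpy as np

a = np.zeros((3, 5), dtype=np.float64)
print(a.shape)         # (3, 5)
print(a.strides)       # (40, 8): one row is 5 * 8 bytes, one element 8 bytes
print(a[::2].strides)  # (80, 8): taking every other row doubles the row stride
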
+ */ + goto __pyx_L9; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":289 + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + */ + /*else*/ { + __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":290 + * else: + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + */ + __pyx_v_info->shape = ((Py_ssize_t *)PyArray_DIMS(__pyx_v_self)); + } + __pyx_L9:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":291 + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL # <<<<<<<<<<<<<< + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) + */ + __pyx_v_info->suboffsets = NULL; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":292 + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< + * info.readonly = not PyArray_ISWRITEABLE(self) + * + */ + __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":293 + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< + * + * cdef int t + */ + __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0)); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":296 + * + * cdef int t + * cdef char* f = NULL # <<<<<<<<<<<<<< + * cdef dtype descr = PyArray_DESCR(self) + * cdef int offset + */ + __pyx_v_f = NULL; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":297 + * cdef int t + * cdef char* f = NULL + * cdef dtype descr = PyArray_DESCR(self) # <<<<<<<<<<<<<< + * cdef int offset + * + */ + __pyx_t_7 = PyArray_DESCR(__pyx_v_self); + __pyx_t_3 = ((PyObject *)__pyx_t_7); + __Pyx_INCREF(__pyx_t_3); + __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":300 + * cdef int offset + * + * info.obj = self # <<<<<<<<<<<<<< + * + * if not PyDataType_HASFIELDS(descr): + */ + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); + __pyx_v_info->obj = ((PyObject *)__pyx_v_self); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":302 + * info.obj = self + * + * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + */ + __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0); + if (__pyx_t_1) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":303 + * + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num # 
<<<<<<<<<<<<<< + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + */ + __pyx_t_4 = __pyx_v_descr->type_num; + __pyx_v_t = __pyx_t_4; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":304 + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_2 = ((__pyx_v_descr->byteorder == '>') != 0); + if (!__pyx_t_2) { + goto __pyx_L15_next_or; + } else { + } + __pyx_t_2 = (__pyx_v_little_endian != 0); + if (!__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L14_bool_binop_done; + } + __pyx_L15_next_or:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":305 + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + */ + __pyx_t_2 = ((__pyx_v_descr->byteorder == '<') != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L14_bool_binop_done; + } + __pyx_t_2 = ((!(__pyx_v_little_endian != 0)) != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L14_bool_binop_done:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":304 + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + if (unlikely(__pyx_t_1)) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":306 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 306, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 306, __pyx_L1_error) + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":304 + * if not PyDataType_HASFIELDS(descr): + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":307 + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + */ + switch (__pyx_v_t) { + case NPY_BYTE: + __pyx_v_f = ((char *)"b"); + break; + case NPY_UBYTE: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":308 + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + * 
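
The byte-order guard above rejects dtypes whose stored endianness does not match the machine, which is why byte-swapped arrays must be normalized before Cython buffer access. A hedged sketch of detecting and converting the order:

import numpy as np

big = np.arange(4, dtype='>f8')   # explicitly big-endian float64
print(big.dtype.byteorder)        # '>' (non-native on little-endian hosts)
native = big.astype(big.dtype.newbyteorder('='))
print(native.dtype.byteorder)     # '=': native order, so the guard passes
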
elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + */ + __pyx_v_f = ((char *)"B"); + break; + case NPY_SHORT: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":309 + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + */ + __pyx_v_f = ((char *)"h"); + break; + case NPY_USHORT: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":310 + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + */ + __pyx_v_f = ((char *)"H"); + break; + case NPY_INT: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":311 + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + */ + __pyx_v_f = ((char *)"i"); + break; + case NPY_UINT: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":312 + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" + */ + __pyx_v_f = ((char *)"I"); + break; + case NPY_LONG: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":313 + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + */ + __pyx_v_f = ((char *)"l"); + break; + case NPY_ULONG: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":314 + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + */ + __pyx_v_f = ((char *)"L"); + break; + case NPY_LONGLONG: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":315 + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + */ + __pyx_v_f = ((char *)"q"); + break; + case NPY_ULONGLONG: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":316 + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + */ + __pyx_v_f = ((char *)"Q"); + break; + case NPY_FLOAT: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":317 + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + */ + __pyx_v_f = ((char *)"f"); + break; + case NPY_DOUBLE: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":318 
+ * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + */ + __pyx_v_f = ((char *)"d"); + break; + case NPY_LONGDOUBLE: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":319 + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + */ + __pyx_v_f = ((char *)"g"); + break; + case NPY_CFLOAT: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":320 + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + */ + __pyx_v_f = ((char *)"Zf"); + break; + case NPY_CDOUBLE: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":321 + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" + */ + __pyx_v_f = ((char *)"Zd"); + break; + case NPY_CLONGDOUBLE: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":322 + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f = "O" + * else: + */ + __pyx_v_f = ((char *)"Zg"); + break; + case NPY_OBJECT: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":323 + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_v_f = ((char *)"O"); + break; + default: + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":325 + * elif t == NPY_OBJECT: f = "O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * info.format = f + * return + */ + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 325, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_8 = PyUnicode_Format(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 325, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 325, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 325, __pyx_L1_error) + break; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":326 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f # <<<<<<<<<<<<<< + * return + * else: + */ + __pyx_v_info->format = __pyx_v_f; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":327 + * raise ValueError(u"unknown dtype code in numpy.pxd 
(%d)" % t) + * info.format = f + * return # <<<<<<<<<<<<<< + * else: + * info.format = PyObject_Malloc(_buffer_format_string_len) + */ + __pyx_r = 0; + goto __pyx_L0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":302 + * info.obj = self + * + * if not PyDataType_HASFIELDS(descr): # <<<<<<<<<<<<<< + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + */ + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":329 + * return + * else: + * info.format = PyObject_Malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 + */ + /*else*/ { + __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF)); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":330 + * else: + * info.format = PyObject_Malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< + * offset = 0 + * f = _util_dtypestring(descr, info.format + 1, + */ + (__pyx_v_info->format[0]) = '^'; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":331 + * info.format = PyObject_Malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 # <<<<<<<<<<<<<< + * f = _util_dtypestring(descr, info.format + 1, + * info.format + _buffer_format_string_len, + */ + __pyx_v_offset = 0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":332 + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 + * f = _util_dtypestring(descr, info.format + 1, # <<<<<<<<<<<<<< + * info.format + _buffer_format_string_len, + * &offset) + */ + __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(1, 332, __pyx_L1_error) + __pyx_v_f = __pyx_t_9; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":335 + * info.format + _buffer_format_string_len, + * &offset) + * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + */ + (__pyx_v_f[0]) = '\x00'; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":258 + * # experimental exception made for __getbuffer__ and __releasebuffer__ + * # -- the details of this may change. + * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< + * # This implementation of getbuffer is geared towards Cython + * # requirements, and does not yet fulfill the PEP. 
+ */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.ndarray.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + if (__pyx_v_info->obj != NULL) { + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0; + } + goto __pyx_L2; + __pyx_L0:; + if (__pyx_v_info->obj == Py_None) { + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0; + } + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_descr); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":337 + * f[0] = c'\0' # Terminate format string + * + * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + */ + +/* Python wrapper */ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info); /*proto*/ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__releasebuffer__ (wrapper)", 0); + __pyx_pf_5numpy_7ndarray_2__releasebuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__releasebuffer__", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":338 + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0); + if (__pyx_t_1) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":339 + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) # <<<<<<<<<<<<<< + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * PyObject_Free(info.strides) + */ + PyObject_Free(__pyx_v_info->format); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":338 + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":340 + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * PyObject_Free(info.strides) + * # info.shape was stored after info.strides in the same block + */ + __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0); + if (__pyx_t_1) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":341 + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * PyObject_Free(info.strides) # <<<<<<<<<<<<<< + * # info.shape was stored after info.strides in the same block + * + */ 
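
__releasebuffer__ above undoes __getbuffer__'s allocations: the format string for structured dtypes and, on platforms where npy_intp and Py_ssize_t differ, the strides/shape block. From Python the acquire/release pair is driven by memoryview lifetime; a minimal sketch:

import numpy as np

a = np.zeros(3)
m = memoryview(a)                    # acquires the buffer (getbuffer path)
print(m.format, m.shape, m.strides)  # d (3,) (8,) on a typical 64-bit build
m.release()                          # releases it (releasebuffer path)
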
+ PyObject_Free(__pyx_v_info->strides); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":340 + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * PyObject_Free(info.strides) + * # info.shape was stored after info.strides in the same block + */ + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":337 + * f[0] = c'\0' # Terminate format string + * + * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< + * if PyArray_HASFIELDS(self): + * PyObject_Free(info.format) + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":821 + * ctypedef npy_cdouble complex_t + * + * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(1, a) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":822 + * + * cdef inline object PyArray_MultiIterNew1(a): + * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew2(a, b): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 822, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":821 + * ctypedef npy_cdouble complex_t + * + * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(1, a) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew1", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 + * return PyArray_MultiIterNew(1, a) + * + * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(2, a, b) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":825 + * + * cdef inline object PyArray_MultiIterNew2(a, b): + * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 825, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":824 + * return 
PyArray_MultiIterNew(1, a) + * + * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(2, a, b) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew2", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 + * return PyArray_MultiIterNew(2, a, b) + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(3, a, b, c) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":828 + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 828, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":827 + * return PyArray_MultiIterNew(2, a, b) + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(3, a, b, c) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew3", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":830 + * return PyArray_MultiIterNew(3, a, b, c) + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(4, a, b, c, d) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":831 + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 831, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":830 + * return PyArray_MultiIterNew(3, a, b, c) + * + * cdef inline object 
PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(4, a, b, c, d) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew4", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + * return PyArray_MultiIterNew(4, a, b, c, d) + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":834 + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< + * + * cdef inline tuple PyDataType_SHAPE(dtype d): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 834, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":833 + * return PyArray_MultiIterNew(4, a, b, c, d) + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew5", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":836 + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< + * if PyDataType_HASSUBARRAY(d): + * return d.subarray.shape + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__pyx_v_d) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("PyDataType_SHAPE", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 + * + * cdef inline tuple PyDataType_SHAPE(dtype d): + * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< + * return d.subarray.shape + * else: + */ + __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0); + if (__pyx_t_1) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":838 + * cdef inline tuple PyDataType_SHAPE(dtype d): + * if PyDataType_HASSUBARRAY(d): + * return d.subarray.shape # <<<<<<<<<<<<<< + * else: + * return () + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject*)__pyx_v_d->subarray->shape)); + __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape); + goto __pyx_L0; + + /* 
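
PyArray_MultiIterNew1 through PyArray_MultiIterNew5 wrap NumPy's multi-operand broadcasting iterator for one to five inputs. np.broadcast exposes the same machinery from Python:

import numpy as np

b = np.broadcast(np.arange(3)[:, None], np.arange(4))  # two-operand broadcast
print(b.shape)   # (3, 4)
print(next(b))   # first pair of broadcast elements
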
"../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":837 + * + * cdef inline tuple PyDataType_SHAPE(dtype d): + * if PyDataType_HASSUBARRAY(d): # <<<<<<<<<<<<<< + * return d.subarray.shape + * else: + */ + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":840 + * return d.subarray.shape + * else: + * return () # <<<<<<<<<<<<<< + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_empty_tuple); + __pyx_r = __pyx_empty_tuple; + goto __pyx_L0; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":836 + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + * cdef inline tuple PyDataType_SHAPE(dtype d): # <<<<<<<<<<<<<< + * if PyDataType_HASSUBARRAY(d): + * return d.subarray.shape + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 + * return () + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< + * # Recursive utility function used in __getbuffer__ to get format + * # string. The new location in the format string is returned. + */ + +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx_v_descr, char *__pyx_v_f, char *__pyx_v_end, int *__pyx_v_offset) { + PyArray_Descr *__pyx_v_child = 0; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + PyObject *__pyx_v_fields = 0; + PyObject *__pyx_v_childname = NULL; + PyObject *__pyx_v_new_offset = NULL; + PyObject *__pyx_v_t = NULL; + char *__pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + int __pyx_t_7; + long __pyx_t_8; + char *__pyx_t_9; + __Pyx_RefNannySetupContext("_util_dtypestring", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":847 + * + * cdef dtype child + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * cdef tuple fields + */ + __pyx_v_endian_detector = 1; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":848 + * cdef dtype child + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * cdef tuple fields + * + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 + * cdef tuple fields + * + * for childname in descr.names: # <<<<<<<<<<<<<< + * fields = descr.fields[childname] + * child, new_offset = fields + */ + if (unlikely(__pyx_v_descr->names == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(1, 851, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_descr->names; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); 
__Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(1, 851, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 851, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3); + __pyx_t_3 = 0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":852 + * + * for childname in descr.names: + * fields = descr.fields[childname] # <<<<<<<<<<<<<< + * child, new_offset = fields + * + */ + if (unlikely(__pyx_v_descr->fields == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 852, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_PyDict_GetItem(__pyx_v_descr->fields, __pyx_v_childname); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 852, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(1, 852, __pyx_L1_error) + __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":853 + * for childname in descr.names: + * fields = descr.fields[childname] + * child, new_offset = fields # <<<<<<<<<<<<<< + * + * if (end - f) - (new_offset - offset[0]) < 15: + */ + if (likely(__pyx_v_fields != Py_None)) { + PyObject* sequence = __pyx_v_fields; + Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(1, 853, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 853, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 853, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } else { + __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(1, 853, __pyx_L1_error) + } + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_dtype))))) __PYX_ERR(1, 853, __pyx_L1_error) + __Pyx_XDECREF_SET(__pyx_v_child, ((PyArray_Descr *)__pyx_t_3)); + __pyx_t_3 = 0; + __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4); + __pyx_t_4 = 0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 + * child, new_offset = fields + * + * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + */ + __pyx_t_4 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 855, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyNumber_Subtract(__pyx_v_new_offset, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 855, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 855, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - 
((int)__pyx_t_5)) < 15) != 0); + if (unlikely(__pyx_t_6)) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 856, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 856, __pyx_L1_error) + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":855 + * child, new_offset = fields + * + * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + */ + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":858 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_7 = ((__pyx_v_child->byteorder == '>') != 0); + if (!__pyx_t_7) { + goto __pyx_L8_next_or; + } else { + } + __pyx_t_7 = (__pyx_v_little_endian != 0); + if (!__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L7_bool_binop_done; + } + __pyx_L8_next_or:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":859 + * + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * # One could encode it in the format string and have Cython + */ + __pyx_t_7 = ((__pyx_v_child->byteorder == '<') != 0); + if (__pyx_t_7) { + } else { + __pyx_t_6 = __pyx_t_7; + goto __pyx_L7_bool_binop_done; + } + __pyx_t_7 = ((!(__pyx_v_little_endian != 0)) != 0); + __pyx_t_6 = __pyx_t_7; + __pyx_L7_bool_binop_done:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":858 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + if (unlikely(__pyx_t_6)) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":860 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 860, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 860, __pyx_L1_error) + + /* 
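
The headroom check above (at least 15 bytes left) bounds how deeply nested a structured dtype can be before the fixed 255-byte format buffer runs out. The traversal it protects mirrors iterating descr.names and descr.fields; offsets below assume the default packed layout:

import numpy as np

dt = np.dtype([('x', 'i4'), ('y', 'f8')])
for name in dt.names:             # mirrors "for childname in descr.names"
    child, offset = dt.fields[name][:2]
    print(name, child, offset)    # x int32 0, then y float64 4
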
"../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":858 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":870 + * + * # Output padding bytes + * while offset[0] < new_offset: # <<<<<<<<<<<<<< + * f[0] = 120 # "x"; pad byte + * f += 1 + */ + while (1) { + __pyx_t_3 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 870, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_3, __pyx_v_new_offset, Py_LT); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 870, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 870, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!__pyx_t_6) break; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":871 + * # Output padding bytes + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< + * f += 1 + * offset[0] += 1 + */ + (__pyx_v_f[0]) = 0x78; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":872 + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte + * f += 1 # <<<<<<<<<<<<<< + * offset[0] += 1 + * + */ + __pyx_v_f = (__pyx_v_f + 1); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":873 + * f[0] = 120 # "x"; pad byte + * f += 1 + * offset[0] += 1 # <<<<<<<<<<<<<< + * + * offset[0] += child.itemsize + */ + __pyx_t_8 = 0; + (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1); + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":875 + * offset[0] += 1 + * + * offset[0] += child.itemsize # <<<<<<<<<<<<<< + * + * if not PyDataType_HASFIELDS(child): + */ + __pyx_t_8 = 0; + (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":877 + * offset[0] += child.itemsize + * + * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< + * t = child.type_num + * if end - f < 5: + */ + __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0); + if (__pyx_t_6) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":878 + * + * if not PyDataType_HASFIELDS(child): + * t = child.type_num # <<<<<<<<<<<<<< + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") + */ + __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_child->type_num); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 878, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4); + __pyx_t_4 = 0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":879 + * if not PyDataType_HASFIELDS(child): + * t = child.type_num + * if end - f < 5: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short.") + * + */ + __pyx_t_6 = 
(((__pyx_v_end - __pyx_v_f) < 5) != 0); + if (unlikely(__pyx_t_6)) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":880 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 880, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(1, 880, __pyx_L1_error) + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":879 + * if not PyDataType_HASFIELDS(child): + * t = child.type_num + * if end - f < 5: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short.") + * + */ + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":883 + * + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_BYTE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 883, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 883, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 883, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 98; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":884 + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UBYTE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 884, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 884, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 884, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 66; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":885 + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_SHORT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 885, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 885, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 885, __pyx_L1_error) + 
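
When a structured dtype leaves gaps between fields (an aligned layout, for instance), the while-loop above emits one 'x' pad byte per skipped offset, the same convention the struct module uses:

import numpy as np
import struct

aligned = np.dtype([('x', 'i4'), ('y', 'f8')], align=True)
print(aligned.itemsize)            # 16: four pad bytes sit between x and y
print(struct.calcsize('=ixxxxd'))  # 16: the identical layout spelled with 'x'
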
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x68; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":886 + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_USHORT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 886, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 886, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 886, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 72; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":887 + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_INT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 887, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 887, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 887, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x69; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":888 + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UINT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 888, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 888, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 888, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 73; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":889 + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 889, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 889, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) 
__PYX_ERR(1, 889, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x6C; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":890 + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 890, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 890, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 890, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 76; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":891 + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGLONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 891, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 891, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 891, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x71; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":892 + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONGLONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 892, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 892, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 892, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 81; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":893 + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_FLOAT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 893, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 893, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 
0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 893, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x66; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":894 + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_DOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 894, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 894, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 894, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x64; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":895 + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 895, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 895, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 895, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 0x67; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":896 + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CFLOAT); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 896, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 896, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 896, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 0x66; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":897 + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" + */ + 
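/* [editorial note, not Cython output] The long if/else ladder running
 * through this function is the compiled form of the .pxd elif chain
 * quoted in the surrounding comments: it maps the NumPy type number t
 * to the matching PEP 3118 struct-format character, written as its
 * ASCII code into the format buffer f (108='l', 76='L', 113='q',
 * 81='Q', 102='f', 100='d', 103='g'); complex types emit a
 * two-character code with a 90='Z' prefix (e.g. "Zf" for NPY_CFLOAT)
 * and advance f one extra slot. A minimal standalone sketch of the same
 * mapping, using a hypothetical toy enum rather than the real
 * NPY_TYPES values:
 *
 *   #include <stdio.h>
 *   enum toy_type { TOY_LONG, TOY_FLOAT, TOY_DOUBLE, TOY_CFLOAT };
 *   static const char *format_for(enum toy_type t) {
 *       switch (t) {
 *       case TOY_LONG:   return "l";
 *       case TOY_FLOAT:  return "f";
 *       case TOY_DOUBLE: return "d";
 *       case TOY_CFLOAT: return "Zf";  // 'Z' marks a complex pair
 *       default:         return NULL;  // unknown dtype -> caller raises
 *       }
 *   }
 *   int main(void) { puts(format_for(TOY_CFLOAT)); return 0; }
 */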
__pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 897, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 897, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 897, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 0x64; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":898 + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + */ + __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CLONGDOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 898, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 898, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 898, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_6) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 0x67; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":899 + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_OBJECT); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 899, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 899, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(1, 899, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (likely(__pyx_t_6)) { + (__pyx_v_f[0]) = 79; + goto __pyx_L15; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":901 + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * f += 1 + * else: + */ + /*else*/ { + __pyx_t_3 = __Pyx_PyUnicode_FormatSafe(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 901, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 901, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(1, 901, __pyx_L1_error) + } + __pyx_L15:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":902 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd 
(%d)" % t) + * f += 1 # <<<<<<<<<<<<<< + * else: + * # Cython ignores struct boundary information ("T{...}"), + */ + __pyx_v_f = (__pyx_v_f + 1); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":877 + * offset[0] += child.itemsize + * + * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< + * t = child.type_num + * if end - f < 5: + */ + goto __pyx_L13; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":906 + * # Cython ignores struct boundary information ("T{...}"), + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< + * return f + * + */ + /*else*/ { + __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_child, __pyx_v_f, __pyx_v_end, __pyx_v_offset); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(1, 906, __pyx_L1_error) + __pyx_v_f = __pyx_t_9; + } + __pyx_L13:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":851 + * cdef tuple fields + * + * for childname in descr.names: # <<<<<<<<<<<<<< + * fields = descr.fields[childname] + * child, new_offset = fields + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":907 + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) + * return f # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = __pyx_v_f; + goto __pyx_L0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":842 + * return () + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< + * # Recursive utility function used in __getbuffer__ to get format + * # string. The new location in the format string is returned. + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("numpy._util_dtypestring", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_child); + __Pyx_XDECREF(__pyx_v_fields); + __Pyx_XDECREF(__pyx_v_childname); + __Pyx_XDECREF(__pyx_v_new_offset); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1022 + * int _import_umath() except -1 + * + * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< + * Py_INCREF(base) # important to do this before stealing the reference below! + * PyArray_SetBaseObject(arr, base) + */ + +static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_arr, PyObject *__pyx_v_base) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("set_array_base", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1023 + * + * cdef inline void set_array_base(ndarray arr, object base): + * Py_INCREF(base) # important to do this before stealing the reference below! 
# <<<<<<<<<<<<<< + * PyArray_SetBaseObject(arr, base) + * + */ + Py_INCREF(__pyx_v_base); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1024 + * cdef inline void set_array_base(ndarray arr, object base): + * Py_INCREF(base) # important to do this before stealing the reference below! + * PyArray_SetBaseObject(arr, base) # <<<<<<<<<<<<<< + * + * cdef inline object get_array_base(ndarray arr): + */ + (void)(PyArray_SetBaseObject(__pyx_v_arr, __pyx_v_base)); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1022 + * int _import_umath() except -1 + * + * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< + * Py_INCREF(base) # important to do this before stealing the reference below! + * PyArray_SetBaseObject(arr, base) + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1026 + * PyArray_SetBaseObject(arr, base) + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * base = PyArray_BASE(arr) + * if base is NULL: + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__pyx_v_arr) { + PyObject *__pyx_v_base; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("get_array_base", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1027 + * + * cdef inline object get_array_base(ndarray arr): + * base = PyArray_BASE(arr) # <<<<<<<<<<<<<< + * if base is NULL: + * return None + */ + __pyx_v_base = PyArray_BASE(__pyx_v_arr); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1028 + * cdef inline object get_array_base(ndarray arr): + * base = PyArray_BASE(arr) + * if base is NULL: # <<<<<<<<<<<<<< + * return None + * return base + */ + __pyx_t_1 = ((__pyx_v_base == NULL) != 0); + if (__pyx_t_1) { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1029 + * base = PyArray_BASE(arr) + * if base is NULL: + * return None # <<<<<<<<<<<<<< + * return base + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1028 + * cdef inline object get_array_base(ndarray arr): + * base = PyArray_BASE(arr) + * if base is NULL: # <<<<<<<<<<<<<< + * return None + * return base + */ + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1030 + * if base is NULL: + * return None + * return base # <<<<<<<<<<<<<< + * + * # Versions of the import_* functions which are more suitable for + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_base)); + __pyx_r = ((PyObject *)__pyx_v_base); + goto __pyx_L0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1026 + * PyArray_SetBaseObject(arr, base) + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * base = PyArray_BASE(arr) + * if base is NULL: + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* 
"../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1034 + * # Versions of the import_* functions which are more suitable for + * # Cython code. + * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< + * try: + * _import_array() + */ + +static CYTHON_INLINE int __pyx_f_5numpy_import_array(void) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("import_array", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1035 + * # Cython code. + * cdef inline int import_array() except -1: + * try: # <<<<<<<<<<<<<< + * _import_array() + * except Exception: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1036 + * cdef inline int import_array() except -1: + * try: + * _import_array() # <<<<<<<<<<<<<< + * except Exception: + * raise ImportError("numpy.core.multiarray failed to import") + */ + __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1036, __pyx_L3_error) + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1035 + * # Cython code. + * cdef inline int import_array() except -1: + * try: # <<<<<<<<<<<<<< + * _import_array() + * except Exception: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1037 + * try: + * _import_array() + * except Exception: # <<<<<<<<<<<<<< + * raise ImportError("numpy.core.multiarray failed to import") + * + */ + __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_4) { + __Pyx_AddTraceback("numpy.import_array", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(1, 1037, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1038 + * _import_array() + * except Exception: + * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_umath() except -1: + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1038, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(1, 1038, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1035 + * # Cython code. 
+ * cdef inline int import_array() except -1: + * try: # <<<<<<<<<<<<<< + * _import_array() + * except Exception: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1034 + * # Versions of the import_* functions which are more suitable for + * # Cython code. + * cdef inline int import_array() except -1: # <<<<<<<<<<<<<< + * try: + * _import_array() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.import_array", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1040 + * raise ImportError("numpy.core.multiarray failed to import") + * + * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + +static CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("import_umath", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1041 + * + * cdef inline int import_umath() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1042 + * cdef inline int import_umath() except -1: + * try: + * _import_umath() # <<<<<<<<<<<<<< + * except Exception: + * raise ImportError("numpy.core.umath failed to import") + */ + __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1042, __pyx_L3_error) + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1041 + * + * cdef inline int import_umath() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1043 + * try: + * _import_umath() + * except Exception: # <<<<<<<<<<<<<< + * raise ImportError("numpy.core.umath failed to import") + * + */ + __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_4) { + __Pyx_AddTraceback("numpy.import_umath", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(1, 1043, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + 
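/* [editorial note, not Cython output] __Pyx_GetException has just moved
 * the live exception (type, value, traceback) into __pyx_t_5..7, which
 * this handler owns while it raises the replacement ImportError below.
 * import_array/import_umath/import_ufunc all follow the same shape:
 * call numpy's C-API bootstrap and convert any failure into a plain
 * ImportError. A hand-written NumPy extension would typically get the
 * same effect from numpy's own macro; a minimal sketch, assuming the
 * NumPy headers are on the include path (module name "toy" is made up):
 *
 *   #include <Python.h>
 *   #include <numpy/arrayobject.h>
 *   static struct PyModuleDef toymod = {
 *       PyModuleDef_HEAD_INIT, "toy", NULL, -1, NULL
 *   };
 *   PyMODINIT_FUNC PyInit_toy(void) {
 *       import_array();  // on failure: sets ImportError, returns NULL
 *       return PyModule_Create(&toymod);
 *   }
 */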
__Pyx_GOTREF(__pyx_t_7); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1044 + * _import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_ufunc() except -1: + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1044, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(1, 1044, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1041 + * + * cdef inline int import_umath() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1040 + * raise ImportError("numpy.core.multiarray failed to import") + * + * cdef inline int import_umath() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.import_umath", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1046 + * raise ImportError("numpy.core.umath failed to import") + * + * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + +static CYTHON_INLINE int __pyx_f_5numpy_import_ufunc(void) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("import_ufunc", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1047 + * + * cdef inline int import_ufunc() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1048 + * cdef inline int import_ufunc() except -1: + * try: + * _import_umath() # <<<<<<<<<<<<<< + * except Exception: + * raise ImportError("numpy.core.umath failed to import") + */ + __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 1048, __pyx_L3_error) + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1047 + * + * cdef inline int import_ufunc() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + } + __Pyx_XDECREF(__pyx_t_1); 
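/* [editorial note, not Cython output] As the quoted .pxd source above
 * shows, import_ufunc() deliberately calls _import_umath(): numpy
 * exposes the ufunc C API through the umath module, so the same
 * bootstrap (and the same "numpy.core.umath failed to import" message)
 * is reused for all three entry points. */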
__pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1049 + * try: + * _import_umath() + * except Exception: # <<<<<<<<<<<<<< + * raise ImportError("numpy.core.umath failed to import") + */ + __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_4) { + __Pyx_AddTraceback("numpy.import_ufunc", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(1, 1049, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1050 + * _import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + */ + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 1050, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __PYX_ERR(1, 1050, __pyx_L5_except_error) + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1047 + * + * cdef inline int import_ufunc() except -1: + * try: # <<<<<<<<<<<<<< + * _import_umath() + * except Exception: + */ + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L1_error; + __pyx_L8_try_end:; + } + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1046 + * raise ImportError("numpy.core.umath failed to import") + * + * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.import_ufunc", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec_mesh_core_cython(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec_mesh_core_cython}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "mesh_core_cython", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE 
__attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_u_Format_string_allocated_too_shor, __pyx_k_Format_string_allocated_too_shor, sizeof(__pyx_k_Format_string_allocated_too_shor), 0, 1, 0, 0}, + {&__pyx_kp_u_Format_string_allocated_too_shor_2, __pyx_k_Format_string_allocated_too_shor_2, sizeof(__pyx_k_Format_string_allocated_too_shor_2), 0, 1, 0, 0}, + {&__pyx_n_s_ImportError, __pyx_k_ImportError, sizeof(__pyx_k_ImportError), 0, 0, 1, 1}, + {&__pyx_kp_u_Non_native_byte_order_not_suppor, __pyx_k_Non_native_byte_order_not_suppor, sizeof(__pyx_k_Non_native_byte_order_not_suppor), 0, 1, 0, 0}, + {&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1}, + {&__pyx_n_s_ValueError, __pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 0, 1, 1}, + {&__pyx_n_s_c, __pyx_k_c, sizeof(__pyx_k_c), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_colors, __pyx_k_colors, sizeof(__pyx_k_colors), 0, 0, 1, 1}, + {&__pyx_n_s_depth_buffer, __pyx_k_depth_buffer, sizeof(__pyx_k_depth_buffer), 0, 0, 1, 1}, + {&__pyx_n_s_get_normal, __pyx_k_get_normal, sizeof(__pyx_k_get_normal), 0, 0, 1, 1}, + {&__pyx_n_s_get_normal_core, __pyx_k_get_normal_core, sizeof(__pyx_k_get_normal_core), 0, 0, 1, 1}, + {&__pyx_n_s_h, __pyx_k_h, sizeof(__pyx_k_h), 0, 0, 1, 1}, + {&__pyx_n_s_image, __pyx_k_image, sizeof(__pyx_k_image), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_mesh_core_cython, __pyx_k_mesh_core_cython, sizeof(__pyx_k_mesh_core_cython), 0, 0, 1, 1}, + {&__pyx_kp_s_mesh_core_cython_pyx, __pyx_k_mesh_core_cython_pyx, sizeof(__pyx_k_mesh_core_cython_pyx), 0, 0, 1, 0}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_kp_u_ndarray_is_not_C_contiguous, __pyx_k_ndarray_is_not_C_contiguous, sizeof(__pyx_k_ndarray_is_not_C_contiguous), 0, 1, 0, 0}, + {&__pyx_kp_u_ndarray_is_not_Fortran_contiguou, __pyx_k_ndarray_is_not_Fortran_contiguou, sizeof(__pyx_k_ndarray_is_not_Fortran_contiguou), 0, 1, 0, 0}, + {&__pyx_n_s_normal, __pyx_k_normal, sizeof(__pyx_k_normal), 0, 0, 1, 1}, + {&__pyx_n_s_np, __pyx_k_np, sizeof(__pyx_k_np), 0, 0, 1, 1}, + {&__pyx_n_s_ntri, __pyx_k_ntri, sizeof(__pyx_k_ntri), 0, 0, 1, 1}, + {&__pyx_n_s_numpy, __pyx_k_numpy, sizeof(__pyx_k_numpy), 0, 0, 1, 1}, + {&__pyx_kp_s_numpy_core_multiarray_failed_to, __pyx_k_numpy_core_multiarray_failed_to, sizeof(__pyx_k_numpy_core_multiarray_failed_to), 0, 0, 1, 0}, + {&__pyx_kp_s_numpy_core_umath_failed_to_impor, __pyx_k_numpy_core_umath_failed_to_impor, sizeof(__pyx_k_numpy_core_umath_failed_to_impor), 0, 0, 1, 0}, + {&__pyx_n_s_nver, __pyx_k_nver, sizeof(__pyx_k_nver), 0, 0, 1, 1}, + {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, + {&__pyx_n_s_render_colors_core, __pyx_k_render_colors_core, sizeof(__pyx_k_render_colors_core), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_tri_normal, __pyx_k_tri_normal, sizeof(__pyx_k_tri_normal), 0, 0, 1, 1}, + {&__pyx_n_s_triangles, __pyx_k_triangles, sizeof(__pyx_k_triangles), 0, 0, 1, 1}, + {&__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_k_unknown_dtype_code_in_numpy_pxd, sizeof(__pyx_k_unknown_dtype_code_in_numpy_pxd), 0, 1, 0, 0}, + {&__pyx_n_s_ver_normal, __pyx_k_ver_normal, sizeof(__pyx_k_ver_normal), 
0, 0, 1, 1}, + {&__pyx_n_s_vertices, __pyx_k_vertices, sizeof(__pyx_k_vertices), 0, 0, 1, 1}, + {&__pyx_n_s_w, __pyx_k_w, sizeof(__pyx_k_w), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_ValueError = __Pyx_GetBuiltinName(__pyx_n_s_ValueError); if (!__pyx_builtin_ValueError) __PYX_ERR(1, 272, __pyx_L1_error) + __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(1, 285, __pyx_L1_error) + __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(1, 856, __pyx_L1_error) + __pyx_builtin_ImportError = __Pyx_GetBuiltinName(__pyx_n_s_ImportError); if (!__pyx_builtin_ImportError) __PYX_ERR(1, 1038, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":272 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_C_contiguous); if (unlikely(!__pyx_tuple_)) __PYX_ERR(1, 272, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple_); + __Pyx_GIVEREF(__pyx_tuple_); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":276 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_Fortran_contiguou); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 276, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__2); + __Pyx_GIVEREF(__pyx_tuple__2); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":306 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_tuple__3 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(1, 306, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__3); + __Pyx_GIVEREF(__pyx_tuple__3); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":856 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(1, 856, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__4); + __Pyx_GIVEREF(__pyx_tuple__4); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":880 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # 
Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor_2); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(1, 880, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__5); + __Pyx_GIVEREF(__pyx_tuple__5); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1038 + * _import_array() + * except Exception: + * raise ImportError("numpy.core.multiarray failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_umath() except -1: + */ + __pyx_tuple__6 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_multiarray_failed_to); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(1, 1038, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__6); + __Pyx_GIVEREF(__pyx_tuple__6); + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1044 + * _import_umath() + * except Exception: + * raise ImportError("numpy.core.umath failed to import") # <<<<<<<<<<<<<< + * + * cdef inline int import_ufunc() except -1: + */ + __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 1044, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); + + /* "mesh_core_cython.pyx":29 + * @cython.boundscheck(False) + * @cython.wraparound(False) + * def get_normal_core(np.ndarray[float, ndim=2, mode = "c"] normal not None, # <<<<<<<<<<<<<< + * np.ndarray[float, ndim=2, mode = "c"] tri_normal not None, + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + */ + __pyx_tuple__8 = PyTuple_Pack(4, __pyx_n_s_normal, __pyx_n_s_tri_normal, __pyx_n_s_triangles, __pyx_n_s_ntri); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(0, 29, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__8); + __Pyx_GIVEREF(__pyx_tuple__8); + __pyx_codeobj__9 = (PyObject*)__Pyx_PyCode_New(4, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_mesh_core_cython_pyx, __pyx_n_s_get_normal_core, 29, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__9)) __PYX_ERR(0, 29, __pyx_L1_error) + + /* "mesh_core_cython.pyx":41 + * @cython.boundscheck(False) + * @cython.wraparound(False) + * def render_colors_core(np.ndarray[float, ndim=3, mode = "c"] image not None, # <<<<<<<<<<<<<< + * np.ndarray[float, ndim=2, mode = "c"] vertices not None, + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + */ + __pyx_tuple__10 = PyTuple_Pack(10, __pyx_n_s_image, __pyx_n_s_vertices, __pyx_n_s_triangles, __pyx_n_s_colors, __pyx_n_s_depth_buffer, __pyx_n_s_nver, __pyx_n_s_ntri, __pyx_n_s_h, __pyx_n_s_w, __pyx_n_s_c); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(0, 41, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__10); + __Pyx_GIVEREF(__pyx_tuple__10); + __pyx_codeobj__11 = (PyObject*)__Pyx_PyCode_New(10, 0, 10, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__10, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_mesh_core_cython_pyx, __pyx_n_s_render_colors_core, 41, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__11)) __PYX_ERR(0, 41, __pyx_L1_error) + + /* "mesh_core_cython.pyx":59 + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * @cython.wraparound(False) # turn off negative index wrapping for entire function + * def get_normal(np.ndarray[float, ndim=2, mode = "c"] ver_normal not None, # <<<<<<<<<<<<<< + * np.ndarray[float, ndim=2, mode = "c"] vertices not 
None, + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + */ + __pyx_tuple__12 = PyTuple_Pack(5, __pyx_n_s_ver_normal, __pyx_n_s_vertices, __pyx_n_s_triangles, __pyx_n_s_nver, __pyx_n_s_ntri); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 59, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__12); + __Pyx_GIVEREF(__pyx_tuple__12); + __pyx_codeobj__13 = (PyObject*)__Pyx_PyCode_New(5, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__12, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_mesh_core_cython_pyx, __pyx_n_s_get_normal, 59, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__13)) __PYX_ERR(0, 59, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", + #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyImport_ImportModule("numpy"); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 206, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_5numpy_dtype = __Pyx_ImportType(__pyx_t_1, "numpy", "dtype", sizeof(PyArray_Descr), __Pyx_ImportType_CheckSize_Ignore); + if (!__pyx_ptype_5numpy_dtype) __PYX_ERR(1, 206, __pyx_L1_error) + 
__pyx_ptype_5numpy_flatiter = __Pyx_ImportType(__pyx_t_1, "numpy", "flatiter", sizeof(PyArrayIterObject), __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_5numpy_flatiter) __PYX_ERR(1, 229, __pyx_L1_error) + __pyx_ptype_5numpy_broadcast = __Pyx_ImportType(__pyx_t_1, "numpy", "broadcast", sizeof(PyArrayMultiIterObject), __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_5numpy_broadcast) __PYX_ERR(1, 233, __pyx_L1_error) + __pyx_ptype_5numpy_ndarray = __Pyx_ImportType(__pyx_t_1, "numpy", "ndarray", sizeof(PyArrayObject), __Pyx_ImportType_CheckSize_Ignore); + if (!__pyx_ptype_5numpy_ndarray) __PYX_ERR(1, 242, __pyx_L1_error) + __pyx_ptype_5numpy_ufunc = __Pyx_ImportType(__pyx_t_1, "numpy", "ufunc", sizeof(PyUFuncObject), __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_5numpy_ufunc) __PYX_ERR(1, 918, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#if PY_MAJOR_VERSION < 3 +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC void +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#else +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC initmesh_core_cython(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC initmesh_core_cython(void) +#else +__Pyx_PyMODINIT_FUNC PyInit_mesh_core_cython(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit_mesh_core_cython(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? 
-1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { + result = PyDict_SetItemString(moddict, to_name, value); + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec_mesh_core_cython(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module 'mesh_core_cython' has already been imported. 
Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_mesh_core_cython(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("mesh_core_cython", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_mesh_core_cython) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "mesh_core_cython")) { + if (unlikely(PyDict_SetItemString(modules, "mesh_core_cython", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + (void)__Pyx_modinit_type_init_code(); + if (unlikely(__Pyx_modinit_type_import_code() != 0)) goto __pyx_L1_error; + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "mesh_core_cython.pyx":1 + * import numpy as np # <<<<<<<<<<<<<< + * cimport numpy as np + * from libcpp.string cimport string + */ + __pyx_t_1 = __Pyx_Import(__pyx_n_s_numpy, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_np, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "mesh_core_cython.pyx":7 + * + * # use the Numpy-C-API from Cython + * np.import_array() # <<<<<<<<<<<<<< + * + * # cdefine the signature of our c function + */ + __pyx_t_2 = __pyx_f_5numpy_import_array(); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 7, __pyx_L1_error) + + /* "mesh_core_cython.pyx":29 + * @cython.boundscheck(False) + * @cython.wraparound(False) + * def get_normal_core(np.ndarray[float, ndim=2, mode = "c"] normal not None, # <<<<<<<<<<<<<< + * np.ndarray[float, ndim=2, mode = "c"] tri_normal not None, + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_16mesh_core_cython_1get_normal_core, NULL, __pyx_n_s_mesh_core_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 29, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_normal_core, __pyx_t_1) < 0) __PYX_ERR(0, 29, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "mesh_core_cython.pyx":41 + * @cython.boundscheck(False) + * @cython.wraparound(False) + * def render_colors_core(np.ndarray[float, ndim=3, mode = "c"] image not None, # <<<<<<<<<<<<<< + * np.ndarray[float, ndim=2, mode = "c"] vertices not None, + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_16mesh_core_cython_3render_colors_core, NULL, __pyx_n_s_mesh_core_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 41, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_render_colors_core, __pyx_t_1) < 0) __PYX_ERR(0, 41, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + 
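/* [editorial note, not Cython output] This stretch of the module exec
 * code is where the three Python-visible functions defined in
 * mesh_core_cython.pyx (get_normal_core, render_colors_core and, just
 * below, get_normal) come to life: each PyMethodDef is wrapped in a
 * function object with PyCFunction_NewEx and stored in the module dict
 * __pyx_d. A minimal hand-written analogue, assuming a module dict d is
 * in scope (names "toy"/"toy_impl" are made up):
 *
 *   #include <Python.h>
 *   static PyObject *toy_impl(PyObject *self, PyObject *unused) {
 *       Py_RETURN_NONE;  // placeholder body
 *   }
 *   static PyMethodDef toy_def =
 *       {"toy", (PyCFunction)toy_impl, METH_NOARGS, NULL};
 *   static int register_toy(PyObject *d) {
 *       PyObject *fn = PyCFunction_NewEx(&toy_def, NULL, NULL);
 *       if (!fn) return -1;
 *       if (PyDict_SetItemString(d, "toy", fn) < 0) {
 *           Py_DECREF(fn);
 *           return -1;  // propagate, as the generated code does
 *       }
 *       Py_DECREF(fn);
 *       return 0;
 *   }
 */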
/* "mesh_core_cython.pyx":59 + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * @cython.wraparound(False) # turn off negative index wrapping for entire function + * def get_normal(np.ndarray[float, ndim=2, mode = "c"] ver_normal not None, # <<<<<<<<<<<<<< + * np.ndarray[float, ndim=2, mode = "c"] vertices not None, + * np.ndarray[int, ndim=2, mode="c"] triangles not None, + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_16mesh_core_cython_5get_normal, NULL, __pyx_n_s_mesh_core_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 59, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_normal, __pyx_t_1) < 0) __PYX_ERR(0, 59, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "mesh_core_cython.pyx":1 + * import numpy as np # <<<<<<<<<<<<<< + * cimport numpy as np + * from libcpp.string cimport string + */ + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "../../../../../../lustre/liujihao/.local/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd":1046 + * raise ImportError("numpy.core.umath failed to import") + * + * cdef inline int import_ufunc() except -1: # <<<<<<<<<<<<<< + * try: + * _import_umath() + */ + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init mesh_core_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_CLEAR(__pyx_m); + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init mesh_core_cython"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* ArgTypeTest */ +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) +{ + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + else if (exact) { + #if PY_MAJOR_VERSION == 2 + if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(__Pyx_TypeCheck(obj, type))) return 1; + } + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); + return 0; +} + +/* IsLittleEndian */ +static CYTHON_INLINE int __Pyx_Is_Little_Endian(void) +{ + union { + uint32_t u32; + uint8_t u8[4]; + } S; + S.u32 = 0x01020304; + return S.u8[0] == 4; +} + +/* BufferFormatCheck */ +static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx, + __Pyx_BufFmt_StackElem* stack, + __Pyx_TypeInfo* type) { + stack[0].field = &ctx->root; + stack[0].parent_offset = 0; + ctx->root.type = type; + ctx->root.name = "buffer dtype"; + ctx->root.offset = 0; + ctx->head = stack; + ctx->head->field = &ctx->root; + ctx->fmt_offset = 0; + ctx->head->parent_offset = 0; + ctx->new_packmode = '@'; + ctx->enc_packmode = '@'; + ctx->new_count = 1; + ctx->enc_count = 0; + ctx->enc_type = 0; + ctx->is_complex = 0; + ctx->is_valid_array = 0; + ctx->struct_alignment = 0; + while (type->typegroup == 'S') { + ++ctx->head; + ctx->head->field = type->fields; + ctx->head->parent_offset = 0; + type = type->fields->type; + } +} +static int __Pyx_BufFmt_ParseNumber(const char** ts) { + int count; + const char* t = *ts; + if (*t < '0' || *t > '9') { + return -1; + } else { + count = *t++ - '0'; + while (*t >= '0' && *t <= '9') { + count *= 10; + count += *t++ - '0'; + } + } + *ts = t; + return count; +} +static int __Pyx_BufFmt_ExpectNumber(const char **ts) { + int number = __Pyx_BufFmt_ParseNumber(ts); + if (number == -1) + PyErr_Format(PyExc_ValueError,\ + "Does not understand character buffer dtype format string ('%c')", **ts); + return number; +} +static void __Pyx_BufFmt_RaiseUnexpectedChar(char ch) { + PyErr_Format(PyExc_ValueError, + "Unexpected format string character: '%c'", ch); +} +static const char* __Pyx_BufFmt_DescribeTypeChar(char ch, int is_complex) { + switch (ch) { + case 'c': return "'char'"; + case 'b': return "'signed char'"; + case 'B': return "'unsigned char'"; + case 'h': return "'short'"; + case 'H': return "'unsigned short'"; + case 'i': return "'int'"; + case 'I': return "'unsigned int'"; + case 'l': return "'long'"; + case 'L': return "'unsigned long'"; + case 'q': return "'long long'"; + case 'Q': return "'unsigned long long'"; + case 'f': return (is_complex ? 
"'complex float'" : "'float'"); + case 'd': return (is_complex ? "'complex double'" : "'double'"); + case 'g': return (is_complex ? "'complex long double'" : "'long double'"); + case 'T': return "a struct"; + case 'O': return "Python object"; + case 'P': return "a pointer"; + case 's': case 'p': return "a string"; + case 0: return "end"; + default: return "unparseable format string"; + } +} +static size_t __Pyx_BufFmt_TypeCharToStandardSize(char ch, int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return 2; + case 'i': case 'I': case 'l': case 'L': return 4; + case 'q': case 'Q': return 8; + case 'f': return (is_complex ? 8 : 4); + case 'd': return (is_complex ? 16 : 8); + case 'g': { + PyErr_SetString(PyExc_ValueError, "Python does not define a standard format string size for long double ('g').."); + return 0; + } + case 'O': case 'P': return sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +static size_t __Pyx_BufFmt_TypeCharToNativeSize(char ch, int is_complex) { + switch (ch) { + case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(short); + case 'i': case 'I': return sizeof(int); + case 'l': case 'L': return sizeof(long); + #ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(PY_LONG_LONG); + #endif + case 'f': return sizeof(float) * (is_complex ? 2 : 1); + case 'd': return sizeof(double) * (is_complex ? 2 : 1); + case 'g': return sizeof(long double) * (is_complex ? 2 : 1); + case 'O': case 'P': return sizeof(void*); + default: { + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } + } +} +typedef struct { char c; short x; } __Pyx_st_short; +typedef struct { char c; int x; } __Pyx_st_int; +typedef struct { char c; long x; } __Pyx_st_long; +typedef struct { char c; float x; } __Pyx_st_float; +typedef struct { char c; double x; } __Pyx_st_double; +typedef struct { char c; long double x; } __Pyx_st_longdouble; +typedef struct { char c; void *x; } __Pyx_st_void_p; +#ifdef HAVE_LONG_LONG +typedef struct { char c; PY_LONG_LONG x; } __Pyx_st_longlong; +#endif +static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, CYTHON_UNUSED int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(__Pyx_st_short) - sizeof(short); + case 'i': case 'I': return sizeof(__Pyx_st_int) - sizeof(int); + case 'l': case 'L': return sizeof(__Pyx_st_long) - sizeof(long); +#ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(__Pyx_st_longlong) - sizeof(PY_LONG_LONG); +#endif + case 'f': return sizeof(__Pyx_st_float) - sizeof(float); + case 'd': return sizeof(__Pyx_st_double) - sizeof(double); + case 'g': return sizeof(__Pyx_st_longdouble) - sizeof(long double); + case 'P': case 'O': return sizeof(__Pyx_st_void_p) - sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +/* These are for computing the padding at the end of the struct to align + on the first member of the struct. This will probably the same as above, + but we don't have any guarantees. 
+ */ +typedef struct { short x; char c; } __Pyx_pad_short; +typedef struct { int x; char c; } __Pyx_pad_int; +typedef struct { long x; char c; } __Pyx_pad_long; +typedef struct { float x; char c; } __Pyx_pad_float; +typedef struct { double x; char c; } __Pyx_pad_double; +typedef struct { long double x; char c; } __Pyx_pad_longdouble; +typedef struct { void *x; char c; } __Pyx_pad_void_p; +#ifdef HAVE_LONG_LONG +typedef struct { PY_LONG_LONG x; char c; } __Pyx_pad_longlong; +#endif +static size_t __Pyx_BufFmt_TypeCharToPadding(char ch, CYTHON_UNUSED int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(__Pyx_pad_short) - sizeof(short); + case 'i': case 'I': return sizeof(__Pyx_pad_int) - sizeof(int); + case 'l': case 'L': return sizeof(__Pyx_pad_long) - sizeof(long); +#ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(__Pyx_pad_longlong) - sizeof(PY_LONG_LONG); +#endif + case 'f': return sizeof(__Pyx_pad_float) - sizeof(float); + case 'd': return sizeof(__Pyx_pad_double) - sizeof(double); + case 'g': return sizeof(__Pyx_pad_longdouble) - sizeof(long double); + case 'P': case 'O': return sizeof(__Pyx_pad_void_p) - sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +static char __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) { + switch (ch) { + case 'c': + return 'H'; + case 'b': case 'h': case 'i': + case 'l': case 'q': case 's': case 'p': + return 'I'; + case 'B': case 'H': case 'I': case 'L': case 'Q': + return 'U'; + case 'f': case 'd': case 'g': + return (is_complex ? 'C' : 'R'); + case 'O': + return 'O'; + case 'P': + return 'P'; + default: { + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } + } +} +static void __Pyx_BufFmt_RaiseExpected(__Pyx_BufFmt_Context* ctx) { + if (ctx->head == NULL || ctx->head->field == &ctx->root) { + const char* expected; + const char* quote; + if (ctx->head == NULL) { + expected = "end"; + quote = ""; + } else { + expected = ctx->head->field->type->name; + quote = "'"; + } + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch, expected %s%s%s but got %s", + quote, expected, quote, + __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex)); + } else { + __Pyx_StructField* field = ctx->head->field; + __Pyx_StructField* parent = (ctx->head - 1)->field; + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch, expected '%s' but got %s in '%s.%s'", + field->type->name, __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex), + parent->type->name, field->name); + } +} +static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) { + char group; + size_t size, offset, arraysize = 1; + if (ctx->enc_type == 0) return 0; + if (ctx->head->field->type->arraysize[0]) { + int i, ndim = 0; + if (ctx->enc_type == 's' || ctx->enc_type == 'p') { + ctx->is_valid_array = ctx->head->field->type->ndim == 1; + ndim = 1; + if (ctx->enc_count != ctx->head->field->type->arraysize[0]) { + PyErr_Format(PyExc_ValueError, + "Expected a dimension of size %zu, got %zu", + ctx->head->field->type->arraysize[0], ctx->enc_count); + return -1; + } + } + if (!ctx->is_valid_array) { + PyErr_Format(PyExc_ValueError, "Expected %d dimensions, got %d", + ctx->head->field->type->ndim, ndim); + return -1; + } + for (i = 0; i < ctx->head->field->type->ndim; i++) { + arraysize *= ctx->head->field->type->arraysize[i]; + } + ctx->is_valid_array = 0; + ctx->enc_count = 1; + } + group = __Pyx_BufFmt_TypeCharToGroup(ctx->enc_type, 
ctx->is_complex); + do { + __Pyx_StructField* field = ctx->head->field; + __Pyx_TypeInfo* type = field->type; + if (ctx->enc_packmode == '@' || ctx->enc_packmode == '^') { + size = __Pyx_BufFmt_TypeCharToNativeSize(ctx->enc_type, ctx->is_complex); + } else { + size = __Pyx_BufFmt_TypeCharToStandardSize(ctx->enc_type, ctx->is_complex); + } + if (ctx->enc_packmode == '@') { + size_t align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex); + size_t align_mod_offset; + if (align_at == 0) return -1; + align_mod_offset = ctx->fmt_offset % align_at; + if (align_mod_offset > 0) ctx->fmt_offset += align_at - align_mod_offset; + if (ctx->struct_alignment == 0) + ctx->struct_alignment = __Pyx_BufFmt_TypeCharToPadding(ctx->enc_type, + ctx->is_complex); + } + if (type->size != size || type->typegroup != group) { + if (type->typegroup == 'C' && type->fields != NULL) { + size_t parent_offset = ctx->head->parent_offset + field->offset; + ++ctx->head; + ctx->head->field = type->fields; + ctx->head->parent_offset = parent_offset; + continue; + } + if ((type->typegroup == 'H' || group == 'H') && type->size == size) { + } else { + __Pyx_BufFmt_RaiseExpected(ctx); + return -1; + } + } + offset = ctx->head->parent_offset + field->offset; + if (ctx->fmt_offset != offset) { + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch; next field is at offset %" CYTHON_FORMAT_SSIZE_T "d but %" CYTHON_FORMAT_SSIZE_T "d expected", + (Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset); + return -1; + } + ctx->fmt_offset += size; + if (arraysize) + ctx->fmt_offset += (arraysize - 1) * size; + --ctx->enc_count; + while (1) { + if (field == &ctx->root) { + ctx->head = NULL; + if (ctx->enc_count != 0) { + __Pyx_BufFmt_RaiseExpected(ctx); + return -1; + } + break; + } + ctx->head->field = ++field; + if (field->type == NULL) { + --ctx->head; + field = ctx->head->field; + continue; + } else if (field->type->typegroup == 'S') { + size_t parent_offset = ctx->head->parent_offset + field->offset; + if (field->type->fields->type == NULL) continue; + field = field->type->fields; + ++ctx->head; + ctx->head->field = field; + ctx->head->parent_offset = parent_offset; + break; + } else { + break; + } + } + } while (ctx->enc_count); + ctx->enc_type = 0; + ctx->is_complex = 0; + return 0; +} +static PyObject * +__pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp) +{ + const char *ts = *tsp; + int i = 0, number; + int ndim = ctx->head->field->type->ndim; +; + ++ts; + if (ctx->new_count != 1) { + PyErr_SetString(PyExc_ValueError, + "Cannot handle repeated arrays in format string"); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + while (*ts && *ts != ')') { + switch (*ts) { + case ' ': case '\f': case '\r': case '\n': case '\t': case '\v': continue; + default: break; + } + number = __Pyx_BufFmt_ExpectNumber(&ts); + if (number == -1) return NULL; + if (i < ndim && (size_t) number != ctx->head->field->type->arraysize[i]) + return PyErr_Format(PyExc_ValueError, + "Expected a dimension of size %zu, got %d", + ctx->head->field->type->arraysize[i], number); + if (*ts != ',' && *ts != ')') + return PyErr_Format(PyExc_ValueError, + "Expected a comma in format string, got '%c'", *ts); + if (*ts == ',') ts++; + i++; + } + if (i != ndim) + return PyErr_Format(PyExc_ValueError, "Expected %d dimension(s), got %d", + ctx->head->field->type->ndim, i); + if (!*ts) { + PyErr_SetString(PyExc_ValueError, + "Unexpected end of format string, expected ')'"); + return NULL; + } + 
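+    /* Illustrative note (example values, not from the upstream source): for
+       a buffer item declared as float[2][3], ndim is 2 and the loop above has
+       just consumed a "(2,3)" group from the PEP 3118 format string, checking
+       each number against type->arraysize[i]; any mismatch already returned
+       NULL, so the shape can be marked valid below. */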
ctx->is_valid_array = 1; + ctx->new_count = 1; + *tsp = ++ts; + return Py_None; +} +static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts) { + int got_Z = 0; + while (1) { + switch(*ts) { + case 0: + if (ctx->enc_type != 0 && ctx->head == NULL) { + __Pyx_BufFmt_RaiseExpected(ctx); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + if (ctx->head != NULL) { + __Pyx_BufFmt_RaiseExpected(ctx); + return NULL; + } + return ts; + case ' ': + case '\r': + case '\n': + ++ts; + break; + case '<': + if (!__Pyx_Is_Little_Endian()) { + PyErr_SetString(PyExc_ValueError, "Little-endian buffer not supported on big-endian compiler"); + return NULL; + } + ctx->new_packmode = '='; + ++ts; + break; + case '>': + case '!': + if (__Pyx_Is_Little_Endian()) { + PyErr_SetString(PyExc_ValueError, "Big-endian buffer not supported on little-endian compiler"); + return NULL; + } + ctx->new_packmode = '='; + ++ts; + break; + case '=': + case '@': + case '^': + ctx->new_packmode = *ts++; + break; + case 'T': + { + const char* ts_after_sub; + size_t i, struct_count = ctx->new_count; + size_t struct_alignment = ctx->struct_alignment; + ctx->new_count = 1; + ++ts; + if (*ts != '{') { + PyErr_SetString(PyExc_ValueError, "Buffer acquisition: Expected '{' after 'T'"); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_type = 0; + ctx->enc_count = 0; + ctx->struct_alignment = 0; + ++ts; + ts_after_sub = ts; + for (i = 0; i != struct_count; ++i) { + ts_after_sub = __Pyx_BufFmt_CheckString(ctx, ts); + if (!ts_after_sub) return NULL; + } + ts = ts_after_sub; + if (struct_alignment) ctx->struct_alignment = struct_alignment; + } + break; + case '}': + { + size_t alignment = ctx->struct_alignment; + ++ts; + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_type = 0; + if (alignment && ctx->fmt_offset % alignment) { + ctx->fmt_offset += alignment - (ctx->fmt_offset % alignment); + } + } + return ts; + case 'x': + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->fmt_offset += ctx->new_count; + ctx->new_count = 1; + ctx->enc_count = 0; + ctx->enc_type = 0; + ctx->enc_packmode = ctx->new_packmode; + ++ts; + break; + case 'Z': + got_Z = 1; + ++ts; + if (*ts != 'f' && *ts != 'd' && *ts != 'g') { + __Pyx_BufFmt_RaiseUnexpectedChar('Z'); + return NULL; + } + CYTHON_FALLTHROUGH; + case 'c': case 'b': case 'B': case 'h': case 'H': case 'i': case 'I': + case 'l': case 'L': case 'q': case 'Q': + case 'f': case 'd': case 'g': + case 'O': case 'p': + if (ctx->enc_type == *ts && got_Z == ctx->is_complex && + ctx->enc_packmode == ctx->new_packmode) { + ctx->enc_count += ctx->new_count; + ctx->new_count = 1; + got_Z = 0; + ++ts; + break; + } + CYTHON_FALLTHROUGH; + case 's': + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_count = ctx->new_count; + ctx->enc_packmode = ctx->new_packmode; + ctx->enc_type = *ts; + ctx->is_complex = got_Z; + ++ts; + ctx->new_count = 1; + got_Z = 0; + break; + case ':': + ++ts; + while(*ts != ':') ++ts; + ++ts; + break; + case '(': + if (!__pyx_buffmt_parse_array(ctx, &ts)) return NULL; + break; + default: + { + int number = __Pyx_BufFmt_ExpectNumber(&ts); + if (number == -1) return NULL; + ctx->new_count = (size_t)number; + } + } + } +} + +/* BufferGetAndValidate */ + static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info) { + if (unlikely(info->buf == NULL)) return; + if (info->suboffsets == __Pyx_minusones) info->suboffsets = NULL; + 
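+   /* Note: __Pyx_minusones is the module's shared array of -1 sentinels that
+      __Pyx__GetBufferAndValidate() below substitutes when an exporter leaves
+      suboffsets NULL; resetting the field here returns the view to exactly
+      the state the exporter produced before it is released. */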
__Pyx_ReleaseBuffer(info); +} +static void __Pyx_ZeroBuffer(Py_buffer* buf) { + buf->buf = NULL; + buf->obj = NULL; + buf->strides = __Pyx_zeros; + buf->shape = __Pyx_zeros; + buf->suboffsets = __Pyx_minusones; +} +static int __Pyx__GetBufferAndValidate( + Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags, + int nd, int cast, __Pyx_BufFmt_StackElem* stack) +{ + buf->buf = NULL; + if (unlikely(__Pyx_GetBuffer(obj, buf, flags) == -1)) { + __Pyx_ZeroBuffer(buf); + return -1; + } + if (unlikely(buf->ndim != nd)) { + PyErr_Format(PyExc_ValueError, + "Buffer has wrong number of dimensions (expected %d, got %d)", + nd, buf->ndim); + goto fail; + } + if (!cast) { + __Pyx_BufFmt_Context ctx; + __Pyx_BufFmt_Init(&ctx, stack, dtype); + if (!__Pyx_BufFmt_CheckString(&ctx, buf->format)) goto fail; + } + if (unlikely((size_t)buf->itemsize != dtype->size)) { + PyErr_Format(PyExc_ValueError, + "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "d byte%s) does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "d byte%s)", + buf->itemsize, (buf->itemsize > 1) ? "s" : "", + dtype->name, (Py_ssize_t)dtype->size, (dtype->size > 1) ? "s" : ""); + goto fail; + } + if (buf->suboffsets == NULL) buf->suboffsets = __Pyx_minusones; + return 0; +fail:; + __Pyx_SafeReleaseBuffer(buf); + return -1; +} + +/* PyErrFetchRestore */ + #if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* PyObjectGetAttrStr */ + #if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ + static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* PyObjectCall */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* 
RaiseException */ + #if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, 
&tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* PyCFunctionFastCall */ + #if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); + } +} +#endif + +/* PyFunctionFastCall */ + #if CYTHON_FAST_PYCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyObjectCallMethO */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallOneArg */ + #if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + 
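+    /* The temporary 1-tuple owns its own reference to arg and is dropped
+       right after tp_call returns; the __Pyx_PyObject_CallOneArg dispatcher
+       below only falls back to this boxing path when neither the PyFunction
+       fastcall nor the METH_O / METH_FASTCALL shortcuts apply. */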
Py_DECREF(args); + return result; +} +#endif + +/* DictGetItem */ + #if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY +static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) { + PyObject *value; + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (!PyErr_Occurred()) { + if (unlikely(PyTuple_Check(key))) { + PyObject* args = PyTuple_Pack(1, key); + if (likely(args)) { + PyErr_SetObject(PyExc_KeyError, args); + Py_DECREF(args); + } + } else { + PyErr_SetObject(PyExc_KeyError, key); + } + } + return NULL; + } + Py_INCREF(value); + return value; +} +#endif + +/* RaiseTooManyValuesToUnpack */ + static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +/* RaiseNeedMoreValuesToUnpack */ + static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", + index, (index == 1) ? "" : "s"); +} + +/* RaiseNoneIterError */ + static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); +} + +/* ExtTypeTest */ + static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(__Pyx_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/* GetTopmostException */ + #if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * +__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) +{ + _PyErr_StackItem *exc_info = tstate->exc_info; + while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && + exc_info->previous_item != NULL) + { + exc_info = exc_info->previous_item; + } + return exc_info; +} +#endif + +/* SaveResetException */ + #if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + *type = exc_info->exc_type; + *value = exc_info->exc_value; + *tb = exc_info->exc_traceback; + #else + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + #endif + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); +} +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = type; + exc_info->exc_value = value; + exc_info->exc_traceback = tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +#endif + +/* PyErrExceptionMatches */ + #if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i<n; i++) { + if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1; + } +#endif + for (i=0; i<n; i++) { + if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1; + } + return 0; +} +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) { + PyObject *exc_type = tstate->curexc_type; + if
(exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; + if (unlikely(PyTuple_Check(err))) + return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); +} +#endif + +/* GetException */ + #if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) +#endif +{ + PyObject *local_type, *local_value, *local_tb; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + #if CYTHON_USE_EXC_INFO_STACK + { + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = local_type; + exc_info->exc_value = local_value; + exc_info->exc_traceback = local_tb; + } + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* TypeImport */ + #ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, + size_t size, enum __Pyx_ImportType_CheckSize check_size) +{ + PyObject *result = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + result = PyObject_GetAttrString(module, class_name); + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if ((size_t)basicsize < size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. 
" + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(result); + return NULL; +} +#endif + +/* Import */ + static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* PyDictVersioning */ + #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { + PyObject **dictptr = NULL; + Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; + if (offset) { +#if CYTHON_COMPILING_IN_CPYTHON + dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); +#else + dictptr = _PyObject_GetDictPtr(obj); +#endif + } + return (dictptr && *dictptr) ? 
__PYX_GET_DICT_VERSION(*dictptr) : 0; +} +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) + return 0; + return obj_dict_version == __Pyx_get_object_dict_version(obj); +} +#endif + +/* CLineInTraceback */ + #ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ + static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == 
code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ + #include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +#if PY_MAJOR_VERSION < 3 +static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags) { + if (PyObject_CheckBuffer(obj)) return PyObject_GetBuffer(obj, view, flags); + if (__Pyx_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) return __pyx_pw_5numpy_7ndarray_1__getbuffer__(obj, view, flags); + PyErr_Format(PyExc_TypeError, "'%.200s' does not have the buffer interface", Py_TYPE(obj)->tp_name); + return -1; +} +static void __Pyx_ReleaseBuffer(Py_buffer *view) { + PyObject *obj = view->obj; + if (!obj) return; + if (PyObject_CheckBuffer(obj)) { + PyBuffer_Release(view); + return; + } + if ((0)) {} + else if (__Pyx_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) __pyx_pw_5numpy_7ndarray_3__releasebuffer__(obj, view); + view->obj = NULL; + Py_DECREF(obj); +} +#endif + + + /* CIntFromPyVerify */ + #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* Declarations */ + #if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return ::std::complex< float >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return x + y*(__pyx_t_float_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + __pyx_t_float_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +/* Arithmetic */ + #if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + #if 1 + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + if (b.imag == 0) { + return 
__pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.real); + } else if (fabsf(b.real) >= fabsf(b.imag)) { + if (b.real == 0 && b.imag == 0) { + return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.imag); + } else { + float r = b.imag / b.real; + float s = (float)(1.0) / (b.real + b.imag * r); + return __pyx_t_float_complex_from_parts( + (a.real + a.imag * r) * s, (a.imag - a.real * r) * s); + } + } else { + float r = b.real / b.imag; + float s = (float)(1.0) / (b.imag + b.real * r); + return __pyx_t_float_complex_from_parts( + (a.real * r + a.imag) * s, (a.imag * r - a.real) * s); + } + } + #else + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + if (b.imag == 0) { + return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.real); + } else { + float denom = b.real * b.real + b.imag * b.imag; + return __pyx_t_float_complex_from_parts( + (a.real * b.real + a.imag * b.imag) / denom, + (a.imag * b.real - a.real * b.imag) / denom); + } + } + #endif + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero_float(__pyx_t_float_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrtf(z.real*z.real + z.imag*z.imag); + #else + return hypotf(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_pow_float(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + float r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + float denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prod_float(a, a); + return __Pyx_c_prod_float(a, a); + case 3: + z = __Pyx_c_prod_float(a, a); + return __Pyx_c_prod_float(z, a); + case 4: + z = __Pyx_c_prod_float(a, a); + return __Pyx_c_prod_float(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } else if (b.imag == 0) { + z.real = powf(a.real, b.real); + z.imag = 0; + return z; + } else if (a.real > 0) { + r = a.real; + theta = 0; + } else { + r = -a.real; + theta = atan2f(0.0, -1.0); + } + } else { + r = __Pyx_c_abs_float(a); + theta = atan2f(a.imag, a.real); + } + lnr = logf(r); + z_r = expf(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cosf(z_theta); + z.imag = z_r * sinf(z_theta); + return z; + } + #endif +#endif + +/* Declarations */ + #if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return ::std::complex< double >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return x + y*(__pyx_t_double_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + __pyx_t_double_complex z; + z.real = x; + z.imag = y; + 
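+  /* Struct fallback: with neither C99 _Complex nor std::complex available, a
+     complex number is represented as a plain {real, imag} pair, and the
+     Arithmetic helpers that follow spell out every operation on the two
+     fields explicitly. */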
return z; + } +#endif + +/* Arithmetic */ + #if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + #if 1 + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + if (b.imag == 0) { + return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.real); + } else if (fabs(b.real) >= fabs(b.imag)) { + if (b.real == 0 && b.imag == 0) { + return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.imag); + } else { + double r = b.imag / b.real; + double s = (double)(1.0) / (b.real + b.imag * r); + return __pyx_t_double_complex_from_parts( + (a.real + a.imag * r) * s, (a.imag - a.real * r) * s); + } + } else { + double r = b.real / b.imag; + double s = (double)(1.0) / (b.imag + b.real * r); + return __pyx_t_double_complex_from_parts( + (a.real * r + a.imag) * s, (a.imag * r - a.real) * s); + } + } + #else + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + if (b.imag == 0) { + return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.real); + } else { + double denom = b.real * b.real + b.imag * b.imag; + return __pyx_t_double_complex_from_parts( + (a.real * b.real + a.imag * b.imag) / denom, + (a.imag * b.real - a.real * b.imag) / denom); + } + } + #endif + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrt(z.real*z.real + z.imag*z.imag); + #else + return hypot(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + double r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + double denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prod_double(a, a); + return __Pyx_c_prod_double(a, a); + case 3: + z = __Pyx_c_prod_double(a, a); + return __Pyx_c_prod_double(z, a); + case 4: + z = __Pyx_c_prod_double(a, a); + return 
__Pyx_c_prod_double(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } else if (b.imag == 0) { + z.real = pow(a.real, b.real); + z.imag = 0; + return z; + } else if (a.real > 0) { + r = a.real; + theta = 0; + } else { + r = -a.real; + theta = atan2(0.0, -1.0); + } + } else { + r = __Pyx_c_abs_double(a); + theta = atan2(a.imag, a.real); + } + lnr = log(r); + z_r = exp(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cos(z_theta); + z.imag = z_r * sin(z_theta); + return z; + } + #endif +#endif + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { + const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(int), + little, !is_unsigned); + } +} + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value) { + const enum NPY_TYPES neg_one = (enum NPY_TYPES) ((enum NPY_TYPES) 0 - (enum NPY_TYPES) 1), const_zero = (enum NPY_TYPES) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(enum NPY_TYPES) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(enum NPY_TYPES) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(enum NPY_TYPES) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(enum NPY_TYPES) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(enum NPY_TYPES) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(enum NPY_TYPES), + little, !is_unsigned); + } +} + +/* CIntFromPy */ + static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * 
PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned 
long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + 
return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPy */ + static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } 
else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return 
val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* FastTypeChecks */ + #if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? 
__Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;
+    if (!res) {
+        res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
+    }
+    return res;
+}
+#endif
+static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+    Py_ssize_t i, n;
+    assert(PyExceptionClass_Check(exc_type));
+    n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+    for (i=0; i<n; i++) {
+        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
+    }
+#endif
+    for (i=0; i<n; i++) {
+        PyObject *t = PyTuple_GET_ITEM(tuple, i);
+        #if PY_MAJOR_VERSION < 3
+        if (likely(exc_type == t)) return 1;
+        #endif
+        if (likely(PyExceptionClass_Check(t))) {
+            if (__Pyx_inner_PyErr_GivenExceptionMatches2(exc_type, NULL, t)) return 1;
+        }
+    }
+    return 0;
+}
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) {
+    if (likely(err == exc_type)) return 1;
+    if (likely(PyExceptionClass_Check(err))) {
+        if (likely(PyExceptionClass_Check(exc_type))) {
+            return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type);
+        } else if (likely(PyTuple_Check(exc_type))) {
+            return __Pyx_PyErr_GivenExceptionMatchesTuple(err, exc_type);
+        }
+    }
+    return PyErr_GivenExceptionMatches(err, exc_type);
+}
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) {
+    assert(PyExceptionClass_Check(exc_type1));
+    assert(PyExceptionClass_Check(exc_type2));
+    if (likely(err == exc_type1 || err == exc_type2)) return 1;
+    if (likely(PyExceptionClass_Check(err))) {
+        return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2);
+    }
+    return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2));
+}
+#endif
+
+/* CheckBinaryVersion */
+  static int __Pyx_check_binary_version(void) {
+    char ctversion[4], rtversion[4];
+    PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
+    PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion());
+    if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) {
+        char message[200];
+        PyOS_snprintf(message, sizeof(message),
+                      "compiletime version %s of module '%.100s' "
+                      "does not match runtime version %s",
+                      ctversion, __Pyx_MODULE_NAME, rtversion);
+        return PyErr_WarnEx(NULL, message, 1);
+    }
+    return 0;
+}
+
+/* InitStrings */
+  static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
+    while (t->p) {
+    #if PY_MAJOR_VERSION < 3
+        if (t->is_unicode) {
+            *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
+        } else if (t->intern) {
+            *t->p = PyString_InternFromString(t->s);
+        } else {
+            *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
+        }
+    #else
+        if (t->is_unicode | t->is_str) {
+            if (t->intern) {
+                *t->p = PyUnicode_InternFromString(t->s);
+            } else if (t->encoding) {
+                *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
+            } else {
+                *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
+            }
+        } else {
+            *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
+        }
+    #endif
+        if (!*t->p)
+            return -1;
+        if (PyObject_Hash(*t->p) == -1)
+            return -1;
+        ++t;
+    }
+    return 0;
+}
+
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) {
+    return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str));
+}
+static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) {
+    Py_ssize_t ignore;
+    return __Pyx_PyObject_AsStringAndSize(o, &ignore);
+}
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+#if !CYTHON_PEP393_ENABLED
+static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+    char* defenc_c;
+    PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);
+    if (!defenc) return NULL;
+    defenc_c = PyBytes_AS_STRING(defenc);
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+    {
+        char* end = defenc_c + PyBytes_GET_SIZE(defenc);
+        char* c;
+        for (c = defenc_c; c < end; c++) {
+            if ((unsigned char) (*c) >= 128) {
+                PyUnicode_AsASCIIString(o);
+                return NULL;
+            }
+        }
+    }
+#endif
+    *length = PyBytes_GET_SIZE(defenc);
+    return defenc_c;
+}
+#else
+static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+    if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL;
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+    if (likely(PyUnicode_IS_ASCII(o))) {
+        *length = PyUnicode_GET_LENGTH(o);
+        return PyUnicode_AsUTF8(o);
+    } else {
+        PyUnicode_AsASCIIString(o);
+        return NULL;
+    }
+#else
+    return PyUnicode_AsUTF8AndSize(o, length);
+#endif
+}
+#endif
+#endif
+static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+    if (
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+            __Pyx_sys_getdefaultencoding_not_ascii &&
+#endif
+            PyUnicode_Check(o)) {
+        return __Pyx_PyUnicode_AsStringAndSize(o, length);
+    } else
+#endif
+#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE))
+    if (PyByteArray_Check(o)) {
+        *length = PyByteArray_GET_SIZE(o);
+        return PyByteArray_AS_STRING(o);
+    } else
+#endif
+    {
+        char* result;
+        int r = PyBytes_AsStringAndSize(o, &result, length);
+        if (unlikely(r < 0)) {
+            return NULL;
+        } else {
+            return result;
+        }
+    }
+}
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
+    int is_true = x == Py_True;
+    if (is_true | (x == Py_False) | (x == Py_None)) return is_true;
+    else return PyObject_IsTrue(x);
+}
+static CYTHON_INLINE int 
__Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). " + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.pyx b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.pyx new file mode 100644 index 0000000000..7d9e5a76e5 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.pyx @@ -0,0 +1,65 @@ +import numpy as np +cimport numpy as np +from libcpp.string cimport string +cimport cython + +# use the Numpy-C-API from Cython +np.import_array() + +# cdefine the signature of our c function +cdef extern from "mesh_core.h": + void _render_colors_core( + float* image, float* vertices, int* triangles, + float* colors, + float* depth_buffer, + int nver, int ntri, + int h, int w, int c + ) + + void _get_normal_core( + float* normal, float* tri_normal, int* triangles, + int ntri + ) + + void _get_normal(float *ver_normal, float *vertices, int *triangles, int nver, int ntri) + + +@cython.boundscheck(False) +@cython.wraparound(False) +def get_normal_core(np.ndarray[float, ndim=2, mode = "c"] normal not None, + np.ndarray[float, ndim=2, mode = "c"] tri_normal not None, + np.ndarray[int, ndim=2, mode="c"] triangles not None, + int ntri + ): + _get_normal_core( + np.PyArray_DATA(normal), np.PyArray_DATA(tri_normal), np.PyArray_DATA(triangles), + ntri + ) + +@cython.boundscheck(False) +@cython.wraparound(False) +def render_colors_core(np.ndarray[float, ndim=3, mode = "c"] image not None, + np.ndarray[float, ndim=2, mode = "c"] vertices not None, + np.ndarray[int, ndim=2, mode="c"] triangles not None, + np.ndarray[float, ndim=2, mode = "c"] colors not None, + np.ndarray[float, ndim=2, mode = "c"] depth_buffer not None, + int nver, int ntri, + int h, int w, int c + 
): + _render_colors_core( + np.PyArray_DATA(image), np.PyArray_DATA(vertices), np.PyArray_DATA(triangles), + np.PyArray_DATA(colors), + np.PyArray_DATA(depth_buffer), + nver, ntri, + h, w, c + ) + +@cython.boundscheck(False) # turn off bounds-checking for entire function +@cython.wraparound(False) # turn off negative index wrapping for entire function +def get_normal(np.ndarray[float, ndim=2, mode = "c"] ver_normal not None, + np.ndarray[float, ndim=2, mode = "c"] vertices not None, + np.ndarray[int, ndim=2, mode="c"] triangles not None, + int nver, int ntri): + _get_normal( + np.PyArray_DATA(ver_normal), np.PyArray_DATA(vertices), np.PyArray_DATA(triangles), + nver, ntri) \ No newline at end of file diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/readme.md b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/readme.md new file mode 100644 index 0000000000..5d959a45f8 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/readme.md @@ -0,0 +1,6 @@ +### Cython compiling +``` +python3 setup.py build_ext -i +``` + +The `mesh_core_cython.*.so` will be generated. The specific name depends on your system. \ No newline at end of file diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/setup.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/setup.py new file mode 100644 index 0000000000..f58aa5b041 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/setup.py @@ -0,0 +1,19 @@ +''' +python setup.py build_ext -i +to compile +''' + +# setup.py +from distutils.core import setup, Extension +# from Cython.Build import cythonize +from Cython.Distutils import build_ext +import numpy + +setup( + name='mesh_core_cython', + cmdclass={'build_ext': build_ext}, + ext_modules=[Extension("mesh_core_cython", + sources=["mesh_core_cython.pyx", "mesh_core.cpp"], + language='c++', + include_dirs=[numpy.get_include()])], +) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/ddfa.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/ddfa.py new file mode 100644 index 0000000000..7138268c37 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/ddfa.py @@ -0,0 +1,161 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +import os.path as osp +from pathlib import Path +import numpy as np + +import torch +import torch.utils.data as data +import cv2 +import argparse +from .io import _numpy_to_tensor, _load_cpu +from .params import std_size, w_exp_base, w_shp_base, u_base, w_shp, param_std, param_mean, u, w_exp +from .estimate_pose import P2sRt + + +def _parse_param(param): + """Work for both numpy and tensor""" + p_ = param[:12].reshape(3, -1) + p = p_[:, :3] + offset = p_[:, -1].reshape(3, 1) + alpha_shp = param[12:52].reshape(-1, 1) + alpha_exp = param[52:].reshape(-1, 1) + return p, offset, alpha_shp, alpha_exp + + +def reconstruct_vertex(param, whitening=True, dense=False, transform=True, align_pose=False): + """Whitening param -> 3d vertex, based on the 3dmm param: u_base, w_shp, w_exp + dense: if True, return dense vertex, else return 68 sparse landmarks. 
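+        The reconstruction implemented below is, in matrix form,
+            vertex = p @ (u + w_shp @ alpha_shp + w_exp @ alpha_exp) + offset
+        (the *_base variants of u, w_shp and w_exp are used for the 68 landmarks).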
+        Both dense and sparse vertices are transformed to image coordinate space, but without the
+        alignment caused by face cropping.
+    transform: whether to transform to image space
+    """
+    if len(param) == 12:
+        param = np.concatenate((param, [0] * 50))
+    if whitening:
+        if len(param) == 62:
+            param = param * param_std + param_mean
+        else:
+            param = np.concatenate((param[:11], [0], param[11:]))
+            param = param * param_std + param_mean
+    if align_pose:
+        print('align param')
+        Ps = param[:12].reshape(3, -1)
+        s, R, t3d = P2sRt(Ps)
+        Ps[:, :3] = np.identity(3) * s
+        param[:12] = Ps.reshape(-1)
+
+    p, offset, alpha_shp, alpha_exp = _parse_param(param)
+
+    if dense:
+        vertex = p @ (u + w_shp @ alpha_shp + w_exp @ alpha_exp).reshape(3, -1, order='F') + offset
+
+        if transform:
+            # transform to image coordinate space
+            vertex[1, :] = std_size + 1 - vertex[1, :]
+    else:
+        """For 68 pts"""
+        vertex = p @ (u_base + w_shp_base @ alpha_shp + w_exp_base @ alpha_exp).reshape(3, -1, order='F') + offset
+
+        if transform:
+            # transform to image coordinate space
+            vertex[1, :] = std_size + 1 - vertex[1, :]
+
+    return vertex
+
+
+def img_loader(path):
+    return cv2.imread(path, cv2.IMREAD_COLOR)
+
+
+def str2bool(v):
+    if v.lower() in ('yes', 'true', 't', 'y', '1'):
+        return True
+    elif v.lower() in ('no', 'false', 'f', 'n', '0'):
+        return False
+    else:
+        raise argparse.ArgumentTypeError('Boolean value expected')
+
+
+class AverageMeter(object):
+    """Computes and stores the average and current value"""
+
+    def __init__(self):
+        self.reset()
+
+    def reset(self):
+        self.val = 0
+        self.avg = 0
+        self.sum = 0
+        self.count = 0
+
+    def update(self, val, n=1):
+        self.val = val
+        self.sum += val * n
+        self.count += n
+        self.avg = self.sum / self.count
+
+
+class ToTensorGjz(object):
+    def __call__(self, pic):
+        if isinstance(pic, np.ndarray):
+            img = torch.from_numpy(pic.transpose((2, 0, 1)))
+            return img.float()
+
+    def __repr__(self):
+        return self.__class__.__name__ + '()'
+
+
+class NormalizeGjz(object):
+    def __init__(self, mean, std):
+        self.mean = mean
+        self.std = std
+
+    def __call__(self, tensor):
+        tensor.sub_(self.mean).div_(self.std)
+        return tensor
+
+
+class DDFADataset(data.Dataset):
+    def __init__(self, root, filelists, param_fp, transform=None, **kargs):
+        self.root = root
+        self.transform = transform
+        self.lines = Path(filelists).read_text().strip().split('\n')
+        self.params = _numpy_to_tensor(_load_cpu(param_fp))
+        self.img_loader = img_loader
+
+    def _target_loader(self, index):
+        target = self.params[index]
+
+        return target
+
+    def __getitem__(self, index):
+        path = osp.join(self.root, self.lines[index])
+        img = self.img_loader(path)
+
+        target = self._target_loader(index)
+
+        if self.transform is not None:
+            img = self.transform(img)
+        return img, target
+
+    def __len__(self):
+        return len(self.lines)
+
+
+class DDFATestDataset(data.Dataset):
+    def __init__(self, filelists, root='', transform=None):
+        self.root = root
+        self.transform = transform
+        self.lines = Path(filelists).read_text().strip().split('\n')
+
+    def __getitem__(self, index):
+        path = osp.join(self.root, self.lines[index])
+        img = img_loader(path)
+
+        if self.transform is not None:
+            img = self.transform(img)
+        return img
+
+    def __len__(self):
+        return len(self.lines)
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/estimate_pose.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/estimate_pose.py
new file mode 100644
index 0000000000..3f40232aae
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/estimate_pose.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+"""
+Reference: https://github.com/YadiraF/PRNet/blob/master/utils/estimate_pose.py
+"""
+
+from math import cos, sin, atan2, asin
+import numpy as np
+from .params import param_mean, param_std
+
+
+def parse_pose(param):
+    param = param * param_std + param_mean
+    Ps = param[:12].reshape(3, -1)  # camera matrix
+    # R = P[:, :3]
+    s, R, t3d = P2sRt(Ps)
+    R.tofile('pose.txt', sep=' ')
+    P = np.concatenate((R, t3d.reshape(3, -1)), axis=1)  # without scale
+    # P = Ps / s
+    pose = matrix2angle(R)  # yaw, pitch, roll
+    # offset = p_[:, -1].reshape(3, 1)
+    return P, pose
+
+
+def matrix2angle(R):
+    ''' compute three Euler angles from a rotation matrix. Ref: http://www.gregslabaugh.net/publications/euler.pdf
+    Args:
+        R: (3,3). rotation matrix
+    Returns:
+        x: yaw
+        y: pitch
+        z: roll
+    '''
+    # assert(isRotationMatrix(R))
+
+    if R[2, 0] != 1 and R[2, 0] != -1:
+        x = -asin(max(-1, min(R[2, 0], 1)))
+        y = atan2(R[2, 1] / cos(x), R[2, 2] / cos(x))
+        z = atan2(R[1, 0] / cos(x), R[0, 0] / cos(x))
+
+    else:  # gimbal lock
+        z = 0  # can be anything
+        if R[2, 0] == -1:
+            x = np.pi / 2
+            y = z + atan2(R[0, 1], R[0, 2])
+        else:
+            x = -np.pi / 2
+            y = -z + atan2(-R[0, 1], -R[0, 2])
+
+    return x, y, z
+
+
+def angle2matrix(angles):
+    ''' get the rotation matrix from three rotation angles (radian). The same as in 3DDFA.
+    Args:
+        angles: [3,]. x, y, z angles
+            x: yaw.
+            y: pitch.
+            z: roll.
+    Returns:
+        R: 3x3. rotation matrix.
+    '''
+    # x, y, z = np.deg2rad(angles[0]), np.deg2rad(angles[1]), np.deg2rad(angles[2])
+    # x, y, z = angles[0], angles[1], angles[2]
+    y, x, z = angles[0], angles[1], angles[2]
+
+    # x
+    Rx = np.array([[1, 0, 0],
+                   [0, cos(x), -sin(x)],
+                   [0, sin(x), cos(x)]])
+    # y
+    Ry = np.array([[cos(y), 0, sin(y)],
+                   [0, 1, 0],
+                   [-sin(y), 0, cos(y)]])
+    # z
+    Rz = np.array([[cos(z), -sin(z), 0],
+                   [sin(z), cos(z), 0],
+                   [0, 0, 1]])
+    R = Rz.dot(Ry).dot(Rx)
+    return R.astype(np.float32)
+
+
+def P2sRt(P):
+    ''' decompose the affine camera matrix P into scale, rotation and translation.
+    Args:
+        P: (3, 4). affine camera matrix.
+    Returns:
+        s: scale factor.
+        R: (3, 3). rotation matrix.
+        t3d: (3,). 3d translation.
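+        Note: s is recovered as the mean norm of the first two rows of P[:, :3],
+        and R is assembled from the normalized rows r1, r2 and r3 = r1 x r2.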
+ ''' + t3d = P[:, 3] + R1 = P[0:1, :3] + R2 = P[1:2, :3] + s = (np.linalg.norm(R1) + np.linalg.norm(R2)) / 2.0 + r1 = R1 / np.linalg.norm(R1) + r2 = R2 / np.linalg.norm(R2) + r3 = np.cross(r1, r2) + + R = np.concatenate((r1, r2, r3), 0) + return s, R, t3d + + +def main(): + pass + + +if __name__ == '__main__': + main() diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/inference.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/inference.py new file mode 100644 index 0000000000..fa2f1b0875 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/inference.py @@ -0,0 +1,268 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +import numpy as np +from math import sqrt +import scipy.io as sio +import matplotlib.pyplot as plt +from .ddfa import reconstruct_vertex +from .estimate_pose import matrix2angle, angle2matrix, P2sRt + +__author__ = 'cleardusk' +__all__ = ['matrix2angle', 'angle2matrix'] + + +def get_5lmk_from_68lmk(lmk68): + left_eye = lmk68[36:42, :].mean(axis=0) + right_eye = lmk68[42:48, :].mean(axis=0) + nose = lmk68[33, :] + left_mouse = lmk68[48, :] + right_mouse = lmk68[54, :] + lmk5 = [left_eye, right_eye, nose, left_mouse, right_mouse] + return np.concatenate(lmk5) + + +def get_suffix(filename): + """a.jpg -> jpg""" + pos = filename.rfind('.') + if pos == -1: + return '' + return filename[pos:] + + +def crop_img(img, roi_box): + h, w = img.shape[:2] + + sx, sy, ex, ey = [int(round(_)) for _ in roi_box] + dh, dw = ey - sy, ex - sx + if len(img.shape) == 3: + res = np.zeros((dh, dw, 3), dtype=np.uint8) + else: + res = np.zeros((dh, dw), dtype=np.uint8) + if sx < 0: + sx, dsx = 0, -sx + else: + dsx = 0 + + if ex > w: + ex, dex = w, dw - (ex - w) + else: + dex = dw + + if sy < 0: + sy, dsy = 0, -sy + else: + dsy = 0 + + if ey > h: + ey, dey = h, dh - (ey - h) + else: + dey = dh + + res[dsy:dey, dsx:dex] = img[sy:ey, sx:ex] + return res + + +def calc_hypotenuse(pts): + bbox = [min(pts[0, :]), min(pts[1, :]), max(pts[0, :]), max(pts[1, :])] + center = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2] + radius = max(bbox[2] - bbox[0], bbox[3] - bbox[1]) / 2 + bbox = [center[0] - radius, center[1] - radius, center[0] + radius, center[1] + radius] + llength = sqrt((bbox[2] - bbox[0]) ** 2 + (bbox[3] - bbox[1]) ** 2) + return llength / 3 + + +def parse_roi_box_from_landmark(pts): + """calc roi box from landmark""" + bbox = [min(pts[0, :]), min(pts[1, :]), max(pts[0, :]), max(pts[1, :])] + center = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2] + radius = max(bbox[2] - bbox[0], bbox[3] - bbox[1]) / 2 + bbox = [center[0] - radius, center[1] - radius, center[0] + radius, center[1] + radius] + + llength = sqrt((bbox[2] - bbox[0]) ** 2 + (bbox[3] - bbox[1]) ** 2) + center_x = (bbox[2] + bbox[0]) / 2 + center_y = (bbox[3] + bbox[1]) / 2 + + roi_box = [0] * 4 + roi_box[0] = center_x - llength / 2 + roi_box[1] = center_y - llength / 2 + roi_box[2] = roi_box[0] + llength + roi_box[3] = roi_box[1] + llength + + return roi_box + + +def parse_roi_box_from_bbox(bbox): + left, top, right, bottom = bbox + old_size = (right - left + bottom - top) / 2 + center_x = right - (right - left) / 2.0 + center_y = bottom - (bottom - top) / 2.0 + old_size * 0.14 + size = int(old_size * 1.58) + roi_box = [0] * 4 + roi_box[0] = center_x - size / 2 + roi_box[1] = center_y - size / 2 + roi_box[2] = roi_box[0] + size + roi_box[3] = roi_box[1] + size + return roi_box + 
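+# Worked example for parse_roi_box_from_bbox (a sketch): for bbox = (0, 0, 100, 100),
+# old_size = 100, the center is shifted down by 0.14 * old_size to (50, 64),
+# size = int(1.58 * 100) = 158, and the returned square roi_box is
+# [-29, -15, 129, 143]; crop_img above zero-pads the parts of such a box that
+# fall outside the image.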
+
+def dump_to_ply(vertex, tri, wfp):
+    header = """ply
+format ascii 1.0
+element vertex {}
+property float x
+property float y
+property float z
+element face {}
+property list uchar int vertex_indices
+end_header"""
+
+    n_vertex = vertex.shape[1]
+    n_face = tri.shape[1]
+    header = header.format(n_vertex, n_face)
+
+    with open(wfp, 'w') as f:
+        f.write(header + '\n')
+        for i in range(n_vertex):
+            x, y, z = vertex[:, i]
+            f.write('{:.4f} {:.4f} {:.4f}\n'.format(x, y, z))
+        for i in range(n_face):
+            idx1, idx2, idx3 = tri[:, i]
+            f.write('3 {} {} {}\n'.format(idx1 - 1, idx2 - 1, idx3 - 1))
+    print('Dump to {}'.format(wfp))
+
+
+def dump_vertex(vertex, wfp):
+    sio.savemat(wfp, {'vertex': vertex})
+    print('Dump to {}'.format(wfp))
+
+
+def _predict_vertices(param, roi_bbox, dense, transform=True, align_pose=False):
+    vertex = reconstruct_vertex(param, dense=dense, align_pose=align_pose)
+    sx, sy, ex, ey = roi_bbox
+    scale_x = (ex - sx) / 120
+    scale_y = (ey - sy) / 120
+    vertex[0, :] = vertex[0, :] * scale_x + sx
+    vertex[1, :] = vertex[1, :] * scale_y + sy
+
+    s = (scale_x + scale_y) / 2
+    vertex[2, :] *= s
+
+    return vertex
+
+
+def predict_68pts(param, roi_box, align_pose=False):
+    return _predict_vertices(param, roi_box, dense=False, align_pose=align_pose)
+
+
+def predict_dense(param, roi_box, align_pose=False):
+    return _predict_vertices(param, roi_box, dense=True, align_pose=align_pose)
+
+
+def get_aligned_param(param):
+    Ps = param[:12].reshape(3, -1)
+    s, R, t3d = P2sRt(Ps)
+    Ps[:, :3] = np.identity(3) * s
+    # t3d = t3d.reshape(3, -1)
+    # p = p_[:, :3]
+    # offset = p_[:, -1].reshape(3, 1)
+    # print(R)
+    # angles = matrix2angle(R)  # yaw, pitch, roll
+    # angles = [x, y, z]
+    # if 'yaw' in change_axis:
+    #     angles[0] = 0
+    # if 'pitch' in change_axis:
+    #     angles[1] = 0
+    # if 'roll' in change_axis:
+    #     angles[2] = 0
+    # p = angle2matrix(angles)
+    # print(p)
+    # new_p_ = np.concatenate((p, t3d), axis=1).reshape(-1)
+    param[:12] = Ps.reshape(-1)
+    return param
+
+
+def draw_landmarks(img, pts, style='fancy', wfp=None, show_flg=False, **kwargs):
+    """Draw landmarks using matplotlib"""
+    height, width = img.shape[:2]
+    plt.figure(figsize=(12, height / width * 12))
+    plt.imshow(img[:, :, ::-1])
+    plt.subplots_adjust(left=0, right=1, top=1, bottom=0)
+    plt.axis('off')
+
+    if not type(pts) in [tuple, list]:
+        pts = [pts]
+    for i in range(len(pts)):
+        if style == 'simple':
+            plt.plot(pts[i][0, :], pts[i][1, :], 'o', markersize=4, color='g')
+
+        elif style == 'fancy':
+            alpha = 0.8
+            markersize = 4
+            lw = 1.5
+            color = kwargs.get('color', 'w')
+            markeredgecolor = kwargs.get('markeredgecolor', 'black')
+
+            nums = [0, 17, 22, 27, 31, 36, 42, 48, 60, 68]
+
+            # close eyes and mouths
+            def plot_close(i1, i2):
+                return plt.plot([pts[i][0, i1], pts[i][0, i2]], [pts[i][1, i1], pts[i][1, i2]], color=color, lw=lw,
+                                alpha=alpha - 0.1)
+
+            plot_close(41, 36)
+            plot_close(47, 42)
+            plot_close(59, 48)
+            plot_close(67, 60)
+
+            for ind in range(len(nums) - 1):
+                l, r = nums[ind], nums[ind + 1]
+                plt.plot(pts[i][0, l:r], pts[i][1, l:r], color=color, lw=lw, alpha=alpha - 0.1)
+
+                plt.plot(pts[i][0, l:r], pts[i][1, l:r], marker='o', linestyle='None', markersize=markersize,
+                         color=color,
+                         markeredgecolor=markeredgecolor, alpha=alpha)
+
+    if wfp is not None:
+        plt.savefig(wfp, dpi=200)
+        print('Save visualization result to {}'.format(wfp))
+    if show_flg:
+        plt.show()
+
+
+def get_colors(image, vertices):
+    [h, w, _] = image.shape
+    vertices[0, :] = np.minimum(np.maximum(vertices[0, :], 0), w - 1)  # x
+
vertices[1, :] = np.minimum(np.maximum(vertices[1, :], 0), h - 1) # y + ind = np.round(vertices).astype(np.int32) + colors = image[ind[1, :], ind[0, :], :] # n x 3 + + return colors + + +def write_obj_with_colors(obj_name, vertices, triangles, colors): + triangles = triangles.copy() # meshlab start with 1 + + if obj_name.split('.')[-1] != 'obj': + obj_name = obj_name + '.obj' + + # write obj + with open(obj_name, 'w') as f: + # write vertices & colors + for i in range(vertices.shape[1]): + s = 'v {:.4f} {:.4f} {:.4f} {} {} {}\n'.format(vertices[1, i], vertices[0, i], vertices[2, i], colors[i, 2], + colors[i, 1], colors[i, 0]) + f.write(s) + + # write f: ver ind/ uv ind + for i in range(triangles.shape[1]): + s = 'f {} {} {}\n'.format(triangles[0, i], triangles[1, i], triangles[2, i]) + f.write(s) + + +def main(): + pass + + +if __name__ == '__main__': + main() diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/io.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/io.py new file mode 100644 index 0000000000..71b446989a --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/io.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +import os +import numpy as np +import torch +import pickle +import scipy.io as sio + + +def mkdir(d): + """only works on *nix system""" + if not os.path.isdir(d) and not os.path.exists(d): + os.system('mkdir -p {}'.format(d)) + + +def _get_suffix(filename): + """a.jpg -> jpg""" + pos = filename.rfind('.') + if pos == -1: + return '' + return filename[pos + 1:] + + +def _load(fp): + suffix = _get_suffix(fp) + if suffix == 'npy': + return np.load(fp) + elif suffix == 'pkl': + return pickle.load(open(fp, 'rb')) + + +def _dump(wfp, obj): + suffix = _get_suffix(wfp) + if suffix == 'npy': + np.save(wfp, obj) + elif suffix == 'pkl': + pickle.dump(obj, open(wfp, 'wb')) + else: + raise Exception('Unknown Type: {}'.format(suffix)) + + +def _load_tensor(fp, mode='cpu'): + if mode.lower() == 'cpu': + return torch.from_numpy(_load(fp)) + elif mode.lower() == 'gpu': + return torch.from_numpy(_load(fp)).cuda() + + +def _tensor_to_cuda(x): + if x.is_cuda: + return x + else: + return x.cuda() + + +def _load_gpu(fp): + return torch.from_numpy(_load(fp)).cuda() + + +def load_bfm(model_path): + suffix = _get_suffix(model_path) + if suffix == 'mat': + C = sio.loadmat(model_path) + model = C['model_refine'] + model = model[0, 0] + + model_new = {} + w_shp = model['w'].astype(np.float32) + model_new['w_shp_sim'] = w_shp[:, :40] + w_exp = model['w_exp'].astype(np.float32) + model_new['w_exp_sim'] = w_exp[:, :10] + + u_shp = model['mu_shape'] + u_exp = model['mu_exp'] + u = (u_shp + u_exp).astype(np.float32) + model_new['mu'] = u + model_new['tri'] = model['tri'].astype(np.int32) - 1 + + # flatten it, pay attention to index value + keypoints = model['keypoints'].astype(np.int32) - 1 + keypoints = np.concatenate((3 * keypoints, 3 * keypoints + 1, 3 * keypoints + 2), axis=0) + + model_new['keypoints'] = keypoints.T.flatten() + + # + w = np.concatenate((w_shp, w_exp), axis=1) + w_base = w[keypoints] + w_norm = np.linalg.norm(w, axis=0) + w_base_norm = np.linalg.norm(w_base, axis=0) + + dim = w_shp.shape[0] // 3 + u_base = u[keypoints].reshape(-1, 1) + w_shp_base = w_shp[keypoints] + w_exp_base = w_exp[keypoints] + + model_new['w_norm'] = w_norm + model_new['w_base_norm'] = w_base_norm + model_new['dim'] = dim + model_new['u_base'] 
= u_base + model_new['w_shp_base'] = w_shp_base + model_new['w_exp_base'] = w_exp_base + + _dump(model_path.replace('.mat', '.pkl'), model_new) + return model_new + else: + return _load(model_path) + + +_load_cpu = _load + + +def _numpy_to_tensor(x): + return torch.from_numpy(x) + + +def _tensor_to_numpy(x): + return x.cpu() + + +def _numpy_to_cuda(x): + return _tensor_to_cuda(torch.from_numpy(x)) + + +def _cuda_to_tensor(x): + return x.cpu() + + +def _cuda_to_numpy(x): + return x.cpu().numpy() diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/lighting.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/lighting.py new file mode 100644 index 0000000000..1f89d76b52 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/lighting.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +import path_helper +import numpy as np +from utils import render +from utils.cython import mesh_core_cython + +__all__ = ['path_helper'] + + +def _norm(arr): + return arr / np.sqrt(np.sum(arr ** 2, axis=1))[:, None] + + +def norm_vertices(vertices): + vertices -= vertices.min(0)[None, :] + vertices /= vertices.max() + vertices *= 2 + vertices -= vertices.max(0)[None, :] / 2 + return vertices + + +def convert_type(obj): + if isinstance(obj, tuple) or isinstance(obj, list): + return np.array(obj, dtype=np.float32)[None, :] + return obj + + +class RenderPipeline(object): + def __init__(self, **kwargs): + self.intensity_ambient = convert_type(kwargs.get('intensity_ambient', 0.3)) + self.intensity_directional = convert_type(kwargs.get('intensity_directional', 0.6)) + self.intensity_specular = convert_type(kwargs.get('intensity_specular', 0.9)) + self.specular_exp = kwargs.get('specular_exp', 5) + self.color_ambient = convert_type(kwargs.get('color_ambient', (1, 1, 1))) + self.color_directional = convert_type(kwargs.get('color_directional', (1, 1, 1))) + self.light_pos = convert_type(kwargs.get('light_pos', (0, 0, 1))) + self.view_pos = convert_type(kwargs.get('view_pos', (0, 0, 1))) + + def update_light_pos(self, light_pos): + self.light_pos = convert_type(light_pos) + + def __call__(self, vertices, triangles, background): + height, width = background.shape[:2] + + # 1. compute triangle/face normals and vertex normals + # ## Old style: very slow + # normal = np.zeros((vertices.shape[0], 3), dtype=np.float32) + # # surface_count = np.zeros((vertices.shape[0], 1)) + # for i in range(triangles.shape[0]): + # i1, i2, i3 = triangles[i, :] + # v1, v2, v3 = vertices[[i1, i2, i3], :] + # surface_normal = np.cross(v2 - v1, v3 - v1) + # normal[[i1, i2, i3], :] += surface_normal + # # surface_count[[i1, i2, i3], :] += 1 + # + # # normal /= surface_count + # # normal /= np.linalg.norm(normal, axis=1, keepdims=True) + # normal = _norm(normal) + + # Cython style + normal = np.zeros((vertices.shape[0], 3), dtype=np.float32) + mesh_core_cython.get_normal(normal, vertices, triangles, vertices.shape[0], triangles.shape[0]) + + # 2. 
lighting
+        color = np.zeros_like(vertices, dtype=np.float32)
+        # ambient component
+        if self.intensity_ambient > 0:
+            color += self.intensity_ambient * self.color_ambient
+
+        vertices_n = norm_vertices(vertices.copy())
+        if self.intensity_directional > 0:
+            # diffuse component
+            direction = _norm(self.light_pos - vertices_n)
+            cos = np.sum(normal * direction, axis=1)[:, None]
+            # cos = np.clip(cos, 0, 1)
+            # todo: check below
+            color += self.intensity_directional * (self.color_directional * np.clip(cos, 0, 1))
+
+            # specular component
+            if self.intensity_specular > 0:
+                v2v = _norm(self.view_pos - vertices_n)
+                reflection = 2 * cos * normal - direction
+                spe = np.sum((v2v * reflection) ** self.specular_exp, axis=1)[:, None]
+                spe = np.where(cos != 0, np.clip(spe, 0, 1), np.zeros_like(spe))
+                color += self.intensity_specular * self.color_directional * np.clip(spe, 0, 1)
+        color = np.clip(color, 0, 1)
+
+        # 3. rasterization, [0, 1]
+        render_img = render.crender_colors(vertices, triangles, color, height, width, BG=background)
+        render_img = (render_img * 255).astype(np.uint8)
+        return render_img
+
+
+def main():
+    pass
+
+
+if __name__ == '__main__':
+    main()
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/paf.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/paf.py
new file mode 100644
index 0000000000..f7c6ee8ca8
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/paf.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+import numpy as np
+from .ddfa import _parse_param
+from .params import u_filter, w_filter, w_exp_filter, std_size, param_mean, param_std
+
+
+def reconstruct_paf_anchor(param, whitening=True):
+    if whitening:
+        param = param * param_std + param_mean
+    p, offset, alpha_shp, alpha_exp = _parse_param(param)
+    anchor = p @ (u_filter + w_filter @ alpha_shp + w_exp_filter @ alpha_exp).reshape(3, -1, order='F') + offset
+    anchor[1, :] = std_size + 1 - anchor[1, :]
+    return anchor[:2, :]
+
+
+def gen_offsets(kernel_size):
+    offsets = np.zeros((2, kernel_size * kernel_size), dtype=np.int)
+    ind = 0
+    delta = (kernel_size - 1) // 2
+    for i in range(kernel_size):
+        y = i - delta
+        for j in range(kernel_size):
+            x = j - delta
+            offsets[0, ind] = x
+            offsets[1, ind] = y
+            ind += 1
+    return offsets
+
+
+def gen_img_paf(img_crop, param, kernel_size=3):
+    """Generate PAF image
+    img_crop: 120x120
+    kernel_size: kernel size for convolution; should be an odd number like 3 or 5
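+    A hypothetical call (a sketch, shapes only):
+        img_paf = gen_img_paf(img_crop, param, kernel_size=3)  # uint8, (192, 192, 3)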
+ """ + anchor = reconstruct_paf_anchor(param) + anchor = np.round(anchor).astype(np.int) + delta = (kernel_size - 1) // 2 + anchor[anchor < delta] = delta + anchor[anchor >= std_size - delta - 1] = std_size - delta - 1 + + img_paf = np.zeros((64 * kernel_size, 64 * kernel_size, 3), dtype=np.uint8) + offsets = gen_offsets(kernel_size) + for i in range(kernel_size * kernel_size): + ox, oy = offsets[:, i] + index0 = anchor[0] + ox + index1 = anchor[1] + oy + p = img_crop[index1, index0].reshape(64, 64, 3).transpose(1, 0, 2) + + img_paf[oy + delta::kernel_size, ox + delta::kernel_size] = p + + return img_paf + + +def main(): + pass + + +if __name__ == '__main__': + main() diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/params.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/params.py new file mode 100644 index 0000000000..e298bca1f7 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/params.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +import os.path as osp +import numpy as np +from .io import _load + + +def make_abs_path(d): + return osp.join(osp.dirname(osp.realpath(__file__)), d) + + +d = make_abs_path('../train.configs') +keypoints = _load(osp.join(d, 'keypoints_sim.npy')) +w_shp = _load(osp.join(d, 'w_shp_sim.npy')) +w_exp = _load(osp.join(d, 'w_exp_sim.npy')) # simplified version +meta = _load(osp.join(d, 'param_whitening.pkl')) +# param_mean and param_std are used for re-whitening +param_mean = meta.get('param_mean') +param_std = meta.get('param_std') +u_shp = _load(osp.join(d, 'u_shp.npy')) +u_exp = _load(osp.join(d, 'u_exp.npy')) +u = u_shp + u_exp +w = np.concatenate((w_shp, w_exp), axis=1) +w_base = w[keypoints] +w_norm = np.linalg.norm(w, axis=0) +w_base_norm = np.linalg.norm(w_base, axis=0) + +# for inference +dim = w_shp.shape[0] // 3 +u_base = u[keypoints].reshape(-1, 1) +w_shp_base = w_shp[keypoints] +w_exp_base = w_exp[keypoints] +std_size = 120 + +# for paf (pac) +paf = _load(osp.join(d, 'Model_PAF.pkl')) +u_filter = paf.get('mu_filter') +w_filter = paf.get('w_filter') +w_exp_filter = paf.get('w_exp_filter') + +# pncc code (mean shape) +pncc_code = _load(osp.join(d, 'pncc_code.npy')) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/path_helper.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/path_helper.py new file mode 100644 index 0000000000..e8a0da08df --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/path_helper.py @@ -0,0 +1,2 @@ +import sys +sys.path.append('../') diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/render.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/render.py new file mode 100644 index 0000000000..a849b9126a --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/render.py @@ -0,0 +1,225 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +""" +Modified from https://raw.githubusercontent.com/YadiraF/PRNet/master/utils/render.py +""" + +import numpy as np +import cython +from .cython import mesh_core_cython +from .params import pncc_code + +__author__ = 'cleardusk' +__all__ = ['cython'] + + +def is_point_in_tri(point, tri_points): + ''' Judge whether the point is in 
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/render.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/render.py
new file mode 100644
index 0000000000..a849b9126a
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/render.py
@@ -0,0 +1,225 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+"""
+Modified from https://raw.githubusercontent.com/YadiraF/PRNet/master/utils/render.py
+"""
+
+import numpy as np
+import cython
+from .cython import mesh_core_cython
+from .params import pncc_code
+
+__author__ = 'cleardusk'
+__all__ = ['cython']
+
+
+def is_point_in_tri(point, tri_points):
+    ''' Judge whether the point is in the triangle
+    Method:
+        http://blackpawn.com/texts/pointinpoly/
+    Args:
+        point: [u, v] or [x, y]
+        tri_points: three vertices(2d points) of a triangle. 2 coords x 3 vertices
+    Returns:
+        bool: true for in triangle
+    '''
+    tp = tri_points
+
+    # vectors
+    v0 = tp[:, 2] - tp[:, 0]
+    v1 = tp[:, 1] - tp[:, 0]
+    v2 = point - tp[:, 0]
+
+    # dot products
+    dot00 = np.dot(v0.T, v0)
+    dot01 = np.dot(v0.T, v1)
+    dot02 = np.dot(v0.T, v2)
+    dot11 = np.dot(v1.T, v1)
+    dot12 = np.dot(v1.T, v2)
+
+    # barycentric coordinates
+    if dot00 * dot11 - dot01 * dot01 == 0:
+        inverDeno = 0
+    else:
+        inverDeno = 1 / (dot00 * dot11 - dot01 * dot01)
+
+    u = (dot11 * dot02 - dot01 * dot12) * inverDeno
+    v = (dot00 * dot12 - dot01 * dot02) * inverDeno
+
+    # check if point in triangle
+    return (u >= 0) & (v >= 0) & (u + v < 1)
+
+
+def render_colors(vertices, colors, tri, h, w, c=3):
+    """ render mesh by z buffer
+    Args:
+        vertices: 3 x nver
+        colors: 3 x nver
+        tri: 3 x ntri
+        h: height
+        w: width
+    """
+    # initial
+    image = np.zeros((h, w, c))
+
+    depth_buffer = np.zeros([h, w]) - 999999.
+    # triangle depth: approximate the depth to the average value of z in each vertex(v0, v1, v2), since the vertices
+    # are close to each other
+    tri_depth = (vertices[2, tri[0, :]] + vertices[2, tri[1, :]] + vertices[2, tri[2, :]]) / 3.
+    tri_tex = (colors[:, tri[0, :]] + colors[:, tri[1, :]] + colors[:, tri[2, :]]) / 3.
+
+    for i in range(tri.shape[1]):
+        tri_idx = tri[:, i]  # 3 vertex indices
+
+        # the inner bounding box
+        umin = max(int(np.ceil(np.min(vertices[0, tri_idx]))), 0)
+        umax = min(int(np.floor(np.max(vertices[0, tri_idx]))), w - 1)
+
+        vmin = max(int(np.ceil(np.min(vertices[1, tri_idx]))), 0)
+        vmax = min(int(np.floor(np.max(vertices[1, tri_idx]))), h - 1)
+
+        if umax < umin or vmax < vmin:
+            continue
+
+        for u in range(umin, umax + 1):
+            for v in range(vmin, vmax + 1):
+                if tri_depth[i] > depth_buffer[v, u] and is_point_in_tri([u, v], vertices[:2, tri_idx]):
+                    depth_buffer[v, u] = tri_depth[i]
+                    image[v, u, :] = tri_tex[:, i]
+    return image
+
+
+def get_depths_image(img, vertices_lst, tri):
+    h, w = img.shape[:2]
+    c = 1
+
+    depths_img = np.zeros((h, w, c))
+    for i in range(len(vertices_lst)):
+        vertices = vertices_lst[i]
+
+        z = vertices[2, :]
+        z_min, z_max = min(z), max(z)
+        vertices[2, :] = (z - z_min) / (z_max - z_min)
+
+        z = vertices[2:, :]
+        depth_img = render_colors(vertices.T, z.T, tri.T, h, w, 1)
+        depths_img[depth_img > 0] = depth_img[depth_img > 0]
+
+    depths_img = depths_img.squeeze() * 255
+    return depths_img
+
+
+def crender_colors(vertices, triangles, colors, h, w, c=3, BG=None):
+    """ render mesh with colors
+    Args:
+        vertices: [nver, 3]
+        triangles: [ntri, 3]
+        colors: [nver, 3]
+        h: height
+        w: width
+        c: channel
+        BG: background image
+    Returns:
+        image: [h, w, c]. rendered image.
+    """
+
+    if BG is None:
+        image = np.zeros((h, w, c), dtype=np.float32)
+    else:
+        assert BG.shape[0] == h and BG.shape[1] == w and BG.shape[2] == c
+        image = BG.astype(np.float32).copy(order='C')
+    depth_buffer = np.zeros([h, w], dtype=np.float32, order='C') - 999999.
+
+    # to C order
+    vertices = vertices.astype(np.float32).copy(order='C')
+    triangles = triangles.astype(np.int32).copy(order='C')
+    colors = colors.astype(np.float32).copy(order='C')
+
+    mesh_core_cython.render_colors_core(
+        image, vertices, triangles,
+        colors,
+        depth_buffer,
+        vertices.shape[0], triangles.shape[0],
+        h, w, c
+    )
+    return image
+
+
+def cget_depths_image(img, vertices_lst, tri):
+    """cython version for depth image render"""
+    h, w = img.shape[:2]
+    c = 1
+
+    depths_img = np.zeros((h, w, c))
+    for i in range(len(vertices_lst)):
+        vertices = vertices_lst[i]
+
+        z = vertices[2, :]
+        z_min, z_max = min(z), max(z)
+        vertices[2, :] = (z - z_min) / (z_max - z_min)
+        z = vertices[2:, :]
+
+        depth_img = crender_colors(vertices.T, tri.T, z.T, h, w, 1)
+        depths_img[depth_img > 0] = depth_img[depth_img > 0]
+
+    depths_img = depths_img.squeeze() * 255
+    return depths_img
+
+
+def ncc(vertices):
+    # simple version
+    # ncc_vertices = np.zeros_like(vertices)
+    # x = vertices[0, :]
+    # y = vertices[1, :]
+    # z = vertices[2, :]
+    #
+    # ncc_vertices[0, :] = (x - min(x)) / (max(x) - min(x))
+    # ncc_vertices[1, :] = (y - min(y)) / (max(y) - min(y))
+    # ncc_vertices[2, :] = (z - min(z)) / (max(z) - min(z))
+
+    # matrix version
+    v_min = np.min(vertices, axis=1).reshape(-1, 1)
+    v_max = np.max(vertices, axis=1).reshape(-1, 1)
+    ncc_vertices = (vertices - v_min) / (v_max - v_min)
+
+    return ncc_vertices
+
+
+def cpncc(img, vertices_lst, tri):
+    """cython version for PNCC render: original paper"""
+    h, w = img.shape[:2]
+    c = 3
+
+    pnccs_img = np.zeros((h, w, c))
+    for i in range(len(vertices_lst)):
+        vertices = vertices_lst[i]
+        pncc_img = crender_colors(vertices.T, tri.T, pncc_code.T, h, w, c)
+        pnccs_img[pncc_img > 0] = pncc_img[pncc_img > 0]
+
+    pnccs_img = pnccs_img.squeeze() * 255
+    return pnccs_img
+
+
+def cpncc_v2(img, vertices_lst, tri):
+    """cython version for PNCC render"""
+    h, w = img.shape[:2]
+    c = 3
+
+    pnccs_img = np.zeros((h, w, c))
+    for i in range(len(vertices_lst)):
+        vertices = vertices_lst[i]
+        ncc_vertices = ncc(vertices)
+        pncc_img = crender_colors(vertices.T, tri.T, ncc_vertices.T, h, w, c)
+        pnccs_img[pncc_img > 0] = pncc_img[pncc_img > 0]
+
+    pnccs_img = pnccs_img.squeeze() * 255
+    return pnccs_img
+
+
+def main():
+    pass
+
+
+if __name__ == '__main__':
+    main()
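A quick numeric check of the barycentric inside-test used by is_point_in_tri above: the point is written as p = v0 + u*(v2 - v0) + v*(v1 - v0) and accepted when u >= 0, v >= 0 and u + v < 1 (the method from the linked blackpawn article). The triangle and test points below are hypothetical data, not taken from the project:

```python
import numpy as np

def in_tri(point, tp):
    # tp: 2x3 array, one column per triangle vertex, as in is_point_in_tri
    v0, v1, v2 = tp[:, 2] - tp[:, 0], tp[:, 1] - tp[:, 0], point - tp[:, 0]
    dot00, dot01, dot02 = v0 @ v0, v0 @ v1, v0 @ v2
    dot11, dot12 = v1 @ v1, v1 @ v2
    deno = dot00 * dot11 - dot01 * dot01
    inver = 0 if deno == 0 else 1 / deno       # degenerate triangles fall through to False
    u = (dot11 * dot02 - dot01 * dot12) * inver
    v = (dot00 * dot12 - dot01 * dot02) * inver
    return (u >= 0) and (v >= 0) and (u + v < 1)

tri = np.array([[0.0, 4.0, 0.0],   # x coordinates of the three vertices
                [0.0, 0.0, 4.0]])  # y coordinates
print(in_tri(np.array([1.0, 1.0]), tri))  # True: inside, u = v = 0.25
print(in_tri(np.array([3.0, 3.0]), tri))  # False: beyond the hypotenuse, u + v = 1.5
```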
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/readme.md b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/readme.md
new file mode 100644
index 0000000000..560c319af6
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/readme.md
@@ -0,0 +1,3 @@
+`render_demo.m` is a simple MATLAB demo that renders a 3D face mesh.
+
+`tri.mat` provides the 3D mesh triangle indices.
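One more note on the Python renderer just added, before the MATLAB helpers: ncc() min-max normalizes each coordinate axis of the mesh into [0, 1], so every vertex receives a pose-invariant RGB code, and cpncc_v2 then rasterizes those codes with the z-buffer to obtain a dense correspondence (PNCC) image. A toy check of the normalization step, with made-up vertices:

```python
import numpy as np

vertices = np.array([[0.0, 60.0, 120.0],   # x
                     [10.0, 20.0, 30.0],   # y
                     [-5.0, 0.0, 5.0]])    # z; shape 3 x nver, as ncc() expects

v_min = vertices.min(axis=1, keepdims=True)
v_max = vertices.max(axis=1, keepdims=True)
ncc_vertices = (vertices - v_min) / (v_max - v_min)

print(ncc_vertices)  # each row now spans exactly [0, 1]; used as per-vertex RGB in cpncc_v2
```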
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_demo.m b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_demo.m
new file mode 100644
index 0000000000..25f8f0281f
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_demo.m
@@ -0,0 +1,10 @@
+tri = load('tri.mat');
+vertex = load('image00427');
+
+tri = tri.tri;
+vertex = vertex.vertex;
+render_face_mesh(vertex, tri);
+
+A = getframe(gcf);
+mimg = A.cdata;
+imwrite(mimg, 'demo.jpg', 'quality', 95);
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_face_mesh.m b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_face_mesh.m
new file mode 100644
index 0000000000..2285ca2489
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_face_mesh.m
@@ -0,0 +1,18 @@
+function render_face_mesh(vertex, tri)
+    trisurf(tri', vertex(1, :), vertex(2, :), vertex(3, :), ones(size(vertex, 2), 1), 'edgecolor', 'none');
+
+    re = [1 1 1];
+    colormap(re);
+
+    light('Position', [0 0 1], 'Style', 'infinite');
+    lighting gouraud
+    axis equal
+    view([0 90]);
+
+    xlabel('x');
+    ylabel('y');
+    zlabel('z');
+
+    axis on;
+    grid on;
+end
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/tri.mat b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/tri.mat
new file mode 100644
index 0000000000000000000000000000000000000000..a09de19c989b8900d8002c458b58cd2ead8389b9
GIT binary patch
literal 339799
[base85-encoded binary payload of tri.mat omitted]
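If MATLAB is unavailable, the mesh topology shipped in tri.mat can also be inspected from Python. render_demo.m above shows the stored variable is named 'tri' (triangle indices, 1-based as usual in MATLAB). The sketch below uses scipy.io.loadmat, which reads pre-v7.3 .mat files; whether tri.mat is in that older format is an assumption here:

```python
import numpy as np
from scipy.io import loadmat  # handles MATLAB v5 .mat files (not v7.3/HDF5)

mat = loadmat('tri.mat')           # path assumed: run from inside the visualize/ folder
tri = mat['tri']                   # triangle index array, 1-based MATLAB indices
tri0 = tri.astype(np.int64) - 1    # convert to 0-based indices for the numpy/cython renderers
print(tri.shape, tri0.min(), tri0.max())
```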
zEtVNeNi<-?7br1WY*m(zXuz*MK#8bl1+XMV12!)MCAvOze^^<3eP~s~5~Jm;&Til< zoz*c1j+2x9)=oU7y-~+_jKS@wvn3qm z58nn;${M)J7<1Y%B-`=l0srA2_(J8%bN`XsXPxyO-qDzY<*CXod1!1Z@QK)mEJxEP2kO9U zF%zVOr;!r_4VE1iZTfaTzQ(8V3G6owO8{SxF&ci@A}O%H1hIFkmEwoZ%mc2N zme(YJ7p0nIvxVl`s1g>c@pXI&3;az0bU{YeqOl2i;7-~RzYI(+HGo&JzogeZ02f>E z3U(1fY@BFpikDzjG%NvZ;Q(xjmte;=Eaf=5Z;39=IJ@J)^#f?x!h}yai{cAtOw96e zd}8>wDLu#!0?0xk;2Ma1W_3Q}Ig95u%}!#u!J}Y2W%1(XptG|w_%=6bkT9!-odg~5 zB@A(>k(wQN6w8L(3=Z|L?mQkCH}o@7s&bcnxSOH)W$=XzZxwvSvO4Lv7c8WcA#r}Q z(s^nxSSVBu@p#p}jo<_p0tF$y-FXr)4D{1K4|`lLT+mP)ierTL1II{9#ZMYCjsqT1 zfYY`|6v~fCTb=J9U?F2ss%Cc{H>?vp*_IqCrsY9gOdNWSgtr^NU~QiC+X;+0YKW(+ z^PPlP$UuWC-RXO#`PXf9R(W*#)A(#+!@gSd*ir0~m zgnX{8n?h!s-5J5q#UooKU$X>MR^)(j*kf(uO)-}#Kj{3G>3`3n;$mf?i%+4-cT@Ob zCeX!)R~E{L^BJCnLW+4#z#}sM?GYh6LEOyy?fTA?)UZL-`uHM+uKV>5V1XI2L@1rwBV2d@H1N zmYfXu*@+TR3E;oVm{!j?q0@olQ!(HNeiJ%FO{WDVq;i}88e>{8Lvicr+A4=m$!dk-B4?{Lq>4XlKm?>zuKqdL*K zMAMC?qjQO$7Q)EfYlMTW=DlkK;Qi1rcc=MSoV{xunt@2k2b)s{tcbm9?ZH5a+MHHk zwd`H%{2M3_xu$NjUhd^1TmVW8*R&XGaxY(pG*BXL1RV+RK;$*2PZqe5H(ZJvq{>=6u>#?W1Wzou z@~=+o#0b2k^E$DJrYB&j8rF6@%2E|w#5Cpmk~kJzviEi;@RY_M1Uf5oE7>mnIbWfjbf8Sh>L$E=zFp19xJs{Ni&$Himf2xQO{lxF!z2 zl6JHvZg3&2D#m+?fBXzV-On7DIFU3lcrj;^CfGlu+|Kqrg%4|Id!3Q5P2HQmOh1Z# z)Uy!QD1_)hZZMCcA1f?`-FcWZ?a*lUC}$dc@>1`WR-`!R9@$N00i`u*>I}!}kyN`t zAhMy=#}Br2*tz!dWW&Xv8tz$>7J?*r7U-GOh|ZeC$2NhlbJDn%RCk^vj0wJlQlTSZ zP|%AGV*5zoj%?>j{@wf@cE1-y+CZ-42>3>>@E{(oy0;L{frUC9L5K|mz@QLY(o5<| zC~txR&jqQxiZ*#DZ(B`#93)+KY$sj9?)QMC z3)k%>5fFH>a!r6)PsT@;u7C_kXa_`3Gzy{Q(ZjZd*ct-nEnGclKy}VWY{)rY)9MJ zL@3(!4BKaVqUIXsA1v@UpnLlVY$W{w8&ZG3Mi}*fu)!w&0UIlSz((gEup#vaY#=%R zgAKOkAF$!~2W(9L0UJqwz=qTxuo3qBf3U&U`~x=p{(z0?KVYNt57>~}`X6kBvHpM! zzdvAOS|&Sr^jTBp%`2V&H*77C|6yFAX!)C}_PoAY5UgZq?N-II=x(_Wx5a>)%0+ zZw2NsH~+@}ka+gk@S4qGv1k8bKXrNdSZDRyyA`Ub;~$q+2rCADYKNsOAPrsov6RetZfL$f=L>vN>IdZM%cA7$?z4`urQ@o(G8mK;ivge{B8DGAlYY_+9A ziY>>%5hgSBdQsigWvla`~7T>-#@>{ z@A3G4eeTQcy0815cIG|Tb-kXi*R2;L0}fHFLN3jr-KX@18|TiJjN0ao6Tv1~*)PTh zc1N+-2(0tQDymyN!?bVrj8Q*j!cT)w)w9P*gzW6jZgIs=ZqJN=QRv%+UjTH>pZ?1# zRg6QDdgJYlL+ozoan_YRG!9TVD}QKgmmZPyYDln%mRZmgdgjR;j|=H#E4j<_(#vkP z90=Rs-Z$W%vAMjg=)lp4$=rYt3(+y>Hr8!_MWM}Ymb-wMr(a>>#R6S~jx zR9t#_GIvw?qtFAE5pA!g1!}ZA1)~UC@i>39tg?5debI4`8^)XiWlw4wANvOfuy2Qq z4mcLFKaP7-cdbIz8*Hu2-2U?Mw< z3!xX9ED%ULgnEl^5IL91t`nQXjtA?qbGYL+yENJ3z}$p}$lqAVvE?N_NYt`diqyI9 z0ECTSo@M#&mbLuhvRgpdaJmEWTerOkJ?O#e;g021l<2lB1NbHv2%CoPbo`cWt;8D@ zPe3JoyRS7xk6GpT>a9Raw87bfJvFjbc%rYlE(jYMtckgP=bg=NcAqjg$UgL>L24=| z=mbV|UGk`a$NeqxifBz}IF_1YUS6Ah@L&|?Ws1O@7FVFfvyZ&QPQ+Juv9j%XAwj97 z98fAtTKT@gE|`ov!}$CCJS)7FGmSkvvZGR-cLHMBGW)trepavQ>^cU9+-HDpX&PBq zX~pv+KOtoGF-`O`s!U(j3DiY$&Z53i0Lk|BbxH&2$Ahu78=mh4`3t!^c(SuR>&?hU zq2BhF(g3>u#4ogT&tauM<9JxXPtNk}FC*Jye$)!KV(qrq3f^#fcj&5^s{RHod}*Vx zUU*gVyOc7IYTYpWgWRFyGQeKaZ(}9`ZFRAHb{og+dCsNW6oeR1IT6ouQgTxek}$hkBy#pY&$*s^5FrUIoDI)&5_1otzjvu}zK&I}%f14{M^{zXb*!OXb_yJu zSD9bOYT9Kd!f|nA&{{G+eY*lEWRDGlx_QmKy15;xn?2{%&Fx6t%$Zj=*LL8}iO;xA zd=JW5kLS3H;#s$eb?Ep7sGH^7Wa?&*R@^0V{Jf-@5ci*?x#9wC`D{{Y_f62b3G`!D zPG|TR6kgL2hFZiZPG{hZH)SKkADFT4WP>plWCXH08EaxjBr5%I93s^(@pF^ zoLm*26DeFvCZ^XY!vHqJd|RYlM)GxHhxIu`xGX>eTp*|b;iBZOin%Cu;I?HUQF6Ff z1c;KI=S9hNV<1WnoEIe%8$pz8>?#u_i_$@q?D0eu{2u52ofG1J!dS7O9v)xs0BG9AT_jFMqieo0UB$sh$XHQN6-Kn1jSkTlJ1je03G4>X=VR{9yGurt9a%2 z1692%Y*tJ;&lqpXRrVVk3O=A?B9t7x-D+DB9&K@xk|w%)J8$YRxBGG4)T_wBuGhay zH)Dzl|B1cI5wee;)oH71fAiS}P>)5oUolI~t8TwCcuu5e2Tmt)*|_$R)RDTNJh#}* zn9@SG*ee_%=J-)^JTalr4G}Py6P7v6bzN^lY;b%LRFzKe!O>uR4uCdcw;R0z0AW$e z(SvRO_R(T$RL$+Spf(~sVN9@%OYm&xMUB>t-#bIp#!w2+5bHR?*e!H=N5^4P5IG3NL<+^EaFkfe5iTkFlivRLUGN@YSZbFSaVJX*tKEAm 
z0T{|TIu+9^q`5a$0w{9s=ocYNxytMs@Z+OGt;oTXFKb{`Z1kCJKm z(Xm&V*7r=@IFIf^L(0yzvSO|?=2eE?S{$tCjxF>@;pG=Di zbZ&^GTSLdZs5&v_w{+4)19vJu7WcN}Lf_j0ytC8es}gK;Hgr&PNy%ea`EqNV;6>@3|*lk%&>e%{EBT3<78*XqIk_ zQJW45Dky~yN{IzOWTdm+DF{9&MQiw;Bb`l7<3K@*cJO0II@_Jb(eFQS-4R5 zIH**U;gK$D$@|l5mcWOOOBAp!Ktv+er6;myEmGx;@Rw6 zs2Ut$WmVR7x{(ZhoD7eu{E2*K9(^2qAGSw&#VFbkKD+2`fIi-Io3KQ$0|n5>!g=&j zY6Iw_OF#hzJ?}zN65P;nmOe zfm@rqP55Cpi`uP={wWlN>!&Nd3v3as;=wl?32$+t07xo|;S4_?yppSJ3(r1TZE%fl zVP}0FUPA0C$8}`Q^s~$1r6P~q-6gFs`mzF^eU4$BEatgRoInHIA}pF^mv)<@0ThHr z(=q-9L1+LM{1|2(2K{3zW{o%=>83$7u(?5`g^I)3GlN`vd^xosf}B9 zFE7aqXqNl`_!)^R4s|iNR_kaSZ(h~ep%UDj2}lI~_6GeY;mQ`y_BTY=Zv`f{a9`XD zOxyry${%jSD*3 z4qr&W3KNif=~r(?Vh_A}J*7|k?P0FpO%8={;+AWBomKZ{>RvvRxFxY1;1~zrY>x7M z?GL@;m(CQ$NbH65s{ve5v@ zMIVrxzU_5>fZTl83NX%zK7E*QXaVE`Fpfr_CCMAB8qD6hK{)KzXF~GFY6jD`>LGr{ zhU5$UjA*N|@cu&HD#(Gp*k{)9Tn4$A${?3g8RRl$NV9v08@|J#gq0D*&3v_B;Z{@O zeR-Y+%r!3dnVSG4b60Q$%j!(%Gc*B6#zDl$;SSsrB-%w{kNrV}B?X0hg5)-6Jdg~9 z3zF^$k_;oUU%M7eDbvUg7Y_Z!p=A91Y$ikE^l;(uZ4PDU@2A8}?G+R6s!W7_KWeRp zX~r(C)iBNQTNSf)I^VmX5#C>ZKg9ezo$p_ui;h>sY?;pYEw~TI!vXwNBaV$uO!&sf zbmeayab!9%;XTMa&HrJD$%Oam2VTIV^p<4jdmFH~9rYmJ6nbNk$_kyQE^H=w;}5%w#75D;J~VQIp-$1{r5h zdRh;)VxF^?xYk3hXmXo4Fv66aPNglGwR_Ti1U_h}w`q%J?Mk~X&~Z9VX|}DTI~pC| z1#BKb%mpy6Wu(v)=yAsRwQx zc>Nh1O#W|S&A8up126YAk1EOzb@9ILewdzjL=W zs;g@$KJc$+9nVh%{`DNKf%FzE#~3#D7Hqb{wEK+bVS2Y|bT}NTsy?BO!5#@157>%5 zGCY5fmx-E%o-gEHdJm14Xk`1>TNG^>$|P(O@49e zWN=5Q18n~PvlH)eiS4WqM(C^O+bdHsn>n}KBQaej_?X}(e(R~2;C9|Y0;{$_k!H|X zTVRZ)*us1~w{EF0U%-+6e$F~`dx`w!9huuB<+~iY_dahDIbTX}GT7js{I-YPnI$<|q&fBb8*b^2Xza@$AOiB>H;dMUFj zl{=}S?bu&6{ExEW*Rxk7x{a#;Ktk2mRO?%Jedb<#^q=U+wkK2bUzc*dH0~a-u5$h- zT=4tZ6bYuWtH2;;yQJ~%{?5`l|I_-l&$9Q=JqhYc;?gzlI+#WTb&Ns#zwa(#!?=#;Vt2f$ zgWgT4T{TwHGnM4cm}$A+Lzf6a)s+@cszd|{SnE#9d5 ziCwbgYVB&Z65&*#gOPV**D1kj*OxZWip{8hB=6H@o%C4?)XiJQF3Ziy{>5?eJYDg2 zRR#9LEmvx_)u@!I4-Q7Y_q&b@7P)rA2GS<#!{q&bEKMIX(F*!2hOeb%azOZnCZ4%i zUp0wcwI!uiBfX?^D$BvhXSnOMK+W}a-Lr5r>H~*;sw@wm4Wi}rHw-6BGY9|hxLls0 z*izMw{a{OCtyVg{ zmin)TU`yrVU|)-@V!lVTYopq`Hx7?{KucY?_+;<8r+JCxA=H}inEJkoALwn2zKY-a zD``Gj9+w6~eYHFu^F3he{ph=Bhett%Z-_Mcx0Od`to7ekz5y!coMNcrz%$0BilhCN zpyn!SL~5?Lg(AFZh%fe?@HUHK`A~${2?^E&fGH$6tD8vEu`F$$k?32N8VA#@y|J)Z z32ceHj{^G=_PDPC8`zTFfY&_J*-Xvl!9%SK38j5Js$WsjKT%hHJ~UK$pqWuo;nH7; zFRA4>>O3VID$a7;gN*i)4s0rClLf$r3m$W&@oSL6~RLDCsb3qb^4Ifg#@9nQt ze{Ux+mHwG%DnoKE&6Uo5%y1CrWIGFs)VZPKQ^)iL6-J)ngm~WW*-~#`9oP!x9jP!y z8!trIJE`np!LE&_Y}aP+SdhpWl*gJC9WH@mPn-A93e>>^zY$R}&JN*!5t#??!#+{L}w z_WqNG-6WF`SjJ`VN4xa)@aLgwn5=0R#(w}#@zumAG}akzY^#@?>NpZdDX`+x=&SWj zkZK!>%m%Wey(iKr82e;6N!>H6kmA^**(4wB!Eb{duZjz}{j(puhYp|tR?==G>DC+A zp<%Da8;^o?>m(W=ntQf&P(`%|4S+4tlxd)0->ojx-|DbbKeGzS!r3pv?ui}DA{5}F z0$orDST$C-N3jpiX0a=lR-&FDD@&ux>GRR8;i>zdmnb$ASh^q$lX0v*f=qG3-oB{MEQvK76cl2QKxif&?ECGr|1sWG; z#W?W+^-r^hpmAYV0`)`v6Oxk^Uy>6O#lh+dNKRIKUQV)5yjooW$;pc2&_$?OcFF6HubIuPdSlRkzHkjqg@&&;Lur-z3H$aakUpu+4RV&TOl89moe1 zX2H?V8mHyd4Zi}0V_4s6qfk8J8<11P7iE`R>8zjpoqDAcu$qmJ#y)-g%jD>%S-@%% zEMgZN>u*|(RA;wK(oGvq*0{EThMJ`Qh-dea2K0;_$n-SdP6f>Lo4lO@5~nBrORU_n zo~E;i#36VSz2XOleTiO0e8NK3%bs_No*{pqoR?~CM^Y_fEoku?^rZ_ynCB21hlF`~ zb<04Qmsbb6I+2h0;MKuNU-Q8~`J#m-)pM7z?|h>M{}PDe>e}5Qg?muvf-zBEdM(MA zm=6Lj?~{Y^m66o)Nq~_QLp~5bS^`&73o@@b9#LnYqe%dH*+Qj~`#R4sRjgOae~F}< zxFgv2$~0Dsa)C(tV{A9MStnxZFcx%mekG-I7qDZ#L0A}j7}Q)>uzX*2EJ(XlOA6;M zVc+|@U_siIT9P%Fg8j$W3j3HZa{hQ?ZXvm~DPMB2X0r1cUuqz|n$$0O0F4H1sk-!0 zlC?Mu8ufRjR?<63))G*ibvCMtb;M`Xy*`>F{hik|c}g7@+tLX)w^^Jsmq6zZ&57^S zOxAjbV=2N&YGu&-vm!&Z%`Ye0=ASHnLfk+;YR|QUw=2CFcKdrbQDu6e?d+WCASOc! 
z?{ASN(v2;g*Nufg0o~ZadEFQ|7X2x%p4W{@Eisu$?p2REV7Mws?v;S#Jd;m$%j8~2 z&O>KH-2l1QD)>N<+Fr4mwYCk=$zy0vyN;s2lN{B37UdW|-TE zm3)Od7;smiZF?r_kn_ZE1NpH18uSe0W~duYn^nG` zoXP*Hhs`(DgZNGL5Fvc?-|8X!rg}_%TRqgjsUD`^R1dFjsz=>7)r0s=^$>kiJ%ry> zkI8SVNBB3@WAK~mA)T)t|LM!ZQ$mGmb*a-}g|8v29x>lkkI8SU$KW^BgY-@H0IcSJ z)dR4a|EdT1o9ZF`rh16JsUDNxR*$)Fs)zKO>LL8Lddz)OJ?6ft9^l6Gzv?kLNBysQ zU^@d7W!2+Xs2+MI=U@R#d_kpjuTC`9S1H?6O||?AU7rnXNE$>)1db9 zj>bMdcV<{idyG~(iib;UI*3e5G;NZ2#vC;qCE!~onT?A)GVRq-X2q~=x7 zxY#u0EN)gwY687k*oCHHb8((Z5)<@hVVCrVqQqk=b_~#)g`L*}C0aTNyFhOi7M}}B zw6`jDO`tami^~NiT7`-o6ZB?b2?3x)6R5N?KySuB7XV7M6qUB-717w`Wix7yYu)XS z0OJ1}WkyMUlP}1RM!jcev`y80XEPu3Z2^=Ce7ADfqgCc_BoBWuWEwqnx_r|Q7@U}f zjb`$6bH+w9mww7$CGIa83op4Ej}?jzYrVZJclwLUNye^dY;c1+^~Yij-y+vXd?v00 zZ&W|Igo-x;=~$SnLHhfc1o#PE)ZZqpJl(JIq3HnJ;rTCqD@l%*zxV-wqv+k}8fWz* z)d$pONU-%mCfEZ0hNF{qyD8NXsCT4R_k3|UFSiRC` zFh?oPe!ipLMxMsXnTgWJQ&Vpl^p5+uh!n^JjK}zmpH_&z=LV98O*2{T2ZQbgdR^@2 zErlsdKQCm=O`r$!oO-X#Fwbe*!K_aV0k?qSzW5gkA*L+0 z$xy95H&nwj{6nLl+$ba`jc9Dx)AN-oG1g?829Vo2zEm#Y{?b;jv~SzX$jF7EDi~HI zHO@&1SQT&02Bq>H+-qsOjZLk(FHswfbudPwr`kFdE0=Ni89qzdx9#{ z`%Wnp4fA-*I&Nl~U(tH2x6RbyqE#4^(bI+<$(4(_ztS#dUyJpc*e8oC7Y#>6lG!5u(I3IwY$?IuB9cH7zyM0Iha@in;my1S7zH{a(0xu1t?>Dafe zt2NS`r-)C-HPu~@OusoC*qg--q1`5$kA@g_wpUcLw&4{;M_acYJ)h+lUK+)77x2=e ziq={64ET)qMsqP6#@+P7?$)+-RDS4BCj7ARs#`>FYT6O)2MG@hj9gkrI+49N{dYV$C#SS-5iBK(Hfw} z+3eBrxa1=4Ze;wMcQWjyzu~ccb!%{BVTc8-IV`{>-_x(W%0gg9dmHA5Jy`uVIO-$> z(r5Q~kZwd~zj^<*H=lc!7AM&m)6c_xBSX_`egm9Of& zt$O>p>Km-w7KWt~Tf!;`EK}#Oic6e8(UpE*;S;Z1j&E|6vA}}%hVMr|Sp8;olm|}} zpVi+IomHh;(Q(CRq+W$O^fk_0E2>40pSga6lb;3hI1@S8UsumaP8$#e6j1B1Q8 zX>{Fr(*dcs<00w6e@zEck?DZnyy<``G99=uZ#qEl?MkB?&YKQ&lmu-cTiNeGUszz_ z?7q0)Yzlp05ttJ0P79wgKEd}7Hkm8eI;g5@3g17%)q9stHXXCS^}cok=~B>nw_4@9 zO-+vmT>6C}n`pwu((259QK9JW!m-Z_PAkE*24*r*^&FcYJ4b!PbRO{u+L=8G@6LF> z_^|s8u~QxB~_Hhq-s|V z6TZAJHhQo6(asZo!k70I>7Vz{^Zked_T4Xo70_IM3v%jIaQ!Y%tB4OgO1tvJ> z0$}pB1STH8L*rrqaSVu^VNU2G0Vy{D^aYDM5RGRb#xdPmJDc0T5wda%GL>*?S*A-gQ z`!-;YycNAt3{2jBTnV($_b0y|fXUU#uN%?isyx<$RG3sAn_M$lS>Go$BV{F(6N{kH zxLvxKL`#Z}&90e@c~&FcKuQ2h0GOe~G@O-ckn+J20A?sL^gJRy1Ugf5)vP{5%&ZC-#=fUivziexvo+)cc9=qAy+t#gA40|x zJ{kba0xM+X^bPUWA_>oC@;f zj=%~TSq(41ehk8Kxdw1X2|tAetcW{g0B5xEU+5plaomY~II{r%i9V(7$sNmwGt2No zbHIwgI}>3=gc&SgMS_n}2rD9dh57;(1D~Mrz1Nqyk1n3EZ%iMG1{4H>q|>JA1j3>o zwx|T=IE(ZJ2x1daC9?Srf=`sa<(&|kk4ShOhEg7)Sdht#%|)#Pvr&FShhPKlY{!HqyY%+odaP&5CLaSGZlFMO$ z7+G(V%mKy*iIKs&1v$V-EbyyGP2**&QNkXWPtdpM1J?5UU{Hzq2C|0z9W)K1*LVCW z4ls*QLz5JJSenY1_l6O9aMvJzmvJ;nfr7GuEN6d5HkE7axG!F57NLzgm|$|IT^bm( zXjVf&+>6FM_cDpNmzDF}3;CJJpW-8C5zA1AwcsY>(kOG25!rH=BPKBejbPUsfZPQ% z;BV^g;C9X~RCPhWHtao;Mqi}=7_H{Gl%8BqcChzE*gp9McmtGgiZDjkTue(r1~Sp; zCZLS8ddbVyxBw-BH7=Yz2d#0DJHj4_zif)oMIGd5iPJqyh6C!5!3}I3{7%&hz1Ut< zkM+Paa(1B(?P0m%a5JQX&qe79xsZ;j)sq7^El|=a*hGM14Z+`15$Iswdu-w zMjZOP0p2!uP4XY??71STpLg} z*7KB&REKbWTF781pxfJHse6{H?i`4wJIesg|2r;dvDwMbM})eXFSI2eL)HdTq!Xl( zbKCsByQ|B%V|J*Y;<+hiX+u(1d5Q1SWG_wMrp!VN`*Y@t|k;Az63itkNV zfIX5+f}c_bSNXnig|F!jCGN5323Po&yFTDE+e^;IULIWI`voQo;@XF7`1BV1>|ai;E9rbvJiTEu2J&iJjZNL&aF zC3bV?%<$TZeWgkc;;WRcdemG~bO zOtTp@&SwwBh;ON2nj)YPpFONB?p47s7egZsH{>X;R-rNWLn8qG=NUA7*&1C7SJQ>N0$%htnBfg5_}GK>}9 zRPjnym>_+=P&>SCz>qO;z5YvzO^8=(&UxA93WXFxZp~S#DuNFcorTDC1|xHw!7if4 z$(F=m?Vq&NC&3Brp}^X|WL2}03|K)vWM&t%_9G{keejX*w*oC>F=Pu_%vEUTL$sPfaH)H@L7=mW+MNr}vxae>+vV=*N zS;A0cmN1KCmM}WuC8g`p+7-}&q^B(%dE(J|e;6>2O+X$m2#sX2xt^_F3slEptzBob z+a|DM?ySK4R|)MrWkh z=O%gLH=AURGE>WF7pGmA485-vK^afl*3*54p1kx>YwKN!wp`@kRiWO%?U_C4J+u+M zDi*X$(>)A^1-d+w>)z_6pxT2jpHuPy(eIwbJC0XmqM0<|xepXy|C`;E$@6sKa0hTei;va5_Lk 
zVfkW*O%WJ$dvX2y5h8yO-JUuv3nB9TXtWrvcI)68RUuSAz<>ydd*ndbC5fN$?~9G+ ziOVTnATBE#1hyZx{xa*qbliY&OC4;fybPj;rAN{lU%)K^qKEx9$xOWUF62zJ4EgK5G>&cGxPZ82H4H7`G`+V?UIR>;C86Qf*GB|N7!K)Q;4UO@!CVo#~f*Q=xd=SU6_C=-syEW%SV^h8mLUTXK4{l4<(orc@{*k&BJ4i~pCD zBcpO)L8N7g+ggI&Kvc@L+Fdt7Zv5Q)bIZXeE&7Z1@4Hd^e{Qg7;iM*KP*nRk3QMjZ zKbx=TocbueJx{MUPmue!K6WU+_VM%8dP4&5OpHI7hMX z_$vvI-DVDW51tIx&dPc*I+i=jb-*8U{?z-_U)u_6?ER^C*1_0Y;f5SXbbKcviI`I$ z7wYrmn#X0eib@$clXPPbM|5HtA%RFQ*b@@*B-0~JtyGWuXP#PVZi^EVs0;+CTa{l# zOpV=ZCxJHYK|yW^Vrp)uQ-!^`4#s|lH&)8BV4lgcU~WAcvH7Kqy-~0*FTL_jOE5nU z)}nkMK}OCa>(S`Z2Z=>~E?X?9Us}#SK3u67V*V!7@5yz|vKyfgTXQo6MZu^PfkB1x zE%)9EeUIuR$B(d^ANvL$Vc!iIx3MqmRT4Dj9WBjnIT#l0-e}|RVpV>LcK{!8^0m8Q z87;4%JS65xmPgiGDy6sFp~w%RGBss)0hKu!Y>iNvxA}@-MnZJfWYziW2$%J}wDH6G z2Jc3wjOCxnyePabWWSsr`A&Gak5T814GvbtWUd*p*_+ju!DZTq1}EIu&0(G}7hRE5 z6|{3tMP`bA2o3Wp&Dm65t+hYpl*h|-fg&xwz?g>uUanMBg!dNNhlZkn7t69DA~Vb{ z*aLHS&3MIq9*=;>IJ4P_BR>fDi`M?*jYygNe;{Qn%Tp04lXEYd z7bGyx!&jKMXz)wRk|YhJx7{Xq%WZAM6nwcC3Zr?`p*LRDe>_H%+~3~K$J=b{(vK1Z4^>a$%-@Y^k?!b9p7s-Xu~4aEt+sQb6Q6MJ9@N@Pf$H9INsi+(DBluRkQb zaDhUepwh5qB|qu+bCO2VtL_DOi|STlqEZ>T*Ywu*myh*&s^b3k7E!F9ch}>st2?xI zCG;r@RGbssHx3-qY}N|1ib%-)y()K1u`;7tmmA1$+o0uh;vcr>d8>}*CcHtD@tzr%w$ahy8_${kBJIt-R z%W1n7h+6|s1+61nrf*k8$Ny>C29J$*>_ouY4@)MD07t9utHfDn%t%3n8P`rJt!M0RWpr|CITM58PiJx=a(uZy`R zAhN5BscySQUu6e?4KOYd8M@@f={0NMCI-eI{~~oVik5?ciO)a;2af+Bf+Kn~;gV5C zOfWib#3hMG-6q`8F%z<5R?cU~u%Go`7B9AgA64O5AeDwRf{h`+pjG7C*i=LTH2tBQfCA$`BfK|Blt7 zYbYcJO!C8XB80jqBnCu(gEr&Zzd%ULm+#R1*IQo}C%aATL<8I){5YFc(!CoEuu`}Q zg~TMF0Z#Iljf6KlQP2QZocl8Se>6aNHS-z@i9s*WU<5^%d@}tkdV%W3@BV`H&Og8a zl4sV5VxF5pQC#*esgl6&fFpXz9XVaHBpCFP=OAByRhh4UIQ>-TI&x6@SrorR8oj$) zYGU+d4Z2FRHARebn>d4RGn=qrmR8!WfNoP>xNchLU!aa|(~++?GTH8AiU#oFv^^ip z$UTMzprarF9a#`S_}r=Db!2k-*_AMWboAJ6=>nrKtI+_=+7$6Zw~1&NfL8A?y1UBC z>2_vTbZWhjASa+mua97t?Lx_ZN_@Ij-VY+4uxgI!& z@%wOQZCJU*Z(i5{z0kiU$@LTkhJu`ZSy4t|c-1?6vxU4(&rnb(ul0 z#5jBegvUU}jNXY2WD4%}8>c!?KH-FW^I%X09w3@)qGRdlpOjevIOB0cKtX{ zVOijcMrosUJ$cb$KYXamhZ*haNeCC4yNK&~bGFWfPayU^!v^%kl%JnWg>CoHzaeWN zqWs=uhG^ijrJ%Dn*)x$tQmE#EiQCO9v)o(}o}m*7aOPmvtP7Wk3mr>< zGdr_HJqVZK+m6&#{$VW9#4EUXd_7z}S#F$Qq4{g@eLAZ&Q=3?M6K6zgt_1uW@AUkjL<@q!Jo003$9^^yaz!@)sVU7^IS zuZiq~{Sd5&73QbdCa8a`V@GDUQ!Oc5qhf%aPd=PXZMRcr85nV@b!d({2xlZgt50gdv5d{bAK@X!8 zV>YcGSdavtI>f~>8>ZEL3hdxmg5z%Q9rKo3UXjH2uq6LdquYC~DV7y?6dPPwc@ z3=IN9Tq0a_$=lLv5Ixv)s%IU7Wza`oPehL{SuVXs8;-3`S)s%;tD3sxxY$oXc3!C{3uo>Ol0^jvR`B$=zPpoRL z+tA%@<_U{ETO6LgsN2@P#cUU!X}l%$X+xb}cNq$fDcXX2+6q9;?`8-1%+M{wtAT@R z!CzPDB2;E=GeTu{UriN7$hWT=?v1&4+f4ttq$)M=N+K7tyM09}q_XTWXP)HJcD1ib z9VKxbN*OBz{XoY8~P09aD=$1BX5_JgarioTxjUnr+n`YX*b}$^ECX(oYHQKdHg^6t%0B zEMdI+Cbd?{`d<}b%8wwLJ;3ec?-|k(Nan+4uDtf8Uc_zb_C*OZ5$@|?GNN4mt5K@x z@Qw5NwM8yAdsEx6tS|{K{^~^b(Wp^YMUYUhuv5$PQD#5d^K=j=>uj^A&aAw^m>@L{=c zC5O^ib};tEV=V}X(Uk?nz`_9p#0a1Z1jOXa0%9QfMf;xOu(?1N0%9bQ*wwF^r?$}! 
zKTIAz$D!DsNChqK@rMpWf1tRUE=Z_xedsWJ4aL=zLPE{yhcb4?;P|JR+YndNK75%& zQ7a3GZ48@dXYQ{I)f#-_5#P5Rl3$MRW;e%hE$>w%4@YAAUJ0hGX}E{H;foM=b0T5p zyu|;Z*3bhECI2{c=DOs!hg!pFTuT11!Q_g3K!bK;n4R39UA#3xkj7(2W0-P-cJr18 z!5Ldxb1`>(7jO0W2{%V-!H4`wU1nH0y!eFM#CkwYR<#3k37{oFmy%U7=+X^Amy%Toy3}w* z(4{2vKj;$Hw;)Bvocdpi3?$I}FGU6b%>Sjx{0m*mQe+@$rWZcwr?$r^O^^E*L;;XE ztcajX04;$RWM*;BJkLqU?Er|9GK-)~iVV7>R8}cmr+;S$WurH1AFJNY@!fZ-C>cZ7{oNa-G-=H+`A{_`Y3DU>1dym3kT{|n6dc|U)f zK}#A4NX{JG9RJ`ST?FF%-~YAPZ|-z{zC&;S5WADm>E1a&_b&F(Gv0T^`Q~T4OA1aF zVLRQQn;dx48?RwNFahRKX$K!p<)HZX^ z>%mcmz@D@Mq;e9-8m+K^ugSQZ0I9T{I`UUr(E;$jDbS{EZ?q_|snjnD=*eBg?OWQD zdkHNf^k35s=m`R{W=KZXd|o9ZYs}`!8h!5A(#i*U9IzPaPrps|Fz2M-j$*&Y<86cM z!#Ypd2Dk9`fuD52N}72iuE3(w5R5YqhO!3DIoW-Yv@UIYcrcTHp4U|eRE+H{nf-HDhqrBYj`Ai=3iwodB&ecM~L?Qp&KHDG>L z<@e84!aD0F?aq(C0wJ@H{KUXnSc zpN-1z!g6nYHWE2r3VI#Ix%JtUU}@bc&3fPa`TH143G=R<>65vD(50ErEp#*~7?i$Da$tjp{Nl4i3R4M8zUK1x8+uo!D(P|W#l zK+-H%jp{>C%!;umJyUiLF9Hx0vpJIuB#pUQC4yqwe6)e2;V^c{pqO)KfTUTX`l$~= zG0VqFE2j({UIZa1W@F|ckTjdk=m?4_^jQETO&{ZX85DC~4oI5SstSDwicwR0BAhzu zVC0XWnC-4CAZg60B?yZ7;Ij-!ngPZ-85DDF50EsARFnD;6thgNlrp91U=)a;7=2d` zkTjdARS1g7@>vWdO)q1)42n6g2PBP}s$Cy~VpgU<>74R#FbY6WjHPQIkTmAhG6co2 zeKdfi>1S-Vv~UOj$up2NN~&#rWm}SKRntq0r!WpiL0w(H3rt)GfTY<>ecHayoJI4I z6RFYJj3t)l?f#SJSMo4oZ3viov?a;mC6}<~m(t!7mLE28i&?aD^77h!uT${%7XG4y-wv*}$Z@aduL8^?_+miUx(W)m| z>wr5sXAx>9J(p+^nphLA+~8iZg8Y)dgt z>y|v0T*CVL;yiNs9@egdM~A=IKMFtDO+5LXW}o}h^AJ&!T@y}e*ju4Z?q=+**x6sn zguI%|*g#)D_5;2L1P@paI{;?m0bLelv5!8zmDnDdRTB=u1MHN_O#sv!k^wcCDFoEu zWIzos=4o(01yvKE4!^39RL4>aWcAF$Kk0ALAJQK!0VBceqHC+{_rjyF=rB6j(ApO6=^_Kp8fsEc(nR{W|X;;BS^m zJEe_YTc*hPgZ-5@CB@lHQ6{VB$tfaF57}T|klA2XX7ozau!i~qw99_XNB)pBBy;OI z*rF%g)Y>;&>NoTYEUEJ>@G7I7=M&=)B+}DcpN2KiS3-dy1KtUTq@I~uQ6vc=^*^Bi z<aIv@Lk)rajL=-^{}NE(~D6-AN|GXC2I;#uQYjGCTHypK8vdtar| zf6`AuBiMK|4kfa zDz4#voh|ekS_e-MY6d$wi%3Xr?hmiaroc_dc1KJ-#LJ3JT zs1TwzMXW>@x{cdC`@wf;HM)>7k{>W-@&oo+{KC=j%ZZxkLdjfxv<+m1?kKC*IE`+m z4{Qy@**t+QA!mCI@+%Gvia8`5%dA3A&u-!WiFSC@VE_RJwuCC1Wm`gQZM-Rx0T2-0 zf@KjQk*1<^J8MDfV7(ydH2SX#^Bg=jwaliF$tpe*c6(s+2JQCn))THy$J8_Y&@)DW zxhAB}Aal)1Y;URzl0giQ)YtJaN(?};6QV3@hgpKp&xo!W- z%9tQ4Gxe6`5byxG6qNvSIYit*Qlozt_$A|`g6E3~3HTzb1b%`Vf_pJ00X5f;fkKy$ zNuDqG$V++E*hiPCB;1Sl$V>T$vClmq`g-vX@={(gHVq{Nl3wEA%+fJ|)yFnSdf5$U z){ISAO|{`(41>;${{7gO%8!O1I%|Y8%f~)J1;M=-Z-g^T#)L5+3q4;<>cW}TY9C{! z3UNm7bm7b*wa>zjCq0e+z7J>A)TW_;;EYD@!x<$tf%3->o<>|hIHRpLr9AZkXEfvo zXBMb^p?uWzG@2NOGt1OILH)oPjSa(@#cD#wk6E5Zd`O|8Yovd4oXWz@>(=DbKX-og z@HF}-70#?opN8^*Ga7~TGP+{A!1N;<;$xiQj8^)T=@c7hH0%s#7N&nG{)hn+oZXD*mU7}Ypj*m` z@hF#OyVMYJX`*9uYbGfmPt%lM26@`en#nJK*(CQwQR|8i3u_kuU^BJI#PTyZIAfP+ z&Yl$?R@ayo&6R!=Z~t2;XBW(8N**K{u7>jER_mhNkp7 zuqr-UQa;J>07+R)foNb+#h1Y;1l07;12xJrpauktOdolmUN}ukb&fotK`cG`^FkPU zF=lqBc)vPk7O+fAqX}u%Bp2|RKlua~Ry$<~hvSYRW!VzEa2O@o2vsNQgy*a!j!zQ+ ziveK9vGEuHGi6D&GGOM}001*rlWJwaOv5+;Gj8fRpUhzxWv`Fqsc~FvJpyLd$$*)^ zhh)GE(c24vnFwle(EG1n?Lx3JUPuAO8()%)C1_6HLXDRm(FtZeOSPKr!>i;@0+kyi#0%lML zozmg|Y1GIK2me68jLd;~12{0N|LedUgdCUwm_Z$KN- zX)>DUGihA7TV%it>flw{B?D&AGmslQ{y?IGMW}-@CKZVeZlQ;c;Tj>)K@jRNSX#OP zi4M>+%suP42YhA}e5MI?daIFGzy$SG#ZZur?F{PMi8xFR8HYJZFHu;JbZkgMX6mng zSQ?yJg%W_gUYOhyTWpG044v!JaC=dh*=G3AByOW!nC@v{tV9J7eL(-beDpZBN+8*B*e=&>DLKnK*|I4hV@6aN2Azjd@SvAaS)B^De zqv2N*JmIR=hzvaz$pfEX|K;SSB&I^>Wt)L9RZJI(v73mI0 z0%-G+0EtWzkRp==%mXt9GD!d|5d&^{Y#z6q2WI?axaB-Bqx7$`3;{C=^SC7fW^52{ z`K#Bzz>J;@m_b+!05btHU%F#yaQLBI?HVKD&ASjd1Ggv9_b6DIju7&7dkaen16MtoPru-Qlk*=Pe~ z^WrE#HX)Rm1pwKk)aXFr8{0Bi0-qjBRD;#ZFN*45}7@hP8? 
zD_#v6wmbh3e{un+pBt@(E9OYlM@(EpKRKUjHk=}wn-0N zF87NEG_QhM`n%tv6C4s5){RJVbpn9yeyc!J` zTcX)?R{TmuvuP1rEH2qkSL%8>*)O$5$HBXcb=7Jy^+X7C(FoCo>ao_65(*5>Ib%P} zz&b`c6|!xPmPB(I9`UU0VA%X>QAILT573eI!(YYm{FDbw7y>E~LrkB9nT6yX3?-^# zKgGbC$)aGF0bTQlfSDTc64SI6kef!6h*gtod^5KG$+vZ38K(^4c5~I3@aRkM6sR6+ z@f20ie4Vo%c6@3JYOn`2I#4~P)TRHfpk&5Wbw;mX#$WT-+i$ra98MV9a$w2i0sQ}n z>wZC1)#DF+gvm3wjGqaUwK0N$6D-}c*^}0tpU&3KiLD2PoLMOJ!i za*l?jvMozwz_6 z(=6xL4KvTJ^|ZR2mwTI@`Ck85zTQ8#j=k0tTsQEUd`|p%VVCXK2GaBju4Z+PzyH_$ zliqloT9`Zs(V@X1b`KjY0J~y&8 z^c&y*ct%Z$vSU}|CSCuZ*XVWrKUBSUToX_9E-YQT^bTKzP$Wb;p(~0A5l~QiHB=P{ zEp$+-6hT_(ptOMWA}!Pe#Gn)fr9*;|ULw+@d6#>C_x;>={#de{nK_eXd7ha$W%^Q6 z_4JKgGLmB9q#&r;E-!u{DYXHSPl|<$rlf0pf%36R+_Qsf?NZ|h@4tyhq>n6kxg^zo&plbpr% zU9e6;G$Tp*VBH+292(cpRR}ZEv`JEqOb=Fdm;Y30S5Ru?OX?!QCP?Ui= zQP@clk)mO2Apgfn%jQ2X?B7(Ldr~eh!O@8%nKs$oYf{7Dv;jnXCo_+p-8JWIP$MTK zJ%I4-)8|rf(|r_<%Y+4z(y4Rm6;mBub%!|gsY*5O%?WESPnznCB#{OvsL&Cg;a7b`~dT(i#*$w~B~w?o|}n~**~`QqG8S!Vzbq)|zI_KowL z?{BUmycTay_Ht+WmvGv8+J7qZuQ#HM=2T+0XIJ4*olTrdDX3HUZ|o^{?oA#U)f}lJ zy7=Tz)8Q}WPTaB=kgL)0;s(+=xaLqBube2b!kjEj4tn#y76pQ>6=!gV z-Q@#oiiP=R^J~rMov<)U^d$8z$GTBnSLn`0IyCA)T)T3cX=d=STPsH4sr|0$`bfnY z(4Tm|)|?)dsh`E8&J1t^9Eq+PPTsrCgB zCHk%+cAM5m-iZPtUnn|ROdGwOuQmjTQhe9kcK@u8l)-`CJ@fSj(|oJcQ)?e36Xrv*e5wWR`hkqS(jC(ue{ zX{aOT^j(Ma1~(vLt1Y(1i&SC~Jb_jsKoYXr;7ZGE=B--`!Tcdy|_DHYmrf0>r$bK;(iZ;BFDo{DQ z*9d%0%3I~~f%~g`t?EAcP}Nploc46j z^V6bgn)B(}6Cv5dQDoHXe@!3yd!`J5y5|F)4ZkBA%-TN8AE-NQ|3JpQUYwaBv>+#U zUbN57Xex(WKZ~241s1r=o?fg|M_V;i+iw5<3dsiOg?#$B<5qxh4Iu$a3S?r+$40oL zFjH_6uWvv@#^yQX2UkGTqj0(}^TQ@hCm2icwr2`3fPerPZhObmyDhG4AJ8a_N}gZD zEqNP8;!V_0w3raqu?+nI0btgBjO8*Y4|QRMTd?HzcSxAx}?jb~s5Oi$7N zAQd=qk;0d?zIyYVLiq}7Bl?UbJLHWs{E)S=FA(5tpz3X~Iws;3^8jws66jUfMRn>7 z_Z%whI12Q7c=TVW2 zuRUD;^G*mJsLHYBjEpEA?$tj?07foh`4AbAHr$UtczD^cfKUX$RlAcvAdTz1#NiXTQ}#fCM4!la{*3e`?k#DpPb94g0P)oTyCQ z?Zj(c493tbXTAro{8Q#270}?-aDDS!riII^Ddl1?d5@hBy_&WHEq6?M6Rejrw@Q~kHCv$+x!)cz)^RO1cr#u%>8P?nFbSP3E zb@b-@E}-)E0T%{Hvip+N+5_kZaLsQzVVz|Qomf8t`gwQ~J0CHTO#Eqf@m{CQwr-yP zL{b(t2VeB_ts8z=)NFdx{Qcq~j~{4a{xui$xz@Y{hF%i zZ7&|O?z4+mkY{-6jAx6z`}yk&S)-EUp?!<=(R(G}y*97OL4F&Vxesqv-4s;48}Z~x zGvnra8HcP-w)?K-RmiA@8Fpl%U{#N7!;H{Jcj+ljpZpe=rDnRMN#%MyQlQ7@{%6M7 zZ?_yqKhe(=>W|sgXC3@tQM(_9W1C?Erd5%Pv-HW)B}ZKa_qHFNF7QJ6lZl5P-^{^} zx@?IS(B#G0Op@^6&g+<;MCx!aFkbWJq5z!)R{wMXnij&|qath+YkJi6TkMcC+>0`w z0sXs*G=Xe?f!(;#hhyEv)DRQcIg=*>689Uk^Z+FSxdU!oYZvx|I(|Q2;hR6 zD8FvV3r2rgn=3)@SRA?6ueDjkVRch^{+z}3fe=!QUcC6Oj2#t9NYv-Q;iC7pe}(ou zM9|pa4+V66>PRQAUse&Gb}#a#g(xk03EEIi?w);2oqP5-oP(g&R=A_`l4Lh}m`APB z`#Q8oU+nO1_abK-z31|Y;njAPKWF8AG6?BKFMfPi=1$h-*1oOKo;ztF>Weq%w_m9L z4F?(+b=%9&(FFYAO4r|>4;`IGu+CNWU^P<@*9Bebwxdq`dd|xuRviQHtYC4eDXTkQ z*fbeZ)-(bYcjb z(!i&vr05q}qw}WrXZ>x)y(V_M=&V&&5I!tE+zbbtB7IgZ-d;C0Y8A+JuCM(}e_MOa zRkM@!UvbgC*rEMi6WclSzjsBi#SXRin)uEc>&8c2(oWRQp2w&V43B{*^TpDlEi!kf zq4uo)_Uv90$HL2iHCG9ISXMX^7edxL+Z1o#86UMfx^%7w3(?>HxB5&w^s>kr#nBa! 
z2~J`I;(=-GZPH+*y7N(#ooVbNs6X_+5!YNLjxE?wNO;g4`*8PXUxBz_$)w^{86%$L z^l{pN4n(Bgc#H(%7?E~*2Z3fquPTKFvb$bJ(>HUvYigiSF7A?z z46y*Y4kKp!sDU*#CA@8Z?Rf8`BYlU_ZJ(^S8-8SFi?rJD)H%?anr0{ms(52hEI_Jb zC+pqD7qXBAs#YO=&UsBu8cY{c+mtq^GdcYhdvkL7 z-ZpR{V(K+)T_xH-kNdM630L2oRK2&&;8(!W%_{xP-zW`p&2}YHq}RZ&AdysiXEKNo zS1)h7a0<8+ww#f^*;5peMPFhI~hYn*Q?p` zh__db`?DSKRo|M_y{EK#@`p@Qtv`DseNM?8BYDdHqj;-WEI?)kLfaGh!5zbkx6Nxw z#ML9|4&nv{OUe1FNKMy^!|2amjT())7N2P3Q zx1R60(d;k%?d4`$igRk~F(geIhQ`A@S{sLIwoN5@>d$5pZq##mm|ttR{)%*-;%|Ak zH6#`wFjGr8i$ylu#*jSqW*<9-OucNWN}CgM!|-ojR{x4rO2cq?n8y`3GI^Lww8K}s zXg75ZZEvga!u}Mc&owC8b0Fiq746eW$t8cS>gkfuf33s+7daYezJX8QmV^sE6 zS?uspt`{A;Z%uJAnN(=>N66t?{;fYk_qO>I7wN_OaugTiNrn1<DuFowkM4-+AJjW71X84#kryfZ2lW5eIXf9hwS{RVdz8rwl?2L>TI~2C- zb86$+{>gP;y!|ld|2wugVsuh?Q1=Sk|5My^mwiVX4i`dpY#%@V{^&7KY$yF~Coewy z@ABb7)-}JfaT2I4Ji4;M8hDNnUXk9B_UZ|^B)rGnN2MjQ*S*XV97k~UlG7mlaoiEu zt`DFV-beL{YKiQ3|4I%XBzSvCX)OQo-cjFf3&2`07wj4JjV~OMGe*%}zvkR`m^3!d zE{z_A{;=*V*dOWpvyej0A6@CnoD15a>|8ygzHt=#6$m!igYy)C zcOoOt4(iX6@+UX<4|F2(M>pXo0;)yR#?pW9huicDI;^PpQ24$vW5~W_GbZ?ij<@}- z>V0V%!~Y;o$@NsPZ9WN>W$K7u=20G;x^)-wJbB!}ks~QFn`6_#v_F4O=l(<9r$%2N zJQR(~9rxpSnn0BGjkW#q$#EGxADy!Voy&*wsQ9$08mkb?sgq@Sra(!8j*xGL(NgiE z!yu%-s9x@8KMr>TqNHz(?a-%(%h%`c9LYZB@8s9?Y>uSdkHe~a{85&5>kENIvU#RYxW`kD7jM-LmuKc_& zW08l_vGDP*VuhVq-q2l&0XQz)T`B>jf-ZBLqHHNGb3@Igp`JKMPvxSXSol1%N{I1ru(nEY z#(TI+v9v^Oxl1L3TG3^JQxw2iJb|dWR1~{|#8eK7-NMJ!${^a-!Q9Hg32*BzMURY{ zcbAF>1)$5Erl<}|%bZYi=_nJ28&i2G6AK?N27?RCWiW8Yhe=5?A4Hu=NxlXhqTl*K zFR_%q^+RdAiIgY&2hEO@x7c-H7>d44c)&2^gug8%$;c9glafpXv7+BThf*e&zI~3; zNQ?9#NFIsH{HkA2{FtQd)hG86+Zrl-+nJ@CdC(*Wj)zxPD~Hl0mo(0Rg9J0b$LwH# zKQhY#rJx&+$(qoYL+ec>`bR&mbhG+aR$qmUlr?@NQ$bUQ_?t-akA855UzT`iX^PT_ zAIin05S*?*8%dKewS*nX_J2$_%lB^ylMi$5vfQ<~zQOp9dF7#{JxU{G=ot5}_wS=0MW9i1<4bZ6 zf+CaI3C~#ZT=rDN=ALtu<*v?k0fwH)#ER#VFuOa>IN~HiE|b|6pIG@^?35Prt}zuH zMBmj#-&59JwyfTkv~;(L(FZ%wPJ56(KMr2z*X6e=r9<61CCy{%Nsm zkRZSs5m_basR6Uo_2wXqB9sOU9q?6B5T0|&$p%&B4MooKo1o?uNm+S}CJ z8tsodAIbScrreit#`9Eo@pWFl@o-Td6&inxx>y_R%edmrE4*kr8R{M`GN3|VADdw} z$h7-4PI#3HFRo5{uZN4*QK5;)X1G~0%YKauKA^&jxs!3{;UYgOH0AgQ_JmBaU*n8t zt?&{8ru`2W)ljFej(^|=$@Kd*u6W}LFRD<6qKAvLsMDllW$X%>X5YgJf1|>SBb46$ z;i5R|G~rkor-5wx-DEu#RK4li`mLyFIPq*t8EGedGP||6;`N7kilkF7%KS51u$Q&S z-AjSw!Mw<%+VnhTtf%pQJCB37{knM|pTC(UxA3^h z&7k_^4z6L6GU1ybtE2{J@9M5EkY~RvLSne#9;$+_b&a1bRY&Yf2B6l$gR>i(yJpF) zDsFNMX1(+W;+V)xj!hY>`}JJj#a+X-Zolq?#BpqNN2Y4aMbGTPKcq7_y*H3W3C#T44AL38nErVMfCORo41PS!R>Tm;@HA^ zo-px7lHFbobqG#tFQ-54WXx55lkw!Mt2}I5uSlw1TkvL)6gcQoSUL73;!0R~Bk9&& z4lNz->R!&rw74->H9>~BudZq^;k!kA2&mw#B0g{s^;m2`c`o3+5dy&b(D4PlU*EWZ_vtGa@a}vYfcNVH7x4c6`4<4*olP&`J<||?_xI0P zF5taU6@d3586sLyZ8)43ZR#eC(kRfG?HwgDF?it8TpzfpAJOFNzuU6dz0a_9sVBNg zvY852sQa#fpiOwru+^XDYU;*wWDJed!PJvfIcJ#98Ar;-Ji$Sbn1dJpV*f}s)4>Y$ z-gOZo2%!vH18FO!ZX!qaZ7+0A>Pa-5Gpy&VkkYYmaM0BlmKXMYx}0h+KsSoVZjqn0 zW$Vy2kOnx_sKcqU9;{wLbw=yN^?4CZ7=%8iWtpsAJ`%FI`%ctiSDrzLrYG*RlpiO| z&gfkUkrhD`lk9?*mh@vfWveY6lLH5VVv=LD`yO+up+I%TW8P%fwp^XT2GVyP*&=z%CMl845n7O`5KX9nsC*e3x#qOUqEe z^elk~fOD&SfEy|Q|EuVcV7X12ngAGUC}W}Jv@i?TvD zk%T7HsJ5dq75=oa1>G72qGwy2m1`qOb3%=AJLaUqpBuKISJOlc4}4+e+C-w7P@@x$ zj;ruzfoa{T@g+J2Mq8~kl0+xeScPK-EBu9FT6b&y5WNEy91|Uv3Zu~#{uD4R z-5N%sdtj^;Pa{cTqT_O5%u0nn8%#^DCXpBv_|l4}iNrY3K@DvERQS`wy6@D$h|YmA zR#}ZC>4^>|yO_xee?D0E-I~ut|G+4#tR|AcL4AK~ zWtOj=+0x0lc}JwTL>xx6Szz0Ma5*5u77Ltb$-8y8Ee-lj$TiE)r&RqGCbgC`*+*i! 
zEMM@K(O1t5>DMLm?nrkDJ&Z!Lz}66fHvnb(5ihqbxb?BE5&EORHOt?p8gmoVP%D`1 z%QO8eUvLHT)iYl@<>$OJGDu<&Bi$^pbDcmPkZIe8r{Ct^5^qa@ezkQq_wy;n+``1y zvK#rROlRijlRUqArd1`S&wC)9B_uHF&3T*e37i30wgY&nZQ-r*w)fEQ^RDLpKJP$Z&=`s4yzQq1ro#+d9FP=%Z_A}E7do!#I_c+A z9&!tlv#pf>%dXMh*9R4%i?Q7H&)?eZoh-;V#D$ZYvv1?V!K!hwXYc5knNOOMK96#I z>a*I*F9J;#uUxVa`0;M#63Qm7kjl~4BBhYZ(nm9xC34@9DwyR7o_ST3S2_Cis%kb! zrFiAa6tD8Tl`E*Z=)&8Mrc+6U0QM^cvqTL$G6u82@iMEbf-TWCtE%~+*5Z}R7keoy z0QScgiaJ_PB^QcX`ltsdM{YUN1t&kj3$3d1BclseRdYcB#Vfp1e8_hzyr{XDLb#*( z)aydHrH@K*a@4#dYj83guequ!d=TBWs#*v-EMB3W5;%CbLXDb>E97vrpGqm@u=LRm zHi|qWUJ5pPf~Q(lhfrk1<1!A4Pl4?(aI z951@6Dg^9xuc{V+Qi@lYj>H8GM*-|t)O5wR5_p40obd2fRYqVI|IL>hCJY;xCJRB6 z6M#t`^!muli&=a=E^TmQR=8VloMY}pHmg_$fcI6a3wSSn)A9|_FfSIsVF@5R;)q#Vw({~K)+XFcgGpr?E1|6lrD+YXf#{-@XjKSdQpul-NC z)8F;KwO+RJ2VSUEnJF!7FkMo_duuDE>UVg7a@M2kc@7-y$ul~i<3%e*ZV)`>E zXq`2?ZXq%PnqMbQ-u+%wf3u7EpI_TeLlRd-kHfoQy7OO%ce(x2k0Nl__93UU#A`}9 z=B4K^2i7%@Qg8!L8R{&~7-179WcxVS#3Q@-V^-9ARzd_z)_eC(lMlS8N><{NxX%-Q zb~#p*pTFxN;&H#8($?9?#Bi-gAS!WOPsQtWWTHNQA4KpKmYG$4N4!3!u0;mE&KJNEsZW?AFgW!I$_17W9NQ-LxB`|P6UHFSglC&x7*&9Jr=HBx z*255@*1pW#23f0Con||Zu5FtWb#!U6fPocz+a};TEcDfeQB)j}tycXtNDJ$a#+8tH zCksZ{MSgZD5CERp7;%UBLSeA z4TwX$7h*n!EZ#Zb&(ql7+O~ZyP+)=Bk!Z1`ZWAB}EG4|>2f)8IYcbRdajb3I?5HD( zNm~XHte#DP6zufX1|%wl$We<)0)=D!vv6oK-=ysbyY|lxX#&7D8?_Gc_K20wj-W0b z)wQ89%eqI>kDO(YKdRh%EzSEN!Ah6}Zp^BZBB8kIiFp^w_Hm-^z8XK``}g`hyV2V6 z0ncD6=C-x+>2RGOZ-HN-9GPziU4A52(JD}9(U^(vt-wq*=i z+uF3U+AIqgdy!T9x@+GE=^=CQ1LM|fJ=v!ftb$4AK3UaNB$RACvG2;VHBSuN*WxFC z->)xP7_CJQgdbmGK3Y4TvKMihoO}`y_KNn@pGxp8YmZEXRm0N{OR)FiZrRa zc}IrpP|XP#bSZDrHQD!}J|_Qf4i+`m?fv<{z|H9f&9K zGPY10q+Z$<+SG5mhNS_vYSyqZplu_b+IqT8eV$sdw{Zs|3OG8T0|Ac@8IMulq&oQ) zqYj(Pf@nBsPZdElESApwi1c*5Fa3ykcGgnH8_>%v<%~B_fwH!NxT^?B+rXy&+iO@l zV6SEk8wV0L;^`)XpplSJ%Oy5HBAtD#=o&T!B&^SKPUeCpKu#@}9{RCevX3cQyY>Pk zY{>JHtOSjPP+Bg@`LR*k$HCXGy#fitc>2i9&}2xY<&w7_8|7m3&$VmOAYo&kcVrpp zONfr;61^WA?P4s)+O;H5;XNKAz!mWjtmTrv9~;GDO#0fjNKoN@o>X8-5Cgepxg_Ps zMza`ax^^uARA|8S72u55kYdXvcRw~N?db8fYq6k0Bc2AbC^QQ4#BzzxkB#mho$EJU zL6}B5WQjP4;I{2=>Tg=TMiEZwI<89z)5wFc1DiiAwqlL_zN^>h!)aH(>9WH#vLKwN z`O3>{Hwd9aW8mMRL-k}>_I*AS$f!K2mMq=&LcXK1pHZ?z{gfY4nzRl6tuwSr-}5q2 zvP1&b`AR+*=jF)Yyyp^cDp_*ll%?F{u00BxDSr$YW%!-$wnaLH1*nv}@(zuv@yo`e8$=}7=JF++@ zIRQS`aL!ZaWs{GRC}@`aUEHFhuyc}2{2xg;(<#-m=|^4^v`GFVR@;%nImtPmQ4%hA z%E)c<5sGSiBmWWC(i&h@ z*5@0yT*}OlqpFRpl7iF1I&`C+CuUEYcldL*9$TDS%Jb76QeSZ?aaREY6y$A(mh z<$xA$gBS^RZ7Bxa_5CLmPFLqxSs&na0Nl?6N750mUbpOL7b#BT4CEDlmj=+m<;+qn z0Q;Gxjs1faV$8yE5*1qz#7$2oE1DO9PZJ6D@I888+&+c6sKm`n$BrE{=nWV=4$qF%6JAmWF04BHv z`U@bwF6b}#x6E~^7p+90>rxK!I}h})UhEy|UxkIH7$gvY#f3qF#S+~k6D9!fADMh% z*Dg+DKjPJTmj=)PeP#w0fcwmh#{SWYQYNjq8x^Gh3)}+zs!x-eG;)!`q_L^zFsOOU zXEroheyn}oKjNZV0L{BTHBjwfF873|Db2{{{^QN3W(`&R4ONzg61C<0NQGxp-;<_> ztIL3NtwNYQ|IPKgo1Q<$<1v*lHNVgznMDs|1=jj;6a9rQ9GG&pklbOvCHJR}H=1H> zKfmhwVwUh*{==~HmdE-{k*0$1b>4=Q>01g~<&Xm<^=bdtQVN0a1D2vL9oA_LFFWa&PCWcKfn^BR&(Q4_QZ|eNFSAbn2 zH(s#@k$<6n*OR=GVAiTC|me)I}{g#4UxlFQ>%CyI8 z=#RPL9E)+5!JOGmmGcwWT|M z-pqf!Fihm?D;6Nqg1&4!_3JOw4~ylDSmdBLNA6aCQOJFG7Y;9MVpCqG8Rmt1^O(|G ziEitt8`Vp1-c>gOd#lWN#{x6_8E=>Pkg;UOP5O|p$&9eMM5K+Q*Hj$R#&Suk`6ulO zHoW=gNA|-Kp@1n7mbXFysK85GfxW;!&Q@T3f0WxG2TSa2x50Ffu#UPNLQ?Rqx?Q#7 z$5|$o8+~kkq+#}yx1u6P@`Cz$THxRJ)OKU)dLR0cKC`D3i)tTT7c|rh2mf|c+l{O1 zlk+2uvZuT&xpbr^2-Bkg|F%$Li1zCB_T&7-o>EfsAFwzu*1HA%9jL|->(xi^$N7cb z_-)CRBjx;idRM`}@2D}v?DXpUaSpK?7njH$LGthGIf8#XsWHUu^Z{EQBkacSN-iI1 zp8UjO zDaVhKQ9HJ2RT2d%)YbDQb3pTwmn@d1{Ww{*V|-U73qggq_2|h8(5z&3i=}qIjLX__ ze^w<^L4|sH`oMajC^^ky>91cYAxkzVnG=@(oCH(0@1yZ^vRI;-7^ewm<}oqmK?Ppj 
z_lsKW)twlB4YIqVsYz&+jR_8}@0Xew2OQ5#E73f#d;`re!lCTz;N$v!_la@V^GoBE zXc1VxktP)(MK&s!w!WWlLgDf`wL+!q6)hGGI}1YP=-Y;18;zr=Eh(tw}p zUt~;%Vk`(!TG~MNvbduUw_Lhrz)$z@jVhEL)@OS#M2Fi16;Sd+w#-O>-&LUU$>z! zH~IDm6$k&i>l5X?3(eSio)WGGVAMr}yB14K}m=7MELh=PWl;#Ew( zfdL<)@=|~ZTnuuV0dqlyB0vNN`o09^wt_T(2xg_+A#)S~Cf{sOffAtMC=9{VAa{^` zr@Ztp;6Mb^|6{q-m65@8*mstZ@r8W{T}nf~q;XH5(PAkoqxp((WSo>c0I%6;+*lG!`&n_54cc z3jWRfekewTDD!^EIo|8xW8qHbod=JFVWDyFcZs%vDereJmqH#rzO;j*d-S-MUHk2O zoueyS#qR;mVtdTh2UrUpb2aoESG?!F*iL$%4GO>e7(`YLz4aIb{>}LQdQ9f?O7F(R z>CEaWJnCy^dQDQn7CX{e8rky8@7MDx2Odm@?LdOLVsUZ!e51KdD zhQ@zj?T}O$6j#XfYrkL4&){B|c-z^8t}U02-4_4)lzD6|d_W5+ZToksV`|b9KKbE^ zJ*J>G)I)RAHK2aOvY}{R-#olU>!1}gwO=!}xLqzCF?)$_N^_IeOwi%*+v67=uc)@P zkhi5-noriQk11ns9bCesa?wpk`EfZ3{B#I;H9wUQwdL5s&|-*qrHS=?GH~@k(l%zQ z96BKkSNX8?ASAz5$3sET#kfAt(mPvI-`uf9m8BIuHCzLoD=e=<%u?!2DQt3^2|A8` zdmQZ%FS@0Qgh?kiudH4F)`ER_K#9rZ(wk27<8rqC=@t?{Ka)_h<;;-S;<3ra0iVTM zgk&zvSOFG{yYPk8)z$$imwO=qzBn$lE+H6ZG=>YSFp%f@9MINhfVoAK(V= zL-xlXKrkg-5;Gx+C^vPYSV+-4J|TURiXYZk(9p;_T~K2&t*2OC{qdug%b%#^*I_#F zk5+R7TF%ze+fRzVTJX#QF1w@Cwl!^eVYlG(&jaRgfXiu38(ocRAFzR6quS8lKPs zrsLb+k(vc8itTMT|8OzMFOa3tQTII|pF)|VUbCLgtU08|F)A2KSEKSE3j=VWy>?tVDrODHSOkm9IooLK%Jk7xJ`w(={deQTFZvlC^t zvG+`=&sY6QrG|8KiyShfXd`^ohB#M>zl_SxWW3N;7xFOh`MS)*3pePq&rg;sYv=tV zouuSUs52VOZM3QW%lt9NhpdABc|Qm;a?0}4kmd+8{=57dkJtGN`f~PD+#0oM4oo%z zd&iXKe9f=8atM^$bvN4&`i(bxd+#2+J=KL%(1SYJC%D-gQDR6d2%qU=LJwvi%<(F4Co2dM zEYO2F$#c+j`oTOS+3~gCS)+oR~WgS7c zTtNkEeTc!*Ag+FhX`EoJlwpEICRWNMmD%0*AR_2(3_OV7eUqn&FMt)o(?o~LEK_T- z3%;-xgLS3N6cetu<<1matbd>2W}fW*Fu^@+Qdic(d$B-h;Y9^&ei*o*iw^^h69Sbo zES`~hl`_d?3->*KBN*olJbr`s&7Q(u%o31KVGfn8rqVbqG0 zmn1B#a5ommNWS)q*|qY9$-?D8db{#YHt@dlkN|yMZ&ij$T3NS2Pzg~fuweIY)5N5d z7gHyNm&pPz>R9~O>!?NS`IpQb3JdyUx!#)5dZjyRN6PCe?5uE?8^=l({Uzg4sco`gazr0q z-dP6TcO9~3>AA^+p$zw)8K^DQFsnkh7T+hVwM7FC9RIxLZL8{g6mo?0eWux}U{26t-+j3Mo|u@h~2DbhoC> z`rta{T-{Nv35#>~P_B>Gu=HN$3Zfsml8+UGaM z9i6Rd%s-eOXx={GvfBii}~L70yA-bQvgxE z1`>^kR(U=b8Fz%wQmcG0o-(ZNs0=fRGxSiHo6~};G_5Zms0t~!${=7Tk@ML1)-zdt zQvl`F0Qp0tt2{mI#~q)|(zbr^n)0abs0o9`dHlW8sq~{8F8X{cM6KDZ9vAMA_*i{Y zPt9)%+17Q|@9Fw|Ys*#bxtBU2`G64<^*EpzVxu=pk1W^YL3Jo-c3>g0FNA>{vX_GW zqHG;ig8kr?n6D-3n=Bz?CF-#GC}B+pW8Qly!eIaZG=s5q4p%gT8z*k zn9z-vsGQQqj+LmugmhB0$kx0DDOzBENzG&IjsveI@E_^8w@TL&N8DQ-G>IE8(LSZP zJ656%6S|c`Lr7_P`?^`XPjUwQ$wcp~i{p_(Qu#2D5KPg7JEo_XcLqFUGC1bqeI%V! 
zHoOG(2YaBRd-|X=V7$rTR~Ppq^(6H0H?Y5k2P!6_mwE>L*+gQ@CFn@ve(CTo*#DY` zS#(67*i7{&6N#@b&POWu%ZG=+{_-AXG5>lwXR3!x&|@zCM>jTg^S)cI+cjHqoYR@Q zH;Kav4Dvo)EYFmm0jVY2njjZub{AV9OR&d}IKTokQ$1?J`gS-L@t@$GJZ2zCaPu_p z#rmvC4oLZ^oQHjf;Pp@jas+>M@r4SO16P#VYh`OJn{0uyb>qZZ^E5wTj&W;Jf@vlw z&tkvIMk&uWPVkx8#|lvmek*sb1TIv`Q8~?P4wVY-O`3wNz?TX^1ykxGqdACT37elx zjEg^wAmsCPQVT5Cr8xO6p|S3qe7z>dC7(PIYWcTP=PlOxIH##b0Q-0&9ZWOv8Wr(5 zH0~OZ$XAt&`p}U za>BX_)k=u8ZK-M{mg}!4_^4F+h9~%NCSK(u7Hu@1Wg`}-U}^u4eI5vP|Bnq5vy}?M zK-58{LSfm0egXZ3i%|hRc;9pVesl=IZvDPf<)Miy`yAb&sVh6It4J-KND=r(E!}cG zY9fb{wGTIugEbK;AF(4})-WjOvRE&f$YDt7`#F&_WFmqd(IQi8=of@ptfx;*vld38 zE1N`MVYzB_y$WO?N47)dsL9HIfh>jSkz3&XpmjmY8@-7Ws?lZL_X=g2j5)1Yn(utg8g9b7#3~E4?aY8UGQ-%0-hEjU$$B^hgkR zKV;qJl6G&(MEi@f^Lqt}1c{>@HJ*lvqDo)-Q%YMWsnezAL~w~Q4?fb?Syer;!q%B4 zti!-;Q(Su5Qg=(dO7gp5BmU_IWcpoP^}E^4;)6FIn%A!a1W*lMx$*&Bjecy5kMDA# zQb$*GIf>Q;>q9nW@}}DFY{}%gf4BC5i%erjWS)vl8&qkf(VYw_BZVKH3?Z%dGdrh@ zk@bHPW{i>F-WQQ@=Z%R4#uMj_d1hxIZODHiQ-5`BZytPmN+I=%5wC^3^^hm+tl8=w z!_R9zFKjI)bV>i!aHkfxWeYyx;qsSLy$| z*=%KC@KfF=($;FinIu+&N^NY*&wrA|<*%x0SoNz^qxL`7sb9Lbw?@7_rKx-+jaNnL zJ~U7J(`==G{pWR`m$sG@k4e{R%u?UC<=Q+k=kix-zhCveRIT=^`P6q^+uOC@o-$X) zKf@~_4IfUXZ8TfKp8izxiL$kx@F$7a{7CI+E1Y{W$>p!vZdkQns^$4#z|_9({LSre zPpRBr3E(x6w;!scoixwfeMme>-El1;t3YWh^(EY06QdQ5Z##6(( z^E!p!o-(?}TjLdwu!pT_E6pFqJV>CLmeiK1Ptr*1hXO%{RiV=jp9GY# z*Sy>x_rILQtGC>q*dl4xw4@)jMTI)F^7zyJy;mhvD!MIJG$o`vudg-!RAMa&^N9!P z`Y?cvr#6wgw^nfjYZ6dqX$~AZhz9&5JJg3e`h@ExhC3R96EM_6=CZHC=PlR&JKld` zk8?N%rkhb~lN{kxw^t|A%UIso-#C(CDzm?VQp*VMA_mANg?Cx5%O3B)T*M)c_Z=&7 zYm+?Zmqb@5v&xd++G`()CY9Q2qtw#E&xppd3E^iz{>o#SOA)bO_pD!(C6_Pika>7- z_rw6HERSU_RXF$Ai?Fa z4AsBr_&w{`GNaN(AM%atJ3YTF*2Ru%F8zze?^(YrGb&#+Ad6({^Z*&x`GM|ij0V#o5&wfOPIcgu*06FAK7S%2sta^Zqz!ZDd ztQWTTR$-Xtt8jtdKE&lSWrxbRF)vXlCrioVUGTo_u?J-wPVv|SYl8mjr2wTYANNu~ zsTG7<_X6HlXDtqudSAVyq5LjmUeYMFsPNfdIRyBu#SuXMx|cs4#LxIqYNmV6LxIqj z;W1@}H+!}SccAg%vkepJtF2V|Oz1H$I#e*j88Xt8sCc~94*>rRYK{>~G_z*z}>q?2_xNt#=@Q8Y{Q)S{U##C1g8rd0l^7%e3=H_3)l!~(Df68l5IR1EXEHIH&DmaG-zx(nUe$YTq*JZA%5XuiV%mLy^56R1@??+j2$YA9+1V3IJHU$`rv&9 zhC<3B91o{POj-D?zHtKVC?OKvFi|HNNE^z$_<+oWI%cLBjr1a_GZYS(^pp@($(pA( zKl@sUto|(tjV!7R5{8}StRqq=nI-#)zfmBM%ibSc#DQFRUF>Lu6>W6voQObsHf&dA_I9Bn ztz(}zxW8(RZQcgA2Y|VHkQNK{$at2H<}US@ApQ52f5L;FU!m0EwJj~3RzMP z7I3@_Sy%pgb;M@E<3Jhrz~+m`yW0ir*|X~&F^+9*vv=cEeZ5=fXIXIG^sQ0Wsc zhx&W}$16z^%9_2%mO*73dlIsTvJY9tzArO&$ySQVp1raq8QG1o%Ra~wgTV-c8bgsW zS^7Qa^}9abcP`iSy3g~R>n!ufJm!Af_x(6bZ~}`@c@wKPKFx=dZy6)J;Nz?C@^dMU zUuhyoAJw*KcO_mQ9T}7%lMOp;JHE-C?N7Ji<|?(q6h`N8QR)ZN_^nafX}*1?9!uL% zzPC;VsC?z`eDVIxt;*W{ih-eUoHEmjKP~)e3Y>6(R03xa!6%W}3h)^A1@~=PgU^{b zE~W7t2~^#p)$2G3bYM`Lj0J4HZTBXZg+J$pvn%qOR2Zkj1>|?Baq&^xuaJGIo`<$0 zkhf+9s9f}SNc>lSMtCxkgpmUZT9gcWiyEf0F#L`yxNe<4jQMTPH~#U_oL<#^e$C$B3#`@! 
zsP^#hs_}p$Lw@Ol$1pAgZYz4N+Jz)T0GZ3SJ-ZOYd{jmFTHDhRzp`*cg+PKqc5?P+ zZVxD%PyX+il@y)yaKjTxh!OUidQ*_^j&%zlT{cW>bG$xHGPLuYIoBABa>)COn_gRP z2=?uQ;X?R_a49@v4D&rqxDdM`Tnx_`opQGkE<|kz_nc>pc`2a)_46nLC8J<>s+|Vy zw?Zox$m!F1HG&hi*2iEgTr%l!AiYnW!^|ek;OafdW3Q z*N@ot*ZTBL3GKKL@dGYme{%La%yIM2iJd@f5s$QC&SBsR6#o~0%BS@rl)p1MJKnxC z1{Y#Ar1FYKnyw=*w)tm3M(8C4JN_tIy1=3sR>wT&R%K4qi;TfT46r^^T!Kpl)Ln^5})y#k6(wl$& z$p~dpSiE#XD;l`MSTTsK$@_MaV1^?1WrW@!FS4D`l>2WRitoxIRaj3L$^+MOGD4Ls z3Ta(Cb8JI7poN2(O>uR2FJ!AQv~VP|DIpMVi)@vH77k}N#qZ)Hkgbx?!m-SzSO)xE zWGf%kZXgr>f=q3t*lVHp-gxJz)c}rWuSJWneZ41ydJWZ0ctmx z36BSY8OT-%sNF~=JQnavkgYsWyWvcD;tzZnvQ-&sHO9bN+&aSEz7kjWeel)E7#u0qu?naodh@FvKJ zOHj3;Oy&fj)(shP6RI|v$s8k!*F{F0hpG)`GRFgC6*58$sy33z9P5g=LPlJHstso{ zC$8f|kr801+E^yy2`63)8F3ccK9Grs%f>^I5!a#Zm`ub|3;ZKw1RJz{C=-z|gAYbV z$U@skGZ8UBDKs+TBD8%l6A=&aRmcbsw0$HK5evsdkP*Dl_TfxKA{ifvj8K8Lk7XJ^ zVa97BBhEmh1~QG~5O^nKgdj8ulWF|a7;lD*V1Y&rwHYUjHB;6}v=M{7#&0pwsDp|f z_K1;Q<5EmD>cIVGo$>3}A2sx9ry}%RfhI-~Dxy$iw%Cbs<~UAx9XROa zqn6$JJaCxzkKwKLEEF!nXD~L>nZG~>qjtL}kDS_ywcz*?+DnvRK!w^5CLTLqE-=PK z)xORnrypVMv%XMziPHM0&}i`|G|pU*JDAh8`FYH*o`N5kf1&N=6_Y?w?uf_5ID`W^O)1R!Isls;(K{Po+!#=@u!;3e2{yXYqf=W%&G6d59PmT_43}}K~WxvC-gY8 zLJTktwK;jr>EFP%jbD0ud8KSn6hHA8A!i=QUCia$f;_~lG_Wb`i*PTmxH5`zUp&6p znG>RiF{#bVL!?cBt@po__VP-GqbP3Tu};nc5N*t@+M+x}YAx8D=?k=%SAq^ju@p~S zS-k`?!uSWg&O@aC0^6m0S?lGM)eUV&&}=mYZej9(>zO^m*<_vVX~w7=$Fjie2^=O;#~36Y?T+Hg%J-Z%rj2?0DdI(&juMPkYN$r+Yv=@)|)FH8fJLYiO&F9GB|_o`Jg!CGtoig^f8!;EAFr znR1p7OST|iPp-);6iNN|OTfMT@mx`p6tK(n%|Vpx7ByRweu!&j?YhD2cDZgy)5oq9 z@P7~+e;k1)igIq^l0uqjkmfCgv~pa8ko(KL0hyK>+0=Dpg=}ojucwl3A_Rlb!DCmw zzm?qva!k%k{(gm0j8i{wL4miB} znO5Inwy-fcpt7QqX|)W)hdnKTy$o}tix3MGg>`baGb-27v;%mdjHg?QfnZOo58>kZ zb`rO{k;EtJP|^_7*-r9f1=!Umx8Y87XO<5?DJx&PpM$QW(b%EE#`2wihf&4GJU+Na z9Ar9cMkplSIe&D1-;wUFSfDiQDqGaW@H(2e09q(tI;S2JiH*5AgiGW*pIFr0PP_w# zl7^Y?b&fq&hh2Soo9sjCAZQTpfQ~LOxzITRSOL4r6vdeMiN-%8^t|#V=8hQ6*_ekUU{_ObGrQ8I;mLkym0#V<(fLF(xI+g{jrzH1R#Y2-3K$mE#e*SBia-w-Gtb#CF~8y5qpXgorej*pLmDw zfQNUCY3eOzWi}>=iWE#GW(0cTopVPtM$UA%#dhxk^+7W74sw5o&an0nBLlqKnv2aX}%=oNIz#v&;n^F z15S(s{hSFw7^Feh%qHbTuk?Tb1!;&j^CBP9E13}3K^m^jEJ#OsB{RY!kOpTnHxiH~ zctDT?X;=WY)=u;-ri5UShC(yH&tv))69OYh1Kxbu$C19pj9>`TUBGzj5RisIGY`pu{?7w~DoDc~D71E> z|6@vs1Zk)@3-CXt|6@X+0ck{sT;g}6|6@kb1a+MP+!+V@a}NkYpsqV1Z2V62=S&Gs zpsuSSyoHbH&zTTdL0ykSSPC8K&zTX-KwX!BR96T3ga-r}P}hSHjzTB;1XDsFsOx43 zzujZ{1QP-SsLL)}k9q9WW5R^e(mLZ&rPV{}Igm6$7I7C40F@2v2fezFa z0z|?c>HEwGI-stzK=P9Vy~qQCD5&cm;MO?Niun`@UDq9(~Liu$c#L z8ZB9R80b^bE44bUg4J=I44ont2V!XjMqWe)OBGm*v@0PXJ6|KP#_N-mDPlPTmR5ZP zaU6WDz<%VUu zTdItW7n4jb4i<%2kBqmxfIOSN4c3NFLZ^6(!?E#KBwsiMOGC^@YFZGGXI}%rM&!x0 zDc*8AY&=*JvGV&W#BStg*fYqpnG0YI=1KOc{5Lw-__I~dSbvK_Oh%rEy@cGK(gW)v zCNrn*1WXvz)+~oE!n$#!W&7J<-uy-3`Hqx+n0sTPVb44jE6In%23QLB( zFAnx^u`B=aK@sBniDA>4*>%IPr7%}!j|2VcEk7(QZ-0uo$As>bRyJ?b7y1~6fIO%| z@N7i3wCD0{(7@it$bU}WySDIPBzf;L`pbQu6_G!$eLfOI{=`At-WmGKMCvI#3Y3ZL zlaie@z}Xf}=6LuZrLpmZ+<2P}eI@-7$vt-Tm%BFK%>U&2R1nN_lV|jR5deAm90meCNduxfx-xiiH}OKJ$0S~b9^+|@Ljgv+3KH+Iv(61G3Ft7TC5 z8oN1RG26B5?HLqEV>dG_e!GS}Dubfh*ew8y-L7Ll$)H3w3Z8-`3VdQeok5{#6uh+m zM4*;ECWE5cD0qH9PN0TeBZDH;D0pT6sX!fjR|dtYQSj`3LeVGoYZ(;QMnSgyn4((t zw;2?(M!}2w@kKT44jB}gMnT^F*rGc2S@qs1CmJd- zfd?P(*raObL_0}%bp70cH@@S5o)lEQrxEQ`$0jLdP?#G9`HbSCYuPt5D8`L~G)7OO zYuLFmDAJAHU4woXY?j4#*&2M4P8iE;+4XXO2P0nHkK1Pa$!%jN3GS}w<)FrQm!XBD z=CUy|c!S1w^niC^E*lGY7me?@0Pn(FHWBbH8s8}b-i3Mn6TrJ@d`Am-7kBHQ0^SA3 zJ9fakxLY3sco&WD7#m)HlLzXgN6^vYy){QU4Oq=pZ3MVZp2Kf?}VZzHmeE*R{7nFG}wJm!}a`x4jVP zQfPm39d@4QZvU5S9}yv%8=t+;^DLKr;qrMIVs`L)o(<5fe9M62{h75x-z4l6KY!Pv zS?-(nvYC$!mI)r`;}`x}`7Yc?2TKo+=l65{jP7>DAwThu=v9|V&&Y9OC{+h(9A{Wp 
z+vRNrdp{tTFyv|kJy|E>>K*|gDt8`i&@gL9i0!R0#rN#o-=JgGiMYPE4uqg~EU%nG zXh(o9uBODV?A*I@7NHX%c#&Wfm%j7x${Azrh#MEzjN&JE46d9r)`<|gxDL>YJGNI& zk7-9py<8oQ-`%-;aG?-!aQ7e=eMzcY-+`_5%Foi9xo9@29*Je15XEm2%Qhsv<_p#F z+xV}~fG5gxS*i7MU+0g@OXr`_k?gxcAoR)Olo9iuCBzB&=2FV}&Z#2vc> z`o-h}yMr7wC#i>SnKMNF8=b=$@>P{MM0fR$!)HYIJ(5Yt#UenHW#w5>oGuPv`qUUY zdde@WGo+&celOzk30;xSk)Xjc!0PQhI-qHPdGP4~Pr}K9`;ia013bhZBA=hlkMp8k~i3 z>Su$4as&uHSa1sHaX$x~*XjyKcQd#U`(}IV%L1(BXefsc+S9eXjAJ!^;ydd6Npf?>S zcT>(t=x?pr3+3dGjGhkG`9>#B&n;4Bq^tR zz2nQdE;A~aH=EO2pw;*#1(e3_nqEcx_-#~V-c3h-FePvR27~#pzv8PGN?iSnau!uC zZw?%knoHFWSW&6XE3m_Q znQ9r5(64yk@x_EKQ zT*l|p)$8bzX8l3&ITJl<%qACNjf1XU+!7Ol`YS39OG%Id0#aZZ>A#wv=DTKd!f8== zlyBVEvelKUl}JLT*}mh;EV+#o%%08cg=itaq}(uNw@9xfe)xtInQ_yRt4Q%22!;jw z&%ENPFHV^H+^{NwE^GE2l$m8%y7bKLhmYb-`B+5FRbNBLZ_+}^Cg$u@(~1X#Oz(T zXZIyVWP(j0J)2ne%}1o#O-IH(#b&@02C$5;6zYrP>pxemN}@}fZ3dO+#{UcD6fRif zysuxF(N=rn?S7vv3oXg`{G58+mfL;VEHoeE^V8~2x7_bv%tEU&KBq#J>~)Ueh17)4 zF$IrZxmgm|NCukUx)2VW+$@MS(p^oKPUFWG+-XnNNP3zqUB(Uy+-c9&NZOidoup56 zjJn+c~=q(D_L${&84Z%o@Le@$D|4_C$5?a{zOxs1^PuIKR-@0hKtJ?&Wuu zSJ$|<1}C(s%XGkglv}v^7}ZfGio%P50oxI!GnuZk!*d|1I@l8st#ACRYa37AyDl(% ztg0%0s=u>lujJIQYE~iJQ}s@PP2Fo*hdN=VXH#NVHgzv&g{!CnX=G2;C(|84wfSII~S%b}*w+x#%NH(&kEOQg#0vAYCkK2w<6_s(W{Ubh+pV{z7wFp{x_i zVj`WxkKNYL*!mN$a#L5L`P_-Vv2_jZuW72hpX3oDsonwgw^-1784&kNs)wRx%u-=~ zG`Ra(b2T`giu>Wzyj;O8E7Hir5+R@x!!$JoL^i)M!Q~S>Uvb|xD_og<^9-Ql@vwwp zxGg;?h)QJLn8;jPG}LP8QuUK&-|P$6E)`V8t2GxT?L9`3C({c7@nfmPweu=B+VH8* z5T?1(cp$@SBBCompj@_%nut5G=Ylde&26U&cewwl#g&U2MpH9!Wsj{$br~*KRp`dd za$`W`P(7I|)KuJ&$61GhrCb-JyM{Ad#D*htCN3rdq?Mjb{#v`9yLPwdj#4StV1RwO ztbGs9jz}pbcdcF5UAxC-N4}J6Bw(pbcBxx=N2-*Pwbrih-cBX0^mbK;Ba0?p#sj34 zo=nzS)P46TDru#+D>Qukdg5gwKw9a^6s|?-xkn`cv30#&nc?I734pV5LDqzUBWlSscW{R9%J0j6Vv>@hxWGZ(FCEnk`TLVOGv;G5ZlTb!NW}6mleou1ZG=ZBe7<(pp+_JT0{`A7#UsbHMUKUiGxcVl;9L}X`fEaSKvNb|{ z8vcOJ=P%V+t5-q3>Bp{pg=ioqi!WGTwwPcgCVfYih;Et)SG}y<5Qjm7EygGJ-Y1#& ze%-r>{?JmnX%+U+uyTbPaBLt8a(gPOAKuFOOSW|)D@fL1sr+9j=9*LN9B0hod~F7Z z?`{xV}hnougKwGg7bU^A>>}`MPjd zV6vEFj-_crl$iD1r_^}UM8Z3HenST48Qe6!zV|*k>dE|fhJCv4K9$m&CN{^HnkFrX z%wHFb>?ez1e=hq}`(gn}=C<}yovj8M()(>J-_(N1(qwVU96fwm>{EJ+@?`1z9VcwO zlok|){!+TJ!-~z6(sD(iCrV%Mm|>HpdYw_|@lq^RUnJYH4yU?dY70a(bhE$J;#4K;F$ofv8CC`C_~K=SAaRM|V@AjjQ%C&LcqtJ?h6jm@9`UEV z)mOA4-3k&{;AMz@qmQy832d0!0j!oz_ODvp%7$qanE&Kn_OCje`grNn9oGW}$c=%3 zzjw1Q^w95Ev^;#hc9JQ@9?!b$8&n}Se9Qy6!6fQVf2;}QP+7AWC5Pl@eJk4x0>{EK z9Z{}dkhgb?4>-4O^aT9XLJxHT3x=$4hAj`;rf%>0O7`Kl zWLQJY{4&$r*Mzl24fI@hfgk6V$8f;*JK&yeKmY{R;DjQ7LGJrW<8;t-b+|(}3mt&4 zdIRCJBGIr_%BY-YHu*RTB411TO3R<1BWBK-b$Gt_`vpX|6I-RkaO6wJ;Mo3U`lPSP zYib%bVV%Iw^4{-V0Y&dDRjC>vj=!BzV-G&s?SNEtYQk!8>tfT{)Chb{yE97q!AG?n z!Pa{Ib$XV^mLv%_&r?*9r;n#WWVs}{^FX1V*!0z`!*jjdXMDSD*gVe< zN4{`mj0Gz7lH^{lwQJUBccORrd%3#;mWnOyyMQ2k@1LZ#s5>>EWLwz>Op{#((1)UPo2H;EHN2r0hNU(g3xr$p;3h-V2#Zj&33yZUjg z^Fc7dK+#67J*gVuF5N9-7)zjNL&pZhR6iko-T-zxP_)TvPYPAqDef(=9FH=a;|

3h5TS?7%zIg#ocpDdO(1cBQmoy%q5VK(=+Cv48;k8{+?yjCuj zd<*B2WF7U=pUuG(%$}6Nb0ir?J%+JXn1Zh-Z{b#wJfl9tv$L3j1@kXxViwJMeI}8Y zFzQ~p0Wlhm51jXaNd>Mq%I!%tt#yTN*~9n)*Bcu4q^j3?C|l|1Gso^);0+*EkBcajFcnkU1R(rdxK+hvp1Ae2JSzvy`5x3 zZ#&Ugr~evVvh@ZoT*W5ktvp+bfy}|i(G%uCs_z2vHbKfWd;?-7kvKeW2U85pZi?JP zwm#})_f|(gMM=z^4zn1)u{;EO1sTL;Ze41t^m^p?NEw54Bz=IzEk0@h$Syvpg$z7c zS$q3~4WsR3N}cCxbXmb0xLg$n(pz-40Rx#g8b{BW19`raL?r^!({%#^sZ5M5fX>Lk zKLzedy?)%V>!r42xpk(^OvZLes&)kTdyA#5(iaGtqcJ{@NeQs{MY($I<{Ai80f;i7 zwYRx#SaDOxy1duuiiJ0D!zwNxABEWw%mU7598H)$(^NlCM8354`}dO;BfTIyqWifEy|^MQPZ;@m zaBtC^4a_1i+GBfTp?sPV-b5z)5J)~I&7^6XC~fk)Pq?D-Hn&frNBJZzCzN`5_^rp% zFWlRzn2DY>kBgoiT6-a@f5#rXJ9z<)g!fAZz>%oGB?UMV@_mVbBQbZS2yi5LSi1p7 z!rNc~a3qqSF9VK*yod|nNc5HXo&A3`F|jPKXIaiU%{O{0qx?rP&%NhINqvppK${0$ zQ}Z z!fB%*aeQ1SD@4!dUEWzy^g};W{Gb$2Q)(Dv2H^$^?>x-T%KKi{wJF7y&k@q1KV}FA zNU7}8qvus7p{p(}R(_4cXI*Ivw{8d4E%1V8pfQ1G#)7R-C?Ed(?b@!;m~}J}7twS| zDo7Fkw37j1M-G5Qex=IcAA+1WPVv%1!#1Z+7%P{U)Aa3Jz+ zaSD09b%K4>yycN!gTz@2TDz?aft3r~&C~2Lo@c6qwNMQ{e74(3UBNLk=x?~Nrqez_ z!uW(v9*C|_F_3|dHuW>Zk0526X1!xYgWp2(HZ5#DQHF#z z3+)(L#LREPx)zXvIQEdH%FkOcAOvo~MWSz=d9=LW66x0w=x2{VHfla5ol@Cg<3Atq zC6x3=`7AT~!_EVI50V>><%`jQ1czMTF|mkM{+`mcf)r8VIB2fy<%0uJa0{No^AQtY zmyKG?cIqn`jA_-jzWbLiaD_~NO>{j|6wKIMN8(l4=I#!R8AEI1LgAF!Vb?nx-Qlve|+p7qRlE zdnRV7ISR)^=4cN*UY_sV#+@P+gfpu*tK$zl1#m?`a`WY=faoqnj5;a?CrXw+GLWOI z#S~Q#9v{#~mFC7;5vm^siXENnFdKOb+J%2m@b&|Z!EmLdfESaPG5=`;r{IcAMLaOa1!RR4o4tcj;=!S{fE4U zE;YqTin*Awyz%QwDc1tAUbp&FU zKKK4~zO0OVy)x+7!e#tzwitLPfCn% zHz41KBdxpnS^i^Aj=;-`x(YAuo7fr{)x1bTz0NPo`!BcI72Dfudv|nVgvFF4J1vly z<|rk+zUd+^o;*B23v<7fo%$lE?&T44cBVe@?{m~g+p;`pDX)~Y1$OPTtsQb=~MOI!=UG2jzBcGbeXS0LeEt~f~4ecZkA2De+PfhnYo?(5M&@F{5i~5Ed*)GF1 zIrQ__c|$yNa}RTu3BR%y3@{^WeBRztK)p-om=6%{{}t2Nm;v0U65-s$66i^}XJexe z-0O7$ZMqz2Lou+K*HgyXwSSs_j%tf2t=cN3A&D(v`99-J+9#(|Cg|Z=0z@N65=h=*11?Y7* zFSe}EOUcPprA1kJ?qQX1yMucp&V4aUjM>kOD9rMQOPlHe~Zf0s{A$r?rMoI;PJY~z*T*7&}xCxAvdx6M=NqPCNM z*Onh$vSjb=?Kd@0JfIZPm*tQHAIH}EDd04nhHEk5ZeF24;JmnI&595y+b9S~g?zy# zfp$-G+i(F&gl=BYEXu1k%Fa!dyH0$=5pjJtR4yRKEulGxaqNd3u!@FLWi3Y3%_|s) zn!l_0YDF-UZ4{wiLV?ZwC}V0fI!qgqx;X8UBRw(SLah+@{ZWg_-t>+Pp|vY*`fWw1 z+-tnndainMJ^{2F95#Pt@rlBww^J5Hq|N(+Z`b@@qxf({-lBXWkV+LcpAT)<_qFK; zu9ax>9=yF&-c+?rbMR`h(IrP_QcS4z77*TIiIp4aFDSG zbIvK6m>=EYiZJq3uEUINdRrWvX)kGlSP`-xHY!KZ+MCUiK)aqd=2_cgd@H<+vnZ08 zTZ~IxM%e`OZXl|9wxqltod$p?vY=X-5U|W)?*ulkc?V#(DY* zq1kIwyg+f*>?`gIiPPi62OdmSP8(isjIll6En!#W62JNf$$w1fSq@fF&d=@naaM`P zP6+=@DV0xS`eLGt@lKj#8WN(o^=DSlLYS`7Tb2&!We_^?3Rwb(+$eW^f_%A=`bq)G_gVR54zR342V%4rg4qN?rsg36;I z>Rwc!nhW35^){T9st}zOp0YJH9-mFKzK}gVNi_A4s&W0zKmq~Qx2cTb%5 z(kGirRC0T1uXnwqPx&_1`zy|9Y4RIvjvxMonYiykdL!WOPq+0GR@Gnnc#u> z-={S00hU>zR}Gn%>AJdP6;*~+UW@2ZHP~}`Q=$BN=|VjYeqh5C%x&iKL6c?R+6xgK zXpYXr>V_X1uVH#|9j4elzyN$MrF8I?6`@8}QJ$Bse{vNI+6~F&<)q$w5n!7YDrlI( zEZQ|FtEe`tk|QD-XNG^6oBs-0sF%*#15}P18ewi5mk;VLUj(O%h(dE*CstSeIPwkC z)9WyG?g4sWy3x`>aVr8!R#7gWZhdlf3bY%Mo6iBp_`kZ3boTqm+67}DH@JqZcCRBS}g-H4iQ&qj>W_(ks9MjudBmUx(66FpF@@o zDq0a5WEB;F%E8H19B4NzH=onz36Fb#T~?@&;V3guIw-5CKHQok!Wjo7=;qqKf-dT% zGxPwpgN9O=+rH(4TFXq$=^~ua9QefQH)`=<`inXY+C4zGnT}LCC~QS&lSRn_Kd>iP zCq5qkQCEEfXiP~mFrQN^S!>n|0}4py*DMJ_LFhA7=-9dd+P&2Rp$Al^HekY8l(P%S zGsfL!LFfy^5kveDK*H7A`R?0AZ|C^|4cm=I*POBOEe|Sv)l^x7;oN{R0_j%o=H)C2 z`ax)p){E6&wyZ(B>@5&FK*V+f#+OCmUO>`S_wWRv`G+H7eH5N3EK9a5Jb|`rHSthO zNEUoP-(Gq0x3{>5bc}YuxP7sNQC7-j;;u4VW^upk#i_lHeMIrioy(|FXpd+GS{2pl^?>-hhc_QP>wqXY6|v zg3t`ZM+rXgSb)iDSxAOP=`@9Rl6PnpNL{z1-^{n+bq=2UkR5Z>$G6mW><@B$$PPK` zW@L+4r?&>6~9j%Fh-S6(^wL=r~Vip(43S^N7rK1D}LWO5IEAR0vwo*_rD(> z@FeVL;rVXq_UI<;zAw{S;^dD9YO+i#I-fMd1Fxv3*(LsT@CU0W@lEIT 
zP{#L^@|h7r2e*hji@aTk(wWN<_jm^pPC9#p`%x-wDXT&&Vo_(5YZY{UvTg>m39#$z zrejUo6myFVs*5Q=u)e4}g*0;O=wTRNPygqdBUzY%Z^~Nv1yz1$8 z{XhNvRn%Yb&E&06X7`i|5eQa*{61LB?;Ia=o= zg6L0%&0v-yHqqS|Slu?o9D9T6bjm6?-oD$2G$MXv8dlI<{<-4lHcZdgXf1p4$3rza zqZNx!X5oR{9cgyuKi&P6I+6;e^Cl@@_mm2Z5i$pI#J$D*uIkbmt_bb?!D=VhJ%T}$ zYTM|l%nB%qM!8PG^(W6}Fh>!)>uw&_s!cI($)LK9LIfu+b&rxp6pzfq3VPN*qmJZZ zM!utKO_M)t)f7fo0B$NgkiR3{Zv7_^#_xDhFq5}SS=du58bdG~s1lDBZ9BJ1XJ{id zYzMZTIQIy5PyU|zujpqwkp~gXli;tL23*AZ|Npn%H79Jh`|8SD>E6B1Qd>7Da?MIN zE%Rd>vEbvSv{%=4S++w}$H-vh?g@ihf8C9S8;>Rm8@^Ud?DN|6wd+TEICg_7Hy%?StX_Ss_bSra13I5E z_qqK}CDw(+X;9^8V`_u_P@n6)hV<}|o6nl-GcT9(Qicbcd16zhm>NY4XN^d?Wo2F# z9-N+2-U>}Q-y2065*e+B+t-=QJZBfD?Dn;+=cDjZ<5P2u(hZ2n=hHREIO!bgex?$p zPtubuuvdprvmbMBr>-^fSNW<3%*Zsf^BM8E@O(*t#i3&=7`NQMi_JfiZ!Bu&$(h5Z zl%~`ex|u1DE_|LUg9!#djf|JCD?-`dYRD-tC~(f3Zpr zdLAslZn(9zopCr~3#w4NExZNY3~BqQFyHUv7%sZyv>6)tQF?yN#|2p4l`G`cHeDv(N2l|p29-cplIxkVCNR!JjR}*i0)-*%IwztJENJ3ZR;)r_U+oGY zCgC;VpOvmK+E>R)w1Y)V&}Y3+=n|<-C#u05Cv>y&6b99tMdy!+iMpaT{EN3svI)3ok#o+Bn61lc?Z0@}=Cf2|`K*>Mrw#q0qfNII zAXg!*n{BPTXnL3gNmv#}Wm$>a_()(g_tjxq6Pf0@O%s3ypntn;b_@6V@Ta9uo__ql z(~(`20Adr-I$!6w#M8fB1)Tyx?7lAwQXRE$?r>7Tt9%z-c(R*0n@kj;4$1RtzktqJ zcuF0@Flyn{R69?d#ixhDO@zVdMvJ2+_`pSu@LDwWHK{w)6vRCX$JE)Jekq z+vN{w(9G*5>YHl$)jcFvX6ibsPj-W6V~N+OLlXVk)eeQiuTY0LJa*T-@X$};pfmP<1m(Wn@=5|%BhPMc2mrW)kJsdjOG6A za^VT8FMyrzfkvAgqMgQ;sWT?-de1J+?owwQTZvHKXWl|@pwukk0{I>hCcJ64nex$`@s6d4Ay2AM0qQ!S~?&9Hccvwe{ z%g%}E($|Y>r`q<=$3HfVmT7BA_^P_6<~hzPvg&N-6LolU$SX(4m?AYzQz zGzn5O0H)|oTnXpsh;{jW8JOau8p~4Mp90yu=&QgKgyJ35yd|x=LpNXwm_Y3I?6FU7 zPrqKIMc@HAVr-fGC11@Bm|{{CA8yo49B$(OcnT31CmU_6d8!$5wym^e6$JrPuxzmqyZ(|Jq7!NvyT2>kJ>t3FK~4+p8i|k=Z!%n}!Glc(e5|c_1=6b$WjrWgpbgxSe@;!cw)O?$@#E!M7wzK89M) zkOMo29uW1b2zsE!ofWI}!XUm#fYl_ScO6s|uv6us=T=^>#mOg?@zHy6DY z_4Vs?lr`xP0F|QUsFAPoQRbw>AN5s#`;J>Gy^r&8WCb_nzXgYk7W0%))HVPqe`C>Y zm3&7}fgX3Azv+jD7RrCLP}>D=u76*k(<=*(({SV_H_pF#hmscP11M_ScM^HROrA%= zeoF)_ExlbhvM0f9w+I$%?H&5)LjYV_<`$|4Beyle{{)lxk^6hHt%WE2^9+qznLad-`|9Rf4LG&6RGdd!><2wT{hLBzNaIKt&R%5E zQ#1g(h6r9A8!b)I1ni&0v#VoWrABuE`#JIY>eyhZ5p_KC?BGDr3;lwZN6p&;HD<5W z6|$9RDgI04wUe*ZnX{D`0bAXrTqxR3Dp={)N_%wo=$(@Y+wT`w`y-16NAI2-*?tF6 z?U7N9lQRII1NPsHf)_`$s43I{;_!q^nVG`iU$IL)WFSoK7bn4Voq7OH;+a+H?oui0 zhNNfX2kbNSA=ZTp>@x|=M>i=(yXsRm#-?ErKft)ZrV zQ+LLPK24DbsNey17Ab_Lr$_`=5SmrsQkDVWaj>{hC=eWxfj7 z8vB=i&8(R+Q{N+q4y)B;rTp5!haf~WS@j(#<<|mi06zwnUhAtS00-}%WgS?`&{vHC zZ1p%{ow_*yLVh)^m`YPOR7BKT-R>;4y9X@Xfz=;aO4V>h}feV#9V?CrI1xkEjbE+7gZynx{O01)>TJSyiz=bSLi_e>P@S z_)`CqpUn8SEZHtV-KS*podP-eZP^Popz&_ASek73whR#kG|IT%sgp2UCbTuJT0+0Ps|nS19}n(FMs05ak(-g zKul^KaDETSV8r%K;zmDj`tmj_?)Q2Vlelo&m9}M?$qXfn#fBlps z@4n4KGz6&M_>xqUg1-Bn-KhI)Wd44r&s-yxt3Xz$5NPKnZFy$nwpuRWdl#N5YX>e44$5~74 z17}K|P63ePVgrl_?3|^F6+jL)j{>YHc?D{%upijL>#+d7N-pNF5&)1R-dde$@YwAR zPal9BcfGG9u0_edOI|q-FMhz&Z-NpqbelYQj@Er|xbhl@uyCsbj~?s2ANdxCLlEk< zjxY-{6P6F`o)uw;n<^~JXf-vH#eyTxI#7S*bc?gx+~ zVNMk}c)Xzz-UlGZ+x?dSa{T-UIg0;5j^cliqxc`>SosGz`u{IDY*<c{$bt9=IRySe4uOA=L*O6eP;dVaIi!LWf2~9T;^P2%ATv#r%!Q&i zEi;1vn6^g%l$|s%Fj%(RrY=9l*h`1K*B=!3Wa28%ySc$bPRy0{_UUt?`E3$ z0VrF7x)8vK6kL~}AJj{87N+kAWYRT1KO26CaTcZjKN{u#_>_JAg)wsBk1*1qbaQo6 zc!0`gYLM}X{>sl1QMd}A6>AVyBRUasdvl}!2oIO1s}UI#XO?LVjB5eOilq7^j(Vgy7ghJ>Xi#4P;h zV!!Y6e|(-ib0ufay)&74=bZDNcNjf>_anDjt;vK+BvQTo`f%pBl8)CRg%yUgi*lm^P9=W}(s%>qR zY)UMnrH1=$&2G}vseCy(;A%pe$&m>sm^YfT1QDeDKIcpV#HZSYnhGrpo!1&<;Il4| zX)`s9&tvZD`nG0+NpR$lp_@rCJ*Qn*$h_N>DL6(t=5xV>C{79%>M2YyJ>0*8j1z23 zPKbx&3cO$B8TN3anT<{9`IeQ9Tio-irT3Re91^o>tHO3wB{g9^?`lsbxZOUwIp<=S zp*PQgv-y%Y&(pAsyNP*=lHz4idF#$z&uVGQafwA@G3|MnTUA;U&a>8iQtoy;;zZ6p 
z_1Hv1b4_B*j%JvI!tVHL<(%R4ROFzw(;IiE0`vZkR>@t0@a&(48FS&!bwxF4hkqbp zF$%lW$sy!g@r6?{IrEpFF@*3+u&kxPZaKMVOfFuId7a7-Dh(>FtrPbOeA!{vf2H*j z_ZN+s7mYBR@1;MS=NXY}_T=++ptsM~Vb?Tzi~p&&RSF?D__VmEMiAK@c>nTV7hpdc z9}*0z*{aX;32ad;`GJ_HH(ZM%3)z8+ht^?Nn|ogU(`zdeGTdO-N}a4mWX5*+m-Slw zlaer8{{pkqzu)|2(ep4nwIoy=R8oYV^9+0*YtnzIRUvh1Y{stU8Zo>|S`6L?~qD$}{Z=4F8G#kKu}_ z2iWlg^nY?UUwDOw^@;xPVU22$(QU(>=>KhXJs;O+B~JAJ*#Za!J|=mh|Bp#R1LGd< zCY|X2yAPhQOkjjh^nYLssgqs!kp$@fg&%Dfi+9KjC;C4bFIE%tht;Hp<*9G!}L5+1pzhBSQXU8Co_&X6D@w8fH(uszlpy>mB_w|zXA z^HRYGRW3txPl*?d?}$IOm}G>C;N&vI#o_8yLA`uwWhBr209a~DiS1&bDv05jp`U=@ zgzFThxRT;XjAE&7Ph>tLKnLJ_BYYtA*EO+JBp3&L*D*fxx5QMcD=a5SyS^twu36=P z(X5y_3jHN8xXG@cY$ew$57&7e)P^5hP#xSHEU>%#&pCgZ0W-eZ+~fLMKVY5HkQ}-JbS0cb;K!bj$(AZ*Pe)zS{CHYzQs~cs0MxV&kONN#IIl z=vjDsp`8p4=p`D>?myA6{Q01T&AN72+AfNXqYs`0SB46mbQ?dR zs0fY5(UU!T?}8D&t+Fqg2r$vSI@o6Ma8gV7Bq={k@#mS3lIdHTnc+sj?w#yyOpl@L zWOoTwB-swNFOocZA}+BPjPVt{`OE}(aIY70*eqHJw4Od$o)3|NN2gpB#$l+SWX1bW z#w^zEM@X&!h=QeE&FbQ~SadZD-SbzVRp4F{lR#@GMDfjMH!2^r^s^f@P%2pwndQQp ztmyINnQRDCJAJL?8arzLLUK*g^g<}R;{cyi#pgRz=62)aDx1YTfmX3^3B3ZXxe&$j z&y7@dXz}MpXdqj14KmzWF1g0z30y8jx}9T4HbffkaHBYnq-$eV3{?9d_K`<>E=MWI0v$roH z8=h(2EKpowb!^RkKQje#vsSxr9sFF;(NvA>o{Fgwt;^HAlI35n6{aCGJN05_KL0H| z+=?M8n?M%3L3SDaT!yTCqB~+6W-7g_y1F(&nC!l{qSJ=rjmpt!-BE6=YsPlf`1O1{ zU!?faRD$fPSpQR`E=}$Y9Bd^_%Y~C$Z;*Z1*C(bY@v$q)ZH2tFoU^SvDu3!avE4NR zJx41S9RKAp!@3+?H7+im3NBD&xM=|L^hb+}1-`)=XT_KEaw|8_ym$`#BtUR`>GeEu z#4pTMnp+(bBI=vFa5@+{-}BhurkrC+b~$X4=%%5zc+2WWR&lu~x60nk$aC0mCc!P+ zD-}Ny6BaDZtsxj9>TkEe`uov*_hac{gVyI46#`2md;Gn6UaeemypeksHmKv$th^}u zJ5#abrVq%ku+CmQup!bcYrLE*^^($z-*Z@Bd+Cq}L8w31n%r~)Fob!pciw|-+=3mX7ShmVd>n%ng0L(U+miUKOKdf8Dq{Nry<62n4~3y za$3&GDQt6Q&dlj8p)hlvO_KAJ!*c5P>U;hEdA``?a$VbX?e=&)@AttLoH0Us-pBfs!g;!G0ZIfh~+DU}}G9 zpKl=c7Uj)b4*_ z?ra^(o82KR7Jh`l7ydl*@tYVU<$=#bPM?8QWmcrLl;mMWj7Ep8AlbFDlr-D0zP?^v}-)*B1)R< zMqo?^R~3^lCZkJW{XJ#;8uwa0Wn6Z{Fec-eKT|#?qeEc*1Et;AIZ`@3+G?M z?@6J>^_vn{N>lv&#+k0sz=7vxUW}=d-MAQ&!O!1u`7AsKs&(6sXVwJ!nk7`;*&~I% zsKIAsJas#jbG%FLxg=9M#->vs=L4mSt)y|sFBZvCJX?4CZ_O(DlzwPDE0y}|S^_Jz zvh(TLy3=;gexI#-4IL<=%(E}azNgH~`W@{eJtH_=>>@ct^GKtiO{Zg}&`^EU-H_5* zaC)PjCs4qtb&jV}yq%); zKYH;r9n&xN;?YOxx!5pmt?s+nAXztLCQq!lKarX279g072Vx}<#^Zshl;@uU+klag zDXR|A5~kyZfAL?LCA;A)Gs%_HR%Biq3&rWVgv=^(GCysXcMcYNNdPnNO~Pd78RwMy=0Fr^7h%R`U#J*{%lvvDkpfz%Yi8`%nEw2vec%cQB%ilt@gxr`NFs| zajfI}*R&{7wB{;TpaE>V`gg#0cTY!!khivdG0k2_v!0Gmj7{1((QBQ2Y#zAH)bedt z5XLdYu^yqXX$hp*uvKuN?XqWCM)39tD6Irygj?9QFoR+(hP zh>QscdpH~eCd@{OIdt)QOY#A}ZUS~Ash?ttvD#-n= zB3f5IIL1UwQMHP4g;apav%;c-l)}e?ZpH#`;M_MY)(Tm4=LAg+)F>qiW)F_oABKNd?t(;r#X+@{z<6e>N z7tN1*GoWkt^tH<2D|hs@WPujmpZG9u&B=tAeTOh|&MH6m7o)IL?2!YdXMp0g>)s~7 z{~_n)-@$^~yhzr?&Eb;(_xkQ_2n{7e2ES6M%S1EnaRjz_!h0|Ju6Mm!w8hq*aIK<0l?{bHSJzU$sAP&8x& z`-}b@gsh)1t@`D9M?i^tzd(_hfe7cQsIIa%koubs3*k=cnDqZS`hTq+1Q%4Qn@H5D zGdyQ1B5!t!IuDFbPs;%fA2OFdI5}Q$wz7KlAI7$BdY}_6`{?!c?sE!GK(9wj&u{D> zjrh4`|Exd!)<(RRxg}cRB5ZugnQIS~UtIR$GmGO;M;@Fb3wOL#i7LGs%JeuZN=m1i zs3aizIrmGiU%Raw`v)hSEsQuB{S_q;dty4D+s7H-hJHnJiI(#oS2GKCsc9;MrY)gW zBVOlc){dmhaxBryPD}`Hf&!auAp&t2v0Dadtk9ABG{s%Idng8-9RGOYil$d4`}vBY22Hz7SX;6#{{X$)mC2`)v6uCuU$k}@$U+kZe4M0{n7};w zf`^5jHe;X7DJceC&OEKub1mK;>JMGHt|zOefjj@246s)Po$AQ;)4&`?S1v`V4K(5! 
z&sszXvRziMi2&|w5A)TAy~^v{L$;%UI;2Nj{?Z$_5Z5HcpGP6vL-j)6fjc`cU&Dht z%~)pDLbhMk%NzvNwgLU( zA!E0*T(}n~e*)iq4l>M&f>1(lKwH}8gZ|FxA~1BL75sirvu`v}=Yz~xFceOq2~_6y zyE&O7OknT>9TF&6uSx z-(w#oXy1>P@ck{wz8Km$Z)D8UL1}@tUK&wCWuQ3f{P3(9=XGC^3Cj(oB?gSTe2}-C6^ssH-e^!SR z-M3)OGD2xVa4+pCq4Ll}>in-+%g04V9c3z$i=k+rqScB>tvi#Rz}W{9t?0|0BP8h`MFNI7s z<_Go^Y=%wP#_BI}Pc~^ayko zX!raM`8|xKw|mppBo?*^nQBZVzt-H;Z>SUs`C&OVc_%!w^ap(tZjHLv%AB#-tQ#|H zEoh}V86Cujge6L3c1MO}8g$9A@d7;`1Nmk*kSENMBEAd7=z+oL&)64hfi;mnzt zU7eM@)*)?=iiFy^UthJ`J*KAP1uW;*_}Fe%S&iz(G|bTh9!Su0uHU$?$_@r-+cyIA zZ=-kzIjamC$cN6FddwLs?$Pw2gh8%PvTq>8r96o7vlg*sdw zduKy<-x*hr(H%RkGCvBH_A^7a{=>oL-LQ<*k|2ssCRwCziQx^JWN4zRIw8$wNQy_;x#QgawB2OvHB0Z4k<5R|6K@3gnI&<5dmTzu3OhR}9%kqMiSnlvCSz4C zz4`7iPm(7x1rlaU=+V?s{L^m4ZUtx0Xl#IMei~vkt z<%Gv$%GfzQFUpwJ<8~;@OBQNJ?dt9*@I4O^F^VSmRYd#VM74KaHD!TOUIIXinqP%( zG8PQU_{XwC8RZ3n+I4h&n6;+F$KEqxVNkNdK#Lm1N(nmC*5x#NpQ8?m&FyUUZ?vLB zi+FusotqVH=?~_a`cyiY+iCN3>#48E)IjNAUgzY~Ev{?l*Jf!DW0qLc?rnUTd4^4UvUkOO>^}7Lb^yS++T|Rm)nQ3rH-`EZc#S)UuWsft+tNdYd^m#?t;y zw9&@WvV-t`q@}%1^q*N8ye)tuS=JIE@Ta%bhkm{Zabv2Tko)I(TGKF}US_ZQ>htgp z*%@~Tbbyt$-4^bkvR)k};P%UKZ?^SXRM3P@v_H1!7+d>YpSsxl&iba|iyZBDJ?df~ zI-fQTpTP=$|7EBn<)^hCa5g)_?P*b7GDmhjKr}mY;c3xaGDl-QK)~)9tRWdQyNCv?m+K0XOWYz#1%OKo| z_Sx8b!t};QF$nt6>+HRsv$og-20?~=ek{>UFnY)S4T-G+0jIaO;a?l^>mGi_E%yCIc-`I@r6eMZ~P88dO7e$OJpeb&9vuZidxIQ(hH8vq+MQ2DXv{7eM@75x`Aa<)VM`wiRAuWTRA zzG=HCU#J?WY`Ms~zvA)y`qO7pXs&^O1Hc8kx7yCRp#PT5uO zWxm_2xZ8bU2v-z6?R~D%A2in2^r2PJN#R#I*VD9hKZ{>44`*JLrq{`SEctyjBqL~( z|Ne^iOw|RSUzfM9OZn2vq2i!D zkW4wcCw*FkdVi(p8Wi1$i=lRxpW2hwi_QQ*K6X~cSzA7ta(>Sb-|xt?J*O9KQ!W7q z+T)zG(a99>o?jHnQDl40FnY2)XQ*7xs|TlN#tK|1lJ(+OFVCq=rX1Uo=2xd0t`uoN z*E(@B)PXNwacUwse$(2(f73Yog zregcC?LePg#>R)vu(5|0W02j-w36g8LO-?x_a&hpYv!b_GItD^OQ$k-C;DLgp|a|c z6Xj_h$*Rx#&7+PvUdSjmj6R0c<5fL*q9R#1S@l)H$n!|YYx>3aq9c)dVyegaT%<8l z8^)=o{CtrYWedcie2&t*v%cp(nF<@Tj8lhB#y@kEEjR<^bCx!keQ<8TG~Jlxn>utF z*!qFgQ`FN)FQ|Uz>;M#9-HN_!r3VA1vpY+Zq&8Gjhfc&t30moi=xIb0R7VOrUeR~g ziN1`~<5oRo=qfEHwecu*=#*iE8B$MDPa~yYb;3wlakcTCl*$k|a4z=7u!GX_kNeSi zNF9C-?6sqqrj+05T z-!l&%iM1!IzQvrz$2tlupVNy@veG$eZd?`ruO1PpBc=4kNh~*c%t_1v+{>94K8E3cSvH|Cjf58>WiT z8H4D$idW|-VppqPozu(we94IkY;(WYY0AoybvgkU4OOqsCu8BU@3?l_nq{4Ypfi1! 
zShHG`oO_)ntOHpmd0=K#<*WK+&V|@_{QT`5vQAo1tt8z zNqS$Ur-=U*)+E=j44%ZN81pe2uZAw@vjk<0z>tP#+V)P^GB4PQu>_yyQK)55i1NocSK2X^+5MMR^qIrG=+pTKt zhK!7a238ML9MLH~5?DDf%q3fqgo)r%MRHYCVcyC=j?N42qaFe(|-Xa84nVHmjhb+{}yaK+))R9lL^6vwkGr}Zyo;G>NVm6huH=A zB@X`XWq#1v<)0w!q>4#N}t zQ@&Bco(hl?Lv1&e?bJu6px<nm4`evtE=+G9Y0Iblu%DJMy+dM5eye=GWs!&W1wWY(1F zQ)?2PaHM>dQQa8lRey5Qx8(n%O(fGySWWq8Q)*epNXM?OE-`3eE6EghkbhL=p`LT3 zKl zUbq3d#Gkm>d}ytnd}r#yluYq#ou0LG#0*zm#ws~+foibnK>6O6?go%spI)1V#SQ_AI%+fPm@ z6M>0z#)9i@GxE8G;5AaXT$AA55BomQjvp7MT;Y3aCR2F#$#j0s;Z}JIL!q8$4`$B* z?O3*4JUq=U#rOi^H*UFpD9SCv7z6Q}x12ishg+J_3F0?xIe#d?4P$%;@grKe4na{; z3@V7B^93{i}1md@7;XOPNCC%st@td`X9STLk7y}@F%NET;J{>8>3@E|4<=COP zjtrw6lwjU+`S7TYG~+8M!L&v2@Qekv&EQ>c-}n?%MV(7oj}Y|z@Kck>zB?bnY*y9%ZHK@n{%qdR$|Q#Rz%-D* zwXoWTCM^lzY}Sb<`i8o$DJz7Bw0-%Nbt8j9Ww0$8n(w`>&2)|IGd2Ruy*5S5c{`TN+=b{*|UPNGXC+_&2kw=7l1ltj}uc4mn z)5Klzgc42B*lQ*s6tLllc!ad8M?yI@ZQ}CAQB>jf16b3;RTItydCQPwupN2(ap+0Q zkD3#pGUJfbb>Wqkc;w4N%M9JMcG;ue)ghhx3xxHsZ+y^gV7IKN) z)kDjEjF+$s5g>O+j$}W==NO0FAoobpvc2#*BcUL4Ta#&3HKWZesVE@(YP1Ii`_Aih^r#I(0 zm50RU0V#AtHlTQj4U=%zE=FMv22!Y@mR;Gf)3xiOpEV@1Jwa|9wU%68S3JWQd%h*w zN4n`VsI`FoWS<9ZiLD3EklCPp*qSjG51z?ocW~fS#q16QwuqWA%kU#m6ReoFAnlkx zZPOra4}1`8|JlU+M#&D^OD z<74#jSBDLN!r!m{;#|b#ul_=CE_>UzC2RT8O^QzWo7J@aOgcI2&-VCb3#=BY{B-42U-BZS<)J~Gve%?W|bnNX( zF$CvDt@?(DM)9g4EYx(;mLr!v6i5JT6|AE7`HVapCqX%*f&91xx9VwU{NUq8NO+q> z&9o;zd?Zlf@P%8=v>SdQIkg$q_)vJjK-(A&m+O1F7!^3K zlu5wlqwRvV2>RN!gGL?mLq;QV1iQBRVWU-^A>ZGdikQA4$P@gg{~lEdbx|`Pjo-XP zekWe5pw|H&zPyBZsPKJ%lV^{ml$j%L0oQJR!84?=sz2m=EBN8`^G#E7GS0<5{UxlR zhek@a>rNRiTkRY2o!@-XkTTp?ueTc)_Eeahm~CFC7tCd8&{|XWpu=L=Yzbjd_Kmu! zzV}H9^)4Gxcda?5w16>bS-|vIJdY_Yu=L;ByRPTEirYbH)lrMt1bH8GCl8xGBw9rS&cXh38ifo3oV{oW_` z6WRZR+RQ5WzDblLvM)ewm{{a`&WE(>#=X`wM zC9aa#*P!aNK7N!$F(TU+s?J*XeUT_eVxNSnGuQp{6SIiy2T=7n3E$+zED~D^+BhrW zSDk1}WIu*BvT}T1C)$$O=b(+u96w^>G?8s((6{i~0P=0~!TKl=&3>*M_AoV$vdaQ87Ww1 zFUkq#o0~^fEL;s;KtI$AY zbZ<4SXpw*nG#>s#MKFgN*U(>06~Sc08O>dtcC}#|H1v15`BtF;vb#=ypR1zWtNV?> z#-e#PZcdd<-i9?<)OB$*(8Jyp;F-cnQ^3C+@h%4AOzMu?QwbP-BD=4_g#&6Sv2n%Q z)4b2OyewO;1-MS0d=h$$G2&eh#+kK)fSp&66y)2Mf=NAq_H^$8fcBFz0PVkcrvS8{ z^w<|!8}Y6I&lFDz@2js3dB=fg3MQ@hxz|R$N#GgMq}smZ+E?!^0QQr?`;ax7cQbgV zbn?VLd~Mh}50nH!@ zVVxz3?bgs7pEGxH+OCcoH4-Ao@7y(YPe%NE-O%NR1FpI~ogoGGM<)R4CS8HE1nk2K z0P63Y)Hh%nJ#h5$awT%S!Ew1)Ruywk&ZI}{Z^=H~lWEYqe3fplwfaDzL}2i1@_9KWcL17Wr}nk=+&y$cVlL*T<(8Tc_T9;%!|-V ztsj?6pLcb+sjxR9pMn2XIjGdvH?Z(zd|~@Hl7b3+Q1xV-3#Fy*W&Z}>=LYtNeNP=s z{U)vpdt@&aiNz}uwfD*)=lobA)`Fc(Q$NwRHsN|n!QU#0K-O0soI6@dEftgxp-Ae)2aeuubTbEb86CM}%V_*s>2-S@wxatE}>{=p%n$LSAoDBQI4jEWLlTZJ9rcGtmDDtJZt&UuHce+YbJ0w zhIzfW&IY63Ti^706=x1}!;2In#6_HOUZ)8&nb8lrCyX~}_qrPx9YHf2^CJjS#Br6e2Na;Nmfn8^A_ zNd&mXtn~gE_nrssKhTbaT&ov@_;F#Q z>hIs=GcPUxi+?or+~?vmEv^BJ=bPH>L-`QJB(OMfh?fHs{V>Ffz`oLhvjD*B!L6CO zGSe?w+Z|=5AK{~l0~CR5zBHf;EKVMh~FZAkI0O9rOteJ0RreB7)naE7L z;&qAx)PZ!qG@ubIPBIPJ7mPP427b&lHQHBovok^Pf)FQWcZWKUefiy>W-gmUVf_^2*HnYz))TNpn<%EJ!PU z1`V3Ist)vUeL7u7%;Cja_BsJGfKM18KUstEhZWRPuQsO*eJbAh)G_dR<3_Da-t4lUDFl zu*xF+;b(EJ1>ix|@vXU5NujtlZ(bY|? 
zzUV!%6Q%(ia-m_6~Lq_J5_ldHBFp z(q}I7N_$*NYGabKslQ?#13zA7q zzkBz_${lX^>moXy7bSA_KJvL$`7Z=&G)KLmpjU{EP2~6W`*|8?{JpE!+{f?dMS0!q zF8;ozD{0^5bhW4jy#f0lKkFwgG!^vrP-N30BnVHDKbz`gk@2v^G)mKFSR$ke+^e^L zB@GYj+t>a1VMn(Tq-|hFwQxC9dOE(RgpBh{wDiTK(XW&RdnVRU;a=!IR zQayqLgQrE=usz{vZur)~q~8>9U43sTtf?O(Eaas>^qi|V5RQBm+qBvXr*tR_A(>+2 zigT@9S>%c_(0V`|lq0tGBo2ze5jst7W3NN!QHGT*$6WO=uoYUbQ zcgP!QV6AV$IdkjSF=Z_v%Gu8G$P#dGla1ix^M4b4{T-YvXNU@;ltTICxkO zb#vOuQm+HZ+NblLSmR3zZB}L6DIN{|7A1u=iXUmjyQCn_)7!`+pFz>vQbJvxW(p_| zrK8^9Jn0WRxE2iwQV%@+9Cx#9VZ4ySM%0IEr2c!0JAtc8sN}W4=nvI=kRt6d%#g2N zLk~1&xB-=T_?T|c0bR5sNa-&7x$h>~D)15t_fWvHvO(UVoYh@-%B=kDqckulhP%H@rrqhc&s-n zzXXh)yABjgHwU+NvQlzP|mkY1!G{Nz_+Hb(8^e)HnZ}KSfz66 zG%Ej0cFeSG{#)?c+9dsBdEZ2E3zf^T8@3?>Q5^@_&;K*c_Gs$nYCtkQQSa65V(In& z3xb7a@ocs&Z=8VqaK!ugMr30z&wTqwlf*WV|H9+bZG(DG0Jn5;1<#SN89bY{RG?OS z9IHOV{Kw6Psg9aOH3_aii2(9fTlp{C>)jFZUwT0N0kMVnW5dz-gP~g!JNkc&zXOjR zZI;6-pyjkr*uM&=UYxLhjE{O8e*N%#lt*|4SiI!m$UYC>Fzy*xyzsz;Q;STTYYOiuLk8$2_M-thQ6a3v#Y{ykVV~VPgc%CquH^E;) zP4Lh@E78$YBjJIUpySRNw%y%xM5unnsTRI-*Pa5oO(e95=6u1u0OM}&y;VJvGlI(p z<4pHVR8Qp$;gZ2Py}hrh=W|AJ)nJ_U-gQ;3oUgdoVBGz^3RQ3p4Mzmy5PN>AyhWbc z10W+hPD3k7v0B0h@Af#HUn@bernLt&IC7{JdeqH5JO`XT57HUQPBAPwXx+TGD$&N> z3>hY{U1$>3RE5&RYc0>qN)F4xIEyWv4dqbVEgcejxkNkOXEvim8;xhY=GSV$S6LVl z*uwB`X*Zyhv56wxC&zBHBT)fCuwlf~SqzP$lGYB56p=NAm zN7=Jnn-`GG{@}r$GT+!4Ks^&`hRo6_7VBIHZ0Ss_uLznR@)o!k?Kth92>Q_sApc9q zDO{}jmYowmXsP*ZYlm$xAgMKO+P!4Vp_>Uv=JY2CWVL!0s`}Ewt@RWNclbz10r=2l z-GTxM9|}nUAL_2tDE#50AvNGbnA69fov6X75%+;FZ zM@-g=Bq_EE!2fF1vMV!j#f_w28H`F_ty^|s4y`~)`c=DA7t|f-gH+y1`x^8cQq-%zogN z`;-edb^Dhd_H6A)Qz95${1mJqY~ya%1*#ir5*CjnDVMq8Z9MIcC3LBz)61#eN(ybY zV}`$g+~4rmZLb6|C539Ln38shVU(9%)(2wV#JzDpm$i?9*ToaEM&xU$z|k&PUNr$l zP^u;hu$KZ&8b?4uCXGk4U)a*n4*15B>61p?W+UiFcq5a>Q-K|3G_(`mh(K}?1r5ixJMv@~AbZk=J1`@ybi*CNv5qt9S!M^v8P!qgo}njz7117{FTpJ)bBY^Rn~-yg zBzC`W4!&-dB%I@d_n~n_n|OWb98p+kWA_463|rI0ImpNKqKGhjp@c3k%*<4ty4VH)?RZ8q#x5z{s#wiU*wCwxv7<5NeS8)+8& z6FEm~9tI`QIcPwUFbgUf{WBji-rT_Vc}@Bk@SyIoakkt@<{n1u^`8#z05LDT#23s- ztylbuYMmhM{AJ_Na}r-MAHlf0bVrVqR^jqB*#0|m$A(|#`(Y*6PK+*g^PZuje?B6 zVnnqf%`jqa_@X5>@tiLA1|XgfpVcZ{tM;;<4yjyhn28-^p0-`Dp(+M6OXT#VHOv6q zU)nv=@~kQ#tPM0cv~@A`j6HC|_7lkovwfTxGN961285lq@1h5U0V+`8^6^kH#uv_p0(UhB1|3 zoDp4GRf0js#DH=3^kT{p$dVcjq+lK-EKhaTFD!0)Rpch|V^?&5t@WSKTvW_JIaNYN zO^^UA`W^s;9BDNk0;?xxppq&9+ZB-Sn2%|BgeSmuLBJI(ro{lo4Kd8)f zlNW$IZ1*Jq`5!g_Z;Zo}-4k?#a3{{b*YguD~v#uGIyf8MIK=;N;}>uUlYegN!$ z-hnmQ_9d`IrgG=v%>$`^Dr_TU3h2j%HQDr8aE`5g zeF6w(dGj9oXLda5CqV18Cvt}`_Nq~y_#O9p@$l{gYjPcRuV(65>+DfExE1r5HM-M& z<9(_KiG52B&h)`P=raIJiqwpaAPwTYD=gINtDKcQBe90GlUX_cvv#Xx;M0N)O*Y4J zHs-fV<85*BJ0GCC!Y&%Nz!*#U={p}9m7elT2BrLGl$;n<`dytyOt+j{`W+Nlqk+<7 z6u53RouGeit@HfGO#h8m_WaI;g8gZi|F5Iwr>4i~C!YGi!#vw}QJxDSG-4LSFKb`2 zQHr15yHkvmRQTTTdoa*XoSd<4S%tQvZvC`hNz_0oXuo%DZc1KWXq_l{O2Ofb`*)un zL%;ju3jeQz_tzZ{mCqu6bNfM!CgSK!2}G6O%!`cg^X0nY=n{{~V1r)^%PE5y#nwk* z!Tmokm)B>5NxuG?u(`Kmw+BA~;(WHnhLl&an+0sjbZ`ks@Q11z0G-~=K;wkXGIAJH zH(DhG{UHRx{3!5?-4902%4pn7? 
z*Ye%MG>)n-BN?Ee(W;BtuWUEH05|r>_&FmgIasBZYFpzKBk8<1w4kh+mN7rQ>6w~i z`W?Ni++$YJi0dc$1|3+>x{uvy6+%#=aB>>~E5-KufXLH|z@FG1DoX(re0xs~!H{M8 z1ZZcpj5izawoLB^<&f;vHMj?9Wm#Y?%T_+D`1aSbNbp*oz0OMP!RGQ~{N5t{)L_ry zE~b)tu(kXeGK=5E)KVc!bS?#I?AEdqUSYHhX7~Kq=CTK#TWY)|;D3j=7o~p}>|#3l z*iR0Cq`hxzIo@(MWfxONwOpbf14zHM?1(QLEt9m1q%YBjLH4Eg3Qc@v^fDl2w`-lj z9-H~d`2X`C#zOWJwhnqWdn3I<-f`W|>izui5+E*!0u=@Vt$WLRxM_0F-0z*a!R66!JGy1xR2Zvd%b!QK4JYP~ z;n%XJn8A|#PC4HUL(9K}CGwGth9G;;AVO z6Sewrt2Yd7eB)fyl`LQ1+^8$90e2E}ImSchql9*Izu|U_Z=6;rtDx*2jLs`JTHi#9ay}`ULsVWXPMpu z1dcKZExh37vKwCV2mFX6Yj~HpoNBZTKPkyn+~uvL4*q~2mz=BG<*lF&F75nl*uJnC z@*haM=-a$C z2~W<5?%Ob!ceQ51Q!?-{--0JqME0jEGoK!T$q4xhYH3OzTMm_na>di7;39usdZuJ} z77JCF`Me+@XH#nxxi5Gr(hf-RM`{^fo*_9G7Rz=_JX#=PAc2DHB zp-Mci2USqa+*8M$w_0G9JOH(*qZt{j7fT37J{ zo^;-L6oT{GRf1WEMN}ma*~osa?sayitAq{Hf5{fHjJi=bdibr$buVe1C(N$Oh|S&f zeq0j4dG4CSy7yV1pV@VLp&+L?m0(A`H`K8Wb*DTCzFTw2-9$cBye77g>9lyA7E{nw ze|NXuR)d%DAfXg-Nv^kGNMR+>onjt*yT;VrT|NotPB83n8A>Wi(d~OuzX@yf5|$@q zo03r0*$M`&wS7fQ{o&yf+w{Ns6g&{Ggu1SoXWdmTU}@M|11n&DT13)hiYzD|fY1cx zwKN!Xw^Sx|Z+Q$_lv`VuZSUxAZ8mw(hJ{gz2kqpyFFgw(=A~()tck^YjR;|K*7y1e zcQInAT{f6pYIlYhwMBzD;g?G-PXl)FRt%E)aw(0D@8xT)@^DyW4DvphO3| zuzSYLn{OZ}#liDz$+3scz&#@^n80!wLpAFIfoMyHWmJI=24WJ=mdxB>c|mXr$ya}? zgf%~=*IWW9O~G3pb$a$6Z8g0~c3YbZcr?k*V4thjq^uB(&O>_dYkQR3{{H{@?qQpL5X#x&z6;!y3(zf*YnRbym#m^y!zaJ$C7UgAn3ADlXABlU`FtZB}j+l<3q-pLWOZeYC7F z%n2~fY8+}w$85wSGW48c+D4bd12dkp)2AMXkRR?ptpN-2R@_xH2=a zm21u=&AkeRFih}uTazCt?oBC+GANRn2i|fyxuecE&hRFz>3-2o4jffFED9JRMfIE@ zRM9XB)?`@p6u|oz_iV8I7k8~gLHe>11Mo)Nj~?>Ce^j>t!i@XHmN*fN^k2FBV64_~f6{U_EYz}gjG_oG_$Pm~SD8jY`$39MWjt+sClFRH7N zfMO@IA&kUsR3mk-QtlMdfVADvldU;bJS+@rGApX#Xsr#o1BP|Mu-g8SpylcT(E4cc z5$#S*HLw*^Q;pP<;9F)0P~9#|WwwW6R0J4lIff#B+Tvf_^8mkk`0D;~{-x?(peV^* z5f>8`FyV#gU##ZR?&whSuAoNICSr7a!<%tiwSd~#oFqQHn6>#hH(_y89v9z(38n*h zcjpH19x&mEH~LX6*xH$-wpvY<#7yArnbKK-NT!+EYIoU2wP7QP?Vhzg3&-9s(gf&k z7_wobQ95h~3q=%#am2ER-E+WNMZ-t;#Rr!I+CVw<#S4eeAOUKCF7PAZq=J*(_IW$m z0J}h|;-(b=`9!e_gASH)G3}}nK;77* z%D=_yyGka@q@}Qi#tDJNvM4q6V)I4fX+nQXW#1!a<+yagC)asO=yr{*JP%>e5N4s( zm<)Tw7$dkcLF4BOWFCR*O}9z5*Lf^I4aitXmwvXWTwnQwr04}27Rw+l$f;pImi>S= z{7cWGuf#uYg}X6Wi2T;pxa2+}$ka+iWDcH zAQRT*F*{gM;{+68dS-br70Tlb0Z&&XYiah6qfOS56|*+RyOg2g$#`!Uh&SA-5VI>b z3xkCg81in&G+7tDL~uN^gy)p8B*QooyGBj99ne>@(yf@w7>^kEY@C{M4HdpLa@vQQ zf$?y~`-~Rf1SZojjZA?UC1yN-<&|PQxW%?8bxeHy3t;)S+$lm}B?G?{1GkI!W}Zzv z>PsUNprDar-kG?aA0yo$MjpbZ3BvRLSi0_bs{ijFLaw-<(xuE#!@O4Z))vACx%OUJ z*OfgYal1t*GqW<2y|2x^Av>E~Z;&4HI5KqkQ3X2Pgp4!i(mpIX>@P!g`GZ%h=HGYxOV70Q`9p=*~t6pvYJmpp)s zkUbDVW?L;Bh(KU#HKk`=OE8+!#zaoJiLgd|n%smV+J6~{!kqp~Konlj8sH$G+ZhJ( zFTps6k^JvYU+(ERqkB5ecr2feGak^LHVQf5j-lm4{Is|;Tu6K|b|+Pi_%D0DE-zfc zAg4#>JZ2dj(18x+(kq4HJ(jJ|n597&S1VD@vjT_rTz(xjeIXSLHkaf)+f>{#H75#4 zk5m1ot!315I1xUvnu?ol>;<}@TQ|xvBZJ0W7j)~7awbn3cU@P|xaTOHHtxAmpmEn# zK5g6wdm&N!PUn=+>TH!xGEGLmP!>|!Il*J0_p}BbM3o|9JXoF4yh{>SJ~g+?F;w7U zJXv4&&Rj`-ZAj#oV`y7IebdAjke;SKn*RH-XaJBqMFWoynKTC+GGS@i1AK?#nj;Re zz@jwp?2x|ar$ad`?aRPbK))|bv;rpHLGT!6za%jX$QcLrhBt3DXY;}JJfA1G2OR2P zJy(o>iFPJQCcU5YNP1J|7OBZpKw?kjA*##h=coc(lV;4OZA#974O~Ghho21T8F2^$ z4yS>q0Wg+D3BbGjkf@r)JLrMSQ>mDB?NSuvZe6BmliW z;K0)m-07JF==Bu|`l;_Qx$aUNe1=xJ@VlNIH^N7c*;TKS)n3|hn6J!Ey2IAVqr=t} z<`<#qPIAQ}a%2(Ud+oltfz`~ZM);&^{v~l4d>&8$z577GA#dS`LjxdW1}G0%kUt%Q zVUwQ+?gg-cy8aX5I_Plem<`nR9}w4m2fAZgP}jdhT!$U59kYPC-UV^(bC5lTRxOtk zAg)6W)SzegFaquSQaNO*WlgT26gLh_o_OYuDBCn1V@w^M_Q&exNCF$GrqkvNLe_v0 zyiPsGlT5Kb>;T+I1-cZ0Z~Q$3igErPMp0Zuv(#Z@V!fP8Q-i+5RH>Ojh0g2l~L*7|&l;Xy{5gENWUiwbF=~A?Jd(CRb|$ zjcCEqrET`(6xMEb4G}oYF-Di_Wn+G2KU7nGQ(Yl<;M6UeV@igl^*fxpMF)_FtXeDO 
zr*2UPXkduF$1mU(T`Xs5Y$=li#v*&qUzjPPY$`Aoxyx}hZEV(Q(&m$VQyFKVK6i6M zK+CR)A>EjtQ^JR%dpbD~0wUgYEbXVo9UcBVQb6yPer)($Wk z+DtX-tx4eQVJr9ZtiY{@2r=(_xzW`+CUU!3rUER&si)=Ld#b_X19iV^t%WIfVpcQhH_EcX#$wWq83?=c33o;WzEUt6M3_>>>)0 zEYHV&s~q5NnMlcM`%sF^)cxi?NWYbmkRqr6La5ch?GI*dnF!13`9M0|(Z9=0mFV;z z2l5|M!=GpUerI}{PG2o-PfgV}Kl(~r$HT(tl)1cHdL>3WlzGh6M$apY;-9v$f~bJm zZ`O9%hDXooG~1P>PuQK*0&_F%bm07Pzj+s|3T`a$X1xg1VOzPiV7YcI@n52f1Q9ww<8cRTXvU#Tvz+am)bs$s+FdQ|AMpQ%hI_6r(VFECs=2M$t zCM3YzQ0p*!xu|VDsiGV=A?bvcwt#jU84@{@Dq7h}a_kBUP+D`pEJWD#4nkkmlfyEr z>|9EqJ45*eu=PAc`8v7-hhd;n4CJ44Z9Pr`P1JExMVm3|r5$jSp6iyIzQdkrNDEhZ zT&C#*XB^It0Ujko`#J{9vtdy4`|{~0BkxNBUJK(m4BKe3OggRk5-YF0XHeas<`a`V z+iFpvcA(~8==PhQkkKgvH9yl%(d|m)a6a%K6-<8pl@W8Z!k^b#awYX0{~v?y{TW3W z&9VlBPm0|U$&9_oP#LaZR(32yu`!U}2y4(Pqma3@G2;9Z)?i+CSB8%3r}KMQ;GME& znQL5w&OrM7uk5-EHCMm$8(84uvM(|)u3=|D@Eep_$}n*CIe!FPe;HAR!~U5R0n!Uj zcO>1usHyCKT@KnHP|zfPbRtTR+N(z^TIS!QbYw#>zNee!c0sO zU}&@)r8pOex=9gWws7#`>S@c71O`#%g0UmOpgW!#M8A$v0lvj)=OqXxjD?Ud3iqwv z1@*fOLozh^Hg+HQR_&nH*Lji@ozYC)oXuCnyE#j6iFV>7S(EIsiy)*>rn2d4QaF}@ z@Tsg(kiV;QAa@}c_xD^TGH$^jk-ONVBZ9un-phE1z3rIBG@zX0kR;RwMa9^4 z7La)65Yrna{u5Bxp<>Y!DW^)v$xCAiV_S69vr5*JH^!32E`YwNq&W#4moT8) z2UpB?@08ZB&;QQ&tqkFOiafQe5}1Kxq=O8HDPs4EBoKtIHbsz~=DcR5$pz0r&tKKC z*1SRb`rIER66SVhc72{y*8`jC^8*sCLLos3*i@^J$T!peLpB1N>J-Hxo&4~4;jGr7 z_MfsqE^glN!?Hly%`0ic8=qlH1yk1#se2c_e?UIUqSmY&us-{5U$qRB zf!IOr3k0k12Ttwc2YylsPn64gHSyd7y)ePVH2f*QnMEx8lIg4%m0X&=~xo+%91dVRm`xMD~IT7gj>c*nCP|77%=3)4G9o^{nGQpP3p0N zmS_>}(p$sdjnv}HaTT$G=sg4Dc0l!1MrV~U4(pYc1A@rr8X$<`1g+64@Wc>?;xEy% z2<(^W>d^~q@I)sx8{CjN*+Q4Q(}=#DPZ(yre$RvjF9Pv^=Xfdbe>Os270@B{#kL zi@pHK!rZ!!Reoh;6i60D{AMAhTR^I-UG(NR3pd?9m8kVfKL?I@=pJ?UuWUI5PmuhM zfURg{+v)AZCe4wN5s_`n39U$abOfq9+qP4LVo5?EsO}<1zbsr=GtAlG1eM0hQ5M(! zq5@chYNOXE)O8pL6%B@sl&euyi~1I5*S;b}#-`wXz~)eGue=N)A&%q>h{oG1p&^@6 zM{m5*?Uh&eHyMv=42beiSHtk64w~da_STUs?`-@~oq7xK;+P7U zMcpJJVL-a|&hw#){H`r9X#q^9J~tySy-g{&f?mt`re#X@%o@TcDa?ENri^Ked_KuTvB+6eae^@2j%hU^dqZNJQBSKLIX%9tfD~@j(>z3uj(5)|sT=jQNRmtA-3v&aMm_in za#;M|d=lKKr_Gc6E&eYc{*8p2w#iBH^dJLfB#dJq*TjeClU9tBLfAGLgg%wvT%YX; zf;^bR9xxlJcY;oSs;~4Xq%o$oKBggj!y)lBjTLZ~#tJx1V=e13C_k=}2vC}cJvJNb z4pVYPOT+gmk14A7_Fus!(-+kaZzANVM{ZCt!8Vmuv8lpxhPBLrCMWXAbPxXvCBZc3Z3n!w(AzYd7T;z5$Li;OU4W zS&?(VGYZiDD`tljQ7bj4ypSi?@f%(bkQ38dtl+NVgoNep|qo3;dpeoJtj^lEwzQaG-|D^6!mtM$yVLQ z-o1TQ_h41fL`**2xqX#gIPQhrrx>)l?I*agfP8|wz4_>Z9T8AH9q~OqvtncVJHCe4 zCp#6Twxxe%c@J8Zl<602rUa5>Oq;65ymzP#Wu)c5IU~KU13suqqzh+TX5A6rEj&{P z{Qv2KDG40aTtWLuR@& zO11LHEf^5N!%EjV+Z}N6+3tpQSCUsuKodVA3l+gL%K;k;vs?n++)_QY(~uZ1r{835 z4Nrv|5D{|vxG4*#RJb8=Sk8O`>seXg7MCCKORuyuobYJP5zza3wMj5!o*ToVP~`v? z^zu1w8mIlb$pGlrQ=;^({3HH?;vTUw4T}5hsWjcXT?~pl_Ee8{qXNa)hp-Hjp+^!(YxIEwff@ActW_1bBS|Et zYWiHnxFL~(mF2WwcMArY0yqCU7_Adw5&|8@Qcb?TFR^X`yqE+!*S0UL-le$vlI!oh zr$r;=fk%hcyII~1+X`}EsEFDbQ<;0Sg@A}}ogg9EbKRr?5kI>w15t4zOhBTBv9yuR zC9J!^eJQ#Ac6VAP!rzGK#=6_;?^f4ZUqJGyC~^=}8lTK`mAYQ5wfR;JSc#b8)-k)Y zx3UfliK=S5ZM3M;{-XktO&0BRh|I$!kRu4|4RX;9FYKpHHv|K=tr z=v$9LL0{@4-|Erz0|ZXamO%hy0j>yG8+|`^r3m_15a?xDRODotsnDoeX?V|T6TY4H zUP$1NRXy*FIQGHDTZGTs$(Yk#efJi;?&(Im!5i?o0F}Ob1yDWyVX&U9w6I>HSUsrm zwtiap1-I%+K-E02&zS|S`o{j_Ex|c$r-g$Sy?()n=;_ys*LSO*1O?u&egW|OcQ-&-EM$_N$~!*H&mnF+w|^k9QZ2q_#Pc81qCi}7jbXgDw?gMY>cTR*2i?UdB;e* z1qM%P@c3@<(39$a7hyj)ZE(lb??$7^fq$5V1xW|00UNyGD7WaDbV?CTF;}kiNL3k*nPB3vI1}o>D|B+qW|&3ia?0^zdsFJbGy|?6Hni)6Oq1MiMKvCtb)-`ehSeC1r?Ni8-0Bp)As}^IwT! 
zz~0^yLGqDWZ6d2~kA)`3oDj1qT{Y+Zl4FEM^SZwcyBCMd0OUoMu#MHuL;dR_Z;c+2sa&38o$VpgP-Y_KA7al7GzB({DdI7KN9b%cy!A=da z7!!{Q8?GqhsR|ngAzY|!Te2*t$2J&=KIe(F-6x~-M5=J+*nNn?JM)@-2pA9YioV|@ z$K@53!u!rOX}I|4LCCE+0z@*w`_S8K6F%T&tSBcG$dD1@dZyK;E<#u8BZ^RDEmS zv$BG*>&@=e!m;a~NB0~og~2s87ES!aez3pkmQY7azt&p11JBAPE!qZTm}2gcY3=G(aj@#IzBJ!SGC#)jtl{d;4)K z`i}vDv-Q>VHiN!eqt}s|gKc3nXBj+)D6k}5XwF^GG1NcnH6&pobTqu3mo-%V$PZVT z_Sz|PsyZ;DoM%K$>zCvJs$eUYrlS8Bp>~ax+cBqpJWX>sbE-BlwOn{ay?4}e09C(5 z3(PLV3p7iHwSlb2_s5I-4E?VOe2VtcPw)}__sl@_KuY_|dl)kP-X-U-?x%cK=;Yl0 zz`B!pfh%RWZau!>1#5bkngNknAQneooCN`yV|7@^Gt4=St5j#);bfu|p@N2X`b#_J(`J~EC> zi@0GKf!<7%Gr=_CF3FkLqg57BK%7unM%BYObMXvhHufw$P~~D7fmhD0oKf14*e6GL z*NDF@M{q{BEu!d;Iop;|;UJraF93Q#4!!_TL5x*Iu1zEdvJB_Ff|3V&DOOM*$X0~s zAls*W#&aNiUNL&ln4{7ey@A3Tw&fswd*bJM;T9(c4YA(m6C<$pzmU7wday~s0dmH) z9#>yMXYR{gEWtsSM6YnQs(4%l*8V=rt4d)}OQKy6T>T&fLlkD*}PTX z!cP$N;R}sC&IJLn&!ihNB^mcYa*ti4;gjG3+u~f}6 zYRnrEIg+~YsaW+GUodj;{B@IF@3UzD>`^-h$Le;ELK+$lRQLzi+|= z9>hd}6!N$BHU#-APaerhA-i1w9`GbalFYz4Vgu~ph3#^O%v#S%dmx{F+E4{zcqPXy z&{~eAbXr6Q4Jkw7K$3Iw#^yU7<9yOJjY=Za2N44j)A>B|Bo&36c1C!BW{jx;-*_Bz zkJmW56h(B75n zy0Nxzh`sCeD4E9aK1dT-NP~ag(z0I?9jxv#t17i_%*g1}fo#>`*koIDu>&(QTia3r zn(Xv+6g2%``O>0Uk)!6Vd+hzQoW*6`w>HX%9F@3y7G zhROL)h7m=e=ogsH6?0UMY_2}d+?dyS}+~cR$99|M`28upQ z{Wqdkah};wK1#pdahiKnqlguOO*X3_>8uv&3&9DdMRKP;eSHq#(>L2DLF#mh{19Zd zde&(@p9(XEeA1AgB+iLsRL=@BS=)}UMl7NANd`nt0ZIHyNsd}~K1wStX_mn!L#?}j z6d_R2lw6#kCImJI#Tm_Xj~ZviBd}F!z~{W0p#ujbPth^Qfz7yFWzW9jkX&s=(~Ann z@G48?f?r^*gi%G?cu6B-4Xl+YkelIMG*J*`8b$l-6N^!`u;NGN9vMIe0MA?iuH zGKf5|7$_f6zX_Gm%$o#--RcD?O6K!`)ZFopDm!ndA(oTS*@|T+beq!<^yD2=`7*F( zJ{#gXdBs$*?26`flsvc5>|eoZf!-K${C5(w>m>OTGF3G6FSz(V=m>;>|MHs2|0v}T zS-=mA5%)=#0-okwN07A(Ue8#Qv55Nxq*)^da5zP^0I0#d6|9k3!~r7)K;k<|QhsW- zuK-q`PeYEklrX(clKl|nq9MN1bqtOm#HF3rN50&UKlBWKScYIkjb~W9D(m`J)q{vRln=3O%_3Or1n)AMr!DIhI9*sR*Gc5SmD(u~re3{!NUNz2qyq)h63pf& znfoP}0@B46&XyqJlNL@e5W)P}Pa?{$_yaXVXI1qO+JjhCg)11B%3>j8tguJD^nQ&{iwLP0sa`;`oQNkT_&lfrt^FHh=V@IZe-22WDi~Rk zbHD`+NFT6bgC=UFvLBE>E|P)aydFT_&|0iyZp6pb6jT7x$2cm7RE`8LB!>wpAOSq8 ziU3RxGI^6$A#WT60uW$7#?7<=7a%y~jC%>jy7odAgbvFdrNGgW5j)8aVV9m^M#-6@I`t&9`mkC63Je+LQJw zP%x2KE;L`OV;t#jtXTO?tl6VGTAts=1|FxHiaUxDJ5JZ&=%<*-X zqd)JOw7c{UA#~~B9pwivF%Oeu5tqXI?zqTBfz{Nu_!lfiNGOGY3 z&(Ue+zxvtQ5Kg$;_Gk2x{F1>W#Jk_^XG`iIMHc-o`305**k9o!L^;k>qTro!b72w! 
zga#xF!V)Z`HNYYuWD=rHy`(mK2#_=yPBXSX)!C8-q`9QwRyavb10)pSk_F-V#G$0& z3Y@-^5m!;OlVpJ_x@f5Ux*0}M!sQv-|JkprMQv$GAdH|W>2hiqN|FMGAxA&X$%O+sMNe?zA)d@G?Fv4FUTk?0M_xX}!lXR!j-9T(KulCz`)`RpLVQ z)?R_G{hL<_?*7xYFy*!{-@Gbt_x09ZH#S**^D4vPpRPqVekXqOs(QWf|GNpI?G1j~ zvZ*}0bwdo?VYmSfMVcE+5fT%xgAX%iUw=&c=RJOK`m);zTL0MfSKb$ddn$(?#X)Ml|;eScs)ajFI);7sSNXKs6+qrZP(CT= zX%0p58Z{g~fYb39$5`rA#Rw?}mc~))7VwK^jpxQID-WrKEDb;XIK1WofB6Y%+w{bV zwQJEU8}_5OywsvnlEcnZejGAmZ!9Z1l0@NNlj_v{d%>ua2ytRInOuVnJ0C-wxVy^c zo8l&V%I2Y|$`ZYDQXYkIuH9wxMtL=Va zsQ=Ozj97lCxY{tecEgFT@rNH3v#~mnZ9;gy@;XVw0OxX_&y%i-0qxqXgJe_7Rj+oD z31EwN8%L>`wz}LT$`_}MKhjx#j$~t)Lc<7BNjmzdaRV}E{_o0?*qZ3^R{So}5y_^N ztM5MkfJ|Y6cVXh+W<@4nR1S5jb`i@m>vYj2s{EX(%#HmuQ@Ql>4zY46)(Gpub9fcu zHvUA-w8f>22sW#Z+vzTE6Qo!IDWy^4CTjJ;u5QGwUp`5h^dNzm%_bxs)~8?h;O9 zDNY(^)?Ic)8dElgN)jqVbw*I*I%HV>TxC{lP5gL#vuSbUc)a$q9MYJv@%1*LGDT;^ zeH=n&aGtJ&#nuFmBXF*kH>(OV2f=S7dB`Z`Ha-+v^LD&2cnWWatbMH`?LJ;Xq8-J# zjOZ*gtDA>!-*ld>Y1=-3H)-4C6vSZZ8 zKc>m}_^|A*JG{bdlsRjNl=YU1A5^~($*2^k#a5+OQ;#QyYzaMbab>GRiDH(F zBYrir6DJwxrB^8kAqVRjlTZ_=DU{{o{epBO_JZzsG> z?dIRt%@@hvCY{i)S)8fueX*$Yy8usZjlC0G9 zo!n2>RFNcKnOFuHo#ijipQYJpFlE7$v3@das8=_qA4^R?$WBcJ2#8L&FbCC*I@4d>fRD5TXCBN?#x1fq}Bv}SHw83mLK*|MVI zjQthP;De+4natp$isOIZH=g(YOJJwW3 z#4C%vH2b=aK~kdUG|?2!7U!}UEvTr#A%h<@G&;+7gq1I7V(yv*{*%Qxa~wjvh_(5y z>Lp60jI?t*Y3uGn_Rbt@;l@ymyFt-9y~JZJd^L)5Hz-M`clNlqX%9uQ8}wR7Wcs+b z8I8KM8x*M{GIM<2Qq-jD2&1#yLI{#Ep-gywLX7p}-{qpaKP|Sd9IYcVdz@j(cM&}K zJ+JO^QQYjt%;^3*A{A^lz3~&r!%U*j#-;tGyg2XxaWIZhq@c zB2Uyo@2flurQJ^tbN^PKu#*J@r;pwFJSlcR(VB;#2|=7$!oQE{r&o&K9Ef}#d`Jko z6hOu7e)2H3=Hr3LYggRm^~R4n38<4Sl8`~iQG(90&U)j;28K@Ti5LKdBm6V~g#(QC z^7C~ric?Tfeg#0`gzpp-j+;b5bG%@oJasrI1)cyw|!v$9{>eq`cqIi5d@%cu+XGMe6;@Q zey;TMlQ|Rhw?vP2&8i|xO*w9(2}}^1UKk}gzMYwKidm1(85=Y^s!iY^!%bY4aJa$!Tw}v#BQle-HMJAJCmXzYy0+Tf$YS_F@sPvJ>e6=X{^jszFN2yo1=E9d%5x_tgVbw0ub!d(Os{iZYK9s67CKA9VY|+O?)uwnIq18=F3cmwa`5XIo z!++@tmqN9C&+UGgTB3b0p(0!orS#|Db$HNEXjn~=i5=IAx$6Y2<4ms0p32oYrTun1 zw(;d&z+h2C06t9$fu5Y9{y(^yuqX+78d1+y8OmLw=j@-`0jk zv#nacKZHW#PZm$!`V|vwxI*UK4nwzIMuqRwd0A0?3e_rMVpBW1B42{m!e3wg4(
iWJm=Y#hHN|(SGB|LKP2MeY@aCxM;@3;h z1E0EHD(Hp6J;-sB*7M2>$KZ%Y6mA0w!RDoI%kWdP=f0S=4xxbcKgk_qA$C*G6pG}vz1e5sK%ZIsd4`N1aWBquNubsDV9&@()Co8Snjs`D_5>CGMBzK`SmUmh~=2Mo! z-Q0z;*qmuf?E5T*hq)}JvDZYcEqG#F-js=ncC;-~GeU4nJaf zDKAkYH{77xgiIr7C+W`bc{?u=Iu**DZ>N+cY$-`kG%k)E63iUT%ICQX5Wqk~G)vf8 zQvDPGh6KM1&gJu*Z=i=8Xk5rDw2=HlQZ#6HyvOg!eL%-$%Fpr$qi%~lul~oRu&^t_ zMlr6M6MKU?GS^m2(`{HCU{ z@jZTF{eH8!!HYHCNj0L9G22(N2*=pXk-Ww{Qow)Q@d`h+DsA2%&oAF6na8*FSCl<{6z{CF?sRpZD|% zpjc^-Y`mubZ=zdKpY-ftq8mD&SA6}E^OwxF2F9-lg-{CCbhgfM<{jgoJ<%ZQyD%gV&0~G&hz`p586_i zAU^da$@Js@G=&r^w(c8-D@V6%8EFUI;*C!+`18SF^v8`~mn%NZhDV7=-5^^V-UT0BUZ0FX#RL5f7sCcvE;E1Pv21b zv5t{lW!IsOBS~NEL*b=u{eFcE-&TE{aPRi>HHg95O#ZL@&k_cSMRTwk%|UbZ*{#+k zb9KezM`qb8504@w&P$!YcUVyOK>E$TaqcIC@|v;3_o?=9kBqO4F*Pr1UT$R41&N6K z)JQ)zbIcjmJTgn#go=eBnYW=jv*xoWA?4Ftw&y2qEOTY~scpw{Wx-Bv-CRUypWLcF z^x8YROc;z1e?m zLbX0%_l;lC#YBnoUNbNH=f!Pwa=z}WmTO56qM&c+>&*vgZQ8zbFhPIZ4R~xfK>P5; zV?WJRI_T`p(w^Fd@A!138@hElO&R2mhvJrDyV}}ZOir(;qh^H@pK!5wCYbEjtPgPm z4mvme{|`gNQ@j7vXI9C;)*2W8!+!7Ox$B|-{EMaa_J(I&;bY7a?9@@?7e4V8S^wW_ zwK`SWy=UV^u=sAFcW=zM-MUA5Y|RJcV3r>Z8;M*#D^|lT3M9@M6*HBc`@M%hI{%5G zP-@kxyQwGkU16t7tIksIb#o3mQ9t@b(?YR+`2PAIr?K6xEeySHl7lR^+8F*jY2$uj<-N$ldExZ!%~30hY^ zDzzg~SAIeEBTbGcPII=)A=M8uK-@y!mQNMW@m8_W=Q z$B5f|!Ruh6^gYXn(lQx$L{*0|xO%*bl$1{vxA%6clN6=xSsE^NmvPsu>M^EuC;nTN zmfD#ZLZES_>WcV_jaVi0q_nyGk!r4!`83|uv(mK(yk7KOQ6UouuRwc7ukkvU;&Y5{ z<73n*vftN8UQMsC@5kGP7rW2|VOJKxcj4HHRa}qB_qENPrgZGb@lN68t`>rlE0U+#T_U@6V zg%kO_(@fhFGYHfhl=<&6CLVg(+DCdN)TtLkBHa?k^ipJ7O*adhzG0JUI|`S(h6s`y ztof1F6P{k4_K}{_b!^2ng-gg%CmDAQ@6K9kC*rSlsoHq)5ijpL`C`VxmG#mU8Fy3f zDB)qxZPW|zPo}+zE48#9B@Iq5*=jJWG(Ci$is|7DrZ)+rigwb^=Udn5Or2Mgre$_x zC%z}n62xn+E+oy^t;^Qtic7nTELw?=uvei(#=29h(f$uvjIr_9IR9?sQda?it!A{N zqSb|9?B!&SoSp1zkt!5IE=-n<%el+%1`AiZues6gewdg{l*ej$l!UCjk+1GujqD+` zzxWcKYGO}ld134AH7~r*X1Y34GlN{0ugw$pc2{1sk{;pMMJW~6itm07YbfSjeF%GD zIB)i}k89`ki?8X*odk>k4GvsL)VpYtc%a=vE>>p01Dh@DqWU`Xcq&-<(nXRuG!yfd zsD=7R@ky5$G##S~?t_aeKIssHW@FlkBB%?BPkO|lUoiZ{A{3wElXfv^7ABV{DwD#Q zZI_4{z?YMxWnOV++a{{~#BZT)u|I?klyzk+T0W9tjr@YSW;lO)OH`blzO4`6CHPmy z?;{M{s9Ll{#QRbzb}B#olr zcd_2jn1|EKC>}*YoLFxrre#`CCYh7jI?;Xre;LJ?q7GFr?s8wUImvoIBg^95VQ!UUSC0ep?% zUoSrcEHR6r){?G+B};K| z@ir6FZ7?6QRZOjL9y)+$7W`}JHwxhlR#~!y$M5keI#We8&u$enbOxkwR)+9StNM*b z+A66+<(qU#5{j!$VxO`xsnged;yJyn5_|jc!eGeMg4PyyEi73|`%su=VkQjc!=Dz@ z)}0s8ZRQmGYb!8HZx*bdYatT9CjeWeDr#PNS`6(BNQLa29W@_~vTabMr)@G8ZgQ32 zW?y`CF!9=Zaq5ouf%(*zppetMgU1Y3i5q>5%_OCOJ0~<{UC_m;e}EvH9(3PeUS-Rb zFW};*pN(Gx|1#}Mvnzq<-o;ZSH}^rM2PTx=`h>ChbOH~BP50+ zHoRElm!R2cV{+tigmvOnUn3n!@$k+GPjMI5;?x83rNHzcCc}BHE!WG37aM;zstW#P z*_XZ+h@M)UvWeegJ4&W(#B*)ALOKsajy2FqVncf!pPadFgUc3>MR?$-`LWf(+cEQO z-8(^aO~HSpok3jKF=HHyL!t?Q{XSZg^ud$m`+MBvD>@}=qsYtnN9q*m*mkOb$fxUMckD4m$Svufo5`cqEh#Eex=jkvEDJG0t94dAyvF<%O6^f0wrui^ zA46>AsV~M;$~^EYrhg;*z)Pp;AO1J=?ZRGnZ?i;CTq%BkrS+i=_yT z#h8BVJ9g^?X7_8dniQRn4cUcxM{Qfclivaxi=UgjQk(Lzv(o`$U?wnqK^9;5@B9T> zb&8vrHzH%*SHr9#WB*yozR=0epP46}R5 zPVF2J19it)x#tO{#$BGjIjQq+db%lA_ZGtE{rQGe4p@` zvy3gQ`h&ukml2t_kvFu)B3cL!(zKg+E@P5A+`UphCU?+7MxQ1T=l?`cJn8D5gHbyR z3!&6~yOG!D99^1blw!lEr}(;Tl`}PB2r_ zZrbUm@Q&?WWwcu7Gv~8VSqhc!)c4}O?H%uWrMS$a*>q06HvD642NW`@S3_K%p{jjb zEziA$RpZpMmW`>^O3ilbVgr4CF_ZhB)p5Y4j8Lot$;R^Q|u7sZ9%H>mB(Z!wN%H>mB z`44x7D|Dy0!uLO12|dM?*fU(w#+~Ad+8M6!o#6`i8Lsf3;R^Q|uJE1V3ilbV@Sowz z;XcEa&@)`&KEsvJGhDfRiYvOf zGhCrN#TEWDT+zjy;tKZ}uBe^iO6V!BXyZmdgY7P|4i$_&nofXtLAmhB99Pg5C+nYLsdjkZ;K zLS>AtLZDzo#&_!YI-d9$O^e8SvrHc4)%1ZO2HAZNrm2KRlcyFt^ez zW}X%Bf+*>e`e)lVu}HV!m_$>jy0>4}CCxu|$A^A0JAle?emvIopE{}yorb%De@#22 zuTF;S@=saB?fR38B?vReP4Scfq8hCs7=h%A0l(PbEPK%RM 
z{zOGBGM>Eo0!>r>K1^<)iSmJUiu7&4a^1F51N@=O9Vi#f!gKJ_pOB9pf3Fx+|or z9?Uodq};JDnaz_8?#X^ZCvjaf(|wd~DCO}&f5U^MtGs=_tKS_p)Bie{U)BhVS)%mP zk>njfmy0CwwDEgflCMtE{i<@H^Gq!zjA|+-)=@Jn6Pn%XFsPI}l=6sWpg|&uIcI=- zwcJrNN8Q0(P9sWuQOwIv(*EaMxk!lJ_e2kB`HUp-CAD7(s^YVPHDRfS-G&~LYn|za zZc?WBo^SKcHSs&P1FJHaLTh_{cScjKgXhmQQ`Du4-M`5uAT#~$*3j`>5T#wR^JB@Z zcH^AD^%Tr2)Wl8B^?}I+X}zH5t~8;!@y8*MJzsuwZwNe>OpBSDuGfT{-s!$xlRSxj zcGu+bZZZA0>i6qe-so~s;q-%1^pMGe-GSO0Mpsw{s8-V*g|lgwOm7QCsxMyma+6E| zjcQ?N{P!<@p{6?BU;JVx(T@D=*=?g)gW$Pj`Z2!McI$j?c~a-1s-OO&&KZO72SHUh^WIsALXkymeU;D?1LS<3bi&|1;(btdV3(PIk%tHUEd!|{Y_Xi_7l_hR3 zZb_9TTR)bpIJa^$OMQfUxmlKXpAmK2qNEp0vTf1Rk0nFz|7beXc&Ph7>Ss#`$yU}j z*~ZA0ok|iDGWKN@8heb!USwAkS7)BPG*E#2X&NoD>v!}L-?jN1~*?xkd;}G0XU9Ia7JVA7-i4R~%hx2PqQHUG0 zWgozMS29ElE?^}l+R=4|O4yAH@SclX)n$*F^FMMfUl*U=_1 z7C$8XPtQ6%Z;-5PZ!jv4c-+F1F(J-8>z^@^y8*Sz`{KQuHfJ|HyrI<@P&ORG7t1Y<9gTX_Q$cM5x1LFg36yy-=3VcGM*rYf0T(B zRb~G6Uwv_78NvPifj*daGt-I~|FKb<9sU-v!A)$!5i11i(!-PImI?)jW*@Ka^sLr% z&#il5?~ZmJm=^wBe3uP^RwalF0@bqQZh&bQoP!{~seSnm? zu$9#vza~$}O+xNJT$M{qq>hBD9v}LvF$ZJg?z^$%&WqU*C*<-9#CMH%+CkRqLRI;J zGd|X-30Xf!$Q2Ms?HcP0g{%jMs&YuNnT>ZIhOA#DS~}oo=4B2tsZma=&0z?$9yom4wiH-IuqyYxu?={@WGY`+p^x zXQ>end?ZsJA36oUo9QHU`EpX#|No7AcG2^-sqzoTl~r~wqUSR^w?5zf(Yf_fX)t2$ zNBc;7?ff4>w?#v1HAm88)M=W6IG5Q$nT0fT$?T7FW2Sxc9P)&tZpD8%&bDW*WpW{jzU&E29urrmSGdrP)8msK7+Z= zVao^&>NUsX7XK4PI00O%7)s%>J`-B%AQO3 zh%%?*SA~NrT@U6G^-#{#g;m?2c2_n>0s&=8eY&aj~Bu1m`sgtYdpcYq-DZ(zw zlG?J$7gXcQJw-f-LQpqXVL_dr4rminP$tyJtIA+ZgHxM$1!YSeUcEo--O3dd_Lzq0 zc0~vWfv2fjiDf8P>i62?t#jYw^bzQ`(`!~!37?k3d**88W*N7;*@*d&fjOSY@fbmCPNU^msujU@t$F3JHJ8zM0;+uOc;tt@LnE9< zE3se51oT?R3hxM)(GNn%;v84xSXin!`;AazwD_D{V z%pF**C362D#;>_k{{$V0m^X|QoV&SwV$DQ5>F$zD&p?pEJ(2AONqS0>K}es6lf=4k z)p_=P{^#%(A;`LI)p_21j^{QlHjs6VD$JpO$s?66;M(O^$@Pe@$=Sm^u{Yo?x3*>P znNDUo(;e+@Yu@9xeRphI55;>NHU+7(+1$}?I5Pk5LVPk|>7Lsc%1 zQ?Fy+09kV#du{5HoQjpa?X+KizE#U+)bXtmV$p#-tB$5Cyhn?6U-ZKTow zT9%O6J4pByHM4ht1hJFB)NAUgdnwN}c$^clAz}@gBv0}wi>bz|4*8Y_`qj^x-n$Vn zmW^Mf)H4eZB;8v^h@q3ZAyDUTvLfBB@xR)ItXZ8*i07ghTE_X>)UT`V`PP{F^)%h* z5PZPS8Dr>)q3NA|w+onw7Iyd%vq4@UaLfiR2x5xh@p$Kc=oxx%Bd%5ybE>W4rH8l> z+Qu0+m1dzPCATu&{|0=QU2;SQ#v0=hZltlsC$*+99yj>;2H1o4(!jC+IlM^56BQ=h zGvi53sLB>p(=Ez8LKHz{NapjX{NeoMs`7{iq+tk$eDhqGCgv}a_S3S%IZHu z<{eL~|2jieE)IU>1=7l$BznsjQGFu#OWK{dy|QP@gBJ-7HlKT+emn9Bo~_q>yznsK)cq#O^!A4acwfs>#h|nQ z>hsdtu?8i+!=<`GC;v6(rMD*=l>4fb$_2@(POJ3KTDZfT9mXYEpZuGy>7TvgZqs~z zT&y*FiuuYjNM5lmd|W2{MWCV+T_8tA^shv$$qc-KG=Kfok!hju;=q#-wEUnLZ&&iX z&8zE}9sNO!K&;^X7lF%8bXf)0W6Z#xpX+WC-tsm8s}6=Cs@97mhu*4h6$k2ql}>HsP>=dnSztRg zxLy!B)UUo(6c}~$#&l-mf(J1FUfpAve>1-tNE`~TY&u5-iUYZv=ofQTjtZohD7c;( zGDvC^O);UJ8+3Svl({mK#>$bA%7f~eF9K~?#@hdyVC6MVrogE1L5Dn~yTuHn8;R0IR`;o^ zmIsEh-j)ZcCe{)DDjFr~W;w%X0Gi7+rtM%YK5)~S-jY+p=Egl``|2QMJ$mrNk)Sw9 zMOPC{)M;b@r*_Y9PK{7N(_vVXsYKt}^>wkq51c{CpA=k$F;SApfP<}l(m6H9hdL4) zTjKO-(7_K0$j_!TE+jx`mo zsdro&iEbo0E;*}4f-aR$rxQJ3SHK$)ep~ z6E%c+V{eQ#jjd^2EApbQbSs8JOkqWeAxhE%R+V%+y~1MrI7R2Oe7$Y8?qzv+^AXEL zldz-uk8vhp>ZW{?%{m2E9_99Ey#kA|aldG(vWe;CN$^#4dFI_s(@QM4I84*aE=~@( zB30BheS8vp6f}X-J81b#w2P?Z zc@OA@%IFhu=%ssU7!&jtH$q&2?QnbONz)-b&^m}W|I<3pivOo|pdKu(gRmF_TF31| zZo_(YdmOafm#g_i21S&a;GaQxDxw1CqxBNf5Lbtj!?aA5Uz(z95Pi^MdNBc0EIR3_ zV=>i`Ua4y_W!tQvl2H=IL2Sd7gq=1`|8TUeG{`geI@){=LMuA$`%&z;kc&m@)ki_d zTsZnsPzh4aMPh&v9`2_#zz}c|t45;oS8wIopz~LYks&T(11mw{&RPR2GA?3u3#f!u zWUehbVYLLw<09iYy2t$GU;BdF*O@Cu*pZUR)Sh&+5gcnw+ZC%VO`Qp@Y|oM@LO$KN z?dF`>IPZW)8eSPvTG4Mylkw>)p({lvVC=^tLhKFS$2tz%Z;Gf?SN)**N8|Pian;Mm zT^uau2PQWme`2&8r_jwW39)ejOHseM{cDTFeVzIDj!aR~nSzsUwyI-|xLu)IeC~{F zWjn^b2-&jX<>p$~XzYN#-Faome?`76!`bv9fIh5zi$V`lbA|%^>bQu 
zblx5ox3Vnl;$Uq&u(=8SgVvgwy4(Enb!@!kQp|60|DYFwzRnzbN3|)bOx4NR>#}2w zDZ5}$B6H?OW&0DOBIM(Z`)v*T!~4&1k}I|@ADr^2WxImJhqye3hRX-LJQ{shm59aV zt)N{#xVcg>y9slbhE+D@R01k%Pyu_HbD+1iDXJ1sa~tIvGaOVxe}zj3sQ!NQE)J7` z46>ep_Xw;7;4o>(eOy^`ckoR!oLH}VM5$j!$>)XDpJ+x+bM0T9=yj&c`nhm|mNDK>2!eLRVR*t5=lopGV%qgBhuH6@82Y-1 zVcNMZhYsHDz2WO}9w;dshU0fa*LbrOP*R#I;!OVgC4TKr^m$(wnQ89dc)uto`fE)U zS+M#s(R>erzK?S_AP~pl=fZ}GI$@cflt{ga69a)!nO`#Ph)zzKC@}WMn~PD$q^u^= z8cBVAE+Q-u)~tYuI%AohQbPR!Hfe#tDtmm$k^(<>sz&!oMt?UrG1BQC*z6@I$Y0^J z6AexnU2H~wPZ!l!Pi#Ovv&`!!>aY}S;EOZ8Ar!)vk@$7liG9eRFvFg8V)Pv1k%deAKk)UC_Bo>E10o@Us4V#~)H22m3ERs%{+q={uGQ5_C)sh7PI9UJ;mDWA({ zqM-`19;oNQPma&X0Eb0dx>|rsNxGV>t}I>6W|$8f@Mk4RPL?`?@9XZlX)OEI?t z;^Pf2ko8M?4wBG>^kt{pm?$;rYL47)eCSJop^lu4$>bsG9X*F!q`TDsqem1aCtW?D zURUb1$%>0ICVjAu6AcGZ{BE!TiF#7NvXe=hKyA4fzcYO^^rg`30qyaIE0FaodLNDj zCFCtTd10b7qyu@KjgprRc(4AW)T@RDZOhAO z@y15j*1bXr&cZrBHj?gEI^Jy)TrGG1?o8jKtOT&~BKI3jB<@N_v&cg-W$wVA>!Yl= zMsmvF-RB!gW|dQ0YsU<%I$!%>Gi~e0EI+SP5BPbNQ<-Zb29}+%x3Jc>b+v1U4Pc#w zMv_mZV??AtVY&O9Gku*>&E0W86ZS;+mo6w^{^Q{vGuyN~5-vT08%tU3OfG-Lsj zPqp8|6Z;SE^nOkE-o*x>SHwjEH1F~JD{(4urvFp;;KVZgoV=XrQz8Kd*JuL)7RK=f zy?M5~4>gtX*2%Vi%$atd^FLabIakIUvie$HZ_=Ur__;;X`1bYu57u6((P$Ur0&o20 zGAi0Qv4EU6@dvA^+$q?1{4XT@nYPbblb^#u*$j?^o)j=hWa{h@Jh-u&7b zKJQ*^XUwf%bW$vIc#zpdeHkmGpRVn*x$fuS@SNXgi*I_SLzj5tYE$DMDV_2gZ^*P^ z-91m-(}~62_PwLw&|vcS-0_#QMuOxOa9ASI?p^ub3!6Kr*sg?vk-YUkPc@ahRO9$| z?V0SK-A3!mz(3_@px>tIPj|NecLUbMyVKkD;Kl%yIc)kj8fX7TyubhN!El>kC|!H} zrKb_E_*UMu&1bjS;4axwZ5dYJ!#&e$znUJEoc5Mba@#6Hymd_I6xhb<<@_j z2m4c?S>2r3kNgV_f2p1Olj!GgTsC!R*N~|b3|-k8f7xgxle_+B`sZgi@{e-og?sGK zAA)SDOa^@b3Tnzx$*)T&`l_B4*NFX-iUGkJJX3s}6Q=f-b)iYakK}=H_(u z=l^wvN3edRjDkN#TT$elIaQQZ)Gy*hp>5$ka{he5&-{2U)HD4dD3#=~n!hLodBE4c z&P*V?y+f?I#U+Qd5#K?XB-(1$;!#M#=~-mBkWU86(ZKPhjT;Gp6c3|&uHqIYSX9Ky z|EP$$wb?ki5XFdW#5fk#?po+tuL=Dc)CAK+Sl%4{urkZ(!+DyRg6h$nyH9&P29Y z`p4DIMZ=egUUS^x(g2c>HI(&0NG%#QtmvWh|1ftBuh4c^`G|!YQxP`|Z+LLbj>dRt zqv$EC#6|tUBuQdq{SyuCaAVYCgzz+Fr~9nCc9`)UON7Ss>`t$@yLNE?e4x6e*aDI z)avG;+(Jp(m0lYtht%?>LhcjEg)4n(P+_U>o5ymol3(BUhCunGRyK8VpGq#i?c;$; zO8wa6%q_^Hp6xYp%llH+>jxFgXKZTb=H}7M`i?_o z^H(6()zMJ zC+q4|9|FRrQ|TZOHXBvPfUt3Y8V7_;w0!~)Hgy&~K-hFD^#ft!h6p_dy~>~3t1<=^4W%rkyJcN5RiX{!ZrT7Yk-a7mI}_Eum?*=} zw0KGCm1t2AAeA~!f{qu~9UMsQiE0TH->gL)xQnjK_Hpx(684`SZvmHWqT*vv-qa^Q zI|xAe$19uw!0Q41*LYnRqirTQmR|kp4UXV92SI0|0}1O9}uO1}QiIFknCS0)PSO zKLh}VYu{-AU{G)I!H&SoGFSiuR0G1Lr$eU02tIaQ?#j;gOoe~Fkpe3wss!L7*d}vyS7J_j?7ix^uqptm$9wM zbXa2#)|0jrCr)f1Fe<&4=tNJ=@jG;nQ`WZZBxK!dQ1h@+ytU%QIn0hCQiAPy&ti@r zfEM!A0klAc4Qc{t!5sUx+i^_`DFL8`vTI+dlG_+{7PJ5_<5@G_&QQ0!iJiA8GiLz| z*s~2JS%nD^7Qk@L)`@PN<0k2@w4~MlZ1!V!%wNnV-}hf8~#4ZevrD?^ep?3UqTo4K*OTA z8ux?ovI9G!+=INo>pTT=o;E99kL|nIn6{VqEb*4sZQiW;kouP%|Dg1n&ak|M1wpA( zE7SJ@Ht=Dh?8uI={8N6Waeuzu`SG&SU*ZB!Ety?!q?GdfY%Bgg&VDcUd*0LPtZOCw z&dfm$VO;i5cyx{%oSM^^Sa6A?h?F<9P^?7x&-7c#(J762GI)%pP7TweTnOzl1Irw@ z(93uK-7xV`x06{8k{S-O0{8UsH~BUK<0X%>cziG>%xj)xNb0hU_xpsVL7{LlHJ_&? 
z8w%C8`eqi6>E}Ib8VZTdLENF{-$^JiA?YrhF&tEEM+NEj16X4A4o+qPW1v%)@3AO^ zcGrbv{<1L2zd>$Tf2iA)Uv9R8KOm6J$7J^u$WGr-xW@bC(w&Eh>!jpw>V`^Zz|C>< z5EnzYx#7)da89##TH!*#(hQ3hIcXZA*4_hEqI=%SmJ!YV(v~#qDK3uYX}8R@xN7cZ zv04vHYtNa*o8e0}p{KW|@%?UBRwqi-;bOVj+{{K#sJ!5^jjm#*^1IXFQEHC=4S=au zyEjjMB_V7`Pk!adm|Dn5$&E}u1@`2~vQ3WN+p%8>2xrpRuPhl;n5fk0x@r42;Ja+| z$gY3#8$Yq?S@nC@XgkZQuM)pxiG*hX@ARYXO#HvN{3=A)l3KoT7*@%(gh=nat5u=v z{dy=f`NYZ1(WuF*-_7y$XbItH>dJdZfH$nX*N(QcW_&&Ps{nC>#D7L+>Vib?l~6^Q zq1gw}o$j~wJcwT&*t}7>8`C1^OJJ`dXbpmh?D(;wd;B#8U{~R z74(6&5nD(C+Qua(8)%#KJAFXgs0a(?~`k;5E<%75_=jCqp7>tr6Ap}q!0EijGCkmHjrdo?AY`ank(3c zAk7L#mE10-pK_*)HY@PfCRO{|fd4K{%(QP3#jld)jhz}TKGr5yTVkpPS=V#1UbzNnUF^lbFoWDe!i5>=&nIYs#JlaQ(_uM3vWvl1Lj=B zXmK=3e%3S{m~%vh>l|&8t3?<1o#=)l6+*LoTQ)C%S~B9LaIU6uD_j@G>>S|oKPcF1 zB2saD=-vbNmN?MG<)tJbjV{Hz^pva}+3JVS^il(@ZQQ5 zu5(996c$|o-kXL0ebnhWM$Z2Sz zyp%-b>r3&`J=JRxT#SD88zmQFs9}+b?3*6g;L)OoJi+jShdhaYhqWuM{-<*ESSrWP z=YJ}vfTeP(H2$Y@$SjpJd7Pzk?0i@%hjEyta;h{~DkqGkaug?zzX#ZYUI`6g3yeqJ z09z<%U||bkZyQOfmE=g4iqZQDRE(X^DHgU+0DLvqDvjg&-19aC?;-2@RT`qNQ)Mg2 zc9Bh>t#ccN zD`u>7$E1xdAMAzc>KEbmCQTy#Y6)4r%qX}z{O~EW`+3OGmPH>e`7zN@`G@-n9HDZ5 z!~q7OL=b+__2mL$c;9|Fo{+N2$q3fDI@NgJ7X4psiT=f1eSr^~%z1zdNG@5`@5h^j zp8g}KmAZ76AY6R^ODO@nSB}eOh!L{w{`D3Sp6=xWLr`*#XR<3qeU7~EI{ZGAd(OP< zeF*Q|d#hTy2qAs7bTmij>8f|R9a?G5Bm(Z|pMPbm5x9zvS4;H$G*zt1|IGMJ=35Dl z7KGwtd`1(Q8~)@MsY_M|haYGF!u5)l&-m$3`E2jE&KnB)O}M5dHA1#&)0e>L`~v&o znGMdl_hmC70*3F&wREZIwAF8;`8v6U-ivl7?k84w1|JpFuChweL1&E?L2cf0(-ss1xA|Ft0 zm+PAP02&c|^}m0&aA@1Jf6SIF6{}jOh5Vsat$vm^DkTXYx*4hmi*`1*2jt0tN%P+-#mV zC|9%>8J6LBC(or zpLtBnaWbO0tjeKpa?_BA!&R?vyVxQEsJPuH2$Ny0P2rl#$`;h^JyI>6{&6??1LcFB zjzd@DN`ISA=ZTO*JUU!+MywT8X4t=}FyIyzrrx!MFLhAlB`>RsjeFz$c%Rcrhp~lb36Vt9W zY|j3v`<13V&(JJ}&ef*|H3p|c&TnY1Z{>B=G5;YYsx=Bu3keYc!*Cw zrE@ls5PnvNm9_X0h%J)mJBle#;9xFq$ugf=HR6@6?mN z`dUGCWUOB30#lDiQ(iFtko;J_0c8CQE*o^ZZGEkb0CqjBc%Z!3PqX|0K&|N_NfWfg zINSe_g%{DheOQ#(up&u)q*yn5&nJNMu+^ZIFcAB!eZ`qDM$wqwvD4Wu(&F0{4;`wOzJu4gL*O_kBJ zeS-WwinSQMSfugP$s2r%dPPIa)Q36}$ae9{G$lRTWTd;PPgO6tDg~8;>UBjLlQfsM z46>aMc4`cZLmAFq@bPaXNiJQx-6j-NtO1~E%h0o8K`E|F*C0b3$%HpO;Hrc@1pJ## z?k-Ay_9BAOMAEN-nY0P77DIvWhM?fpX8E_S=e~uk%P_`8R>9kKFu=dbhWQC>^>I+} zfPaf4G$(P>PBO*;|8^PS($}|kD9jJ=Z$;4Ctc3?cGvMEBa(}Ps7+ger1b$fs%q;R~ zVKG$NnQlwLYZ&m4T+apk+i}LYXkqeV1q|?SQen#iBYk`nJmBBr2^&e`G&#mN;NLDI zqQN__ykX0Le=CB<(Ci~06E=|8|F^q>;j;ZNK;)p1P$FFITm!M5HL3!B#LbU$;3qv5 zaQ{ct_y1LZ$i!4#4dRIEv~Fv`$=TbB0v#)dlOw*wOdWqa7q;5-hm49;{k&BmG#j(X zc5mrXlHixU(c}Gwp@0h@`)-4m$P3hFy%u@yF&-sbf7zoPA219n>{$dS2J(5NfM*uH z$aQb|T9WFQy>aZD&QSNBDe|ptrO$N*in9TW{P$L_B?o`m`+$Ah8TPqniyXQQ{rscg z`0U+9j=xK!B-t-}W7vLhNqR=ef!m6opBL!P`YiJOWss6@eA%1C4#b4L?)gE!x2^Jd zzJSY0B6CKDuOOKmm?^tf-gIdH!Irg;+SbGl*L{5$wJ!2sIOa}Qf$>^h)8YMxTQ_`k zx28hWh6*_8H(sUN$e&Zr7abLq*ZkLjc&u0Cy!(m?=8yC$oqxPceP5p%GX4J1$G&3&7p$4cr0;$d_qTdt(O$Am@!xM# zjJ!*lgaBqoIvakc#LDBmex8Q~GvHOdtwc-R3?ncZh?Azr1%nl~dEd$5pdX~AKkfHO zuV!YMnG~y2Fw_5AgAwZ8+9X*vaGWF)9$iAd`CIR<$Cdo|OfMfz#x?J*CRy*d`}70) zO*Vh>63G{zartPaPCGx+tJY;orOcqh(Mbiv6-{|1WSeIki<$$|x{vhAbyuZQflNUs z6bx0M^Rmh2&$t)0%BQ0q>DB3e&rboKAUdUBq+&glxi{VMHLW_zG$dR}Jq!6p@3t|>>ddqbliCl=eJ<}GT< z&D!+}(Yc6OHfGOqHRXEk-aes=5*uyIa8sIc<90nZbbex*jTyF8Q*O|%e~d0otg(3l>HL2c_KPX8XTY{ z^KeY`A;b4P-~er$k%t*RD*ToL4p4Mf9$x06^{-xVfDGRc!842HcWB@M6@K%n92m_` zU>zW9-bTvTC36qf0g_L4X7FsBIbfBPy<~m>X85pBBr%&N5)FTZFT7-J(Orp>FjVY; zTbIb=si8WFg*l#;KBL(ebcD&p^6`*ONc2%opFZ7)Siat6BS1p$JK&iQ1vhHSlBP|}7EJu0WhvYpyw z^fu05@Zn45QkvJn2d1*|W&{o8ue?o=Gp`1VeSpE)5iJ1v5z&93XYuiD8mcB5iOu^Lj=&WK(Exj7=taK*5X~vwLh| zi8HfLIHyDy7L#x%G+v&jI5?J!H!{^F^h~TBURZhye_kYINOg|PxU+Gya>ORXoy7*| 
zMmJEz70hHKh1`p!Vw@PToD$iN!*$m)Vj-KNgK7ud!HdoV2Qa%53&%L?`XqC_j=*9P z?;MWTrl}07CE;J2>ZbI3S`%0}_7*;0Bz2lq^SSfrX5|l?3>#L>NB2qtMNnbj7^~(Z z<>kaM%kh!|HJ|GlA&^bsK{Yl|^HCV!#Oxklc*Y58J~>`TVS5R8uEm3z&!Ac|KGsyX ztOwM57M{I@cNIx#uxdVcNH;6zZ8Dr$E}5=(14UM0K#Jv(Nuiw>);V4>ESD@J3bH9a z*l>X5k`1t9b|n{_I6=)P$4dydmv{#fZ%k7iY)HcIo9beE=GO!loZiC!6-oW;?HV<8 z=&j$FW=AG$XZNwB@rK~n)gtx&w z^DlpB5>D59`1OxhtlVW9C@VoC0M$=9fPQ%VO8k&BW%Uep$jr5Y8 zA^jF>$qXzKH_DPK|F4Gmt=Q`1rzKNf^M^4=eSLgI43fKZ?Wvtk3-ea_(J=oRQ&l)j zBHtmrY2txi4YNqxuV1h{yED+(raU`l<5WTE!0}tg`kfUT@7SWBTvY#Z?aqG%HWci) zBc1QgU>mxmynV!4x}=K4_gMv@5+`oDruNcedhpIqUMQ>=o}!WsS43iP02 z^b=4J0&Z6UjMBbvxbvM#6v|z9hv$Q&;g$ho_5>iQt{MEeNzG~-0n1=WO7hyW&)*;p)q zG21xhG)OxVMS{vRjznEjy;N!-@U)!yt61Qv&&DY`CF;2-a_tGmxu}b(y``rFa&wt! z#R9pG8>ifqXosSPYtJwaMVY9c0tKO*R_0)FMoz@WDR>oCA!?-d6hmR>f@*Q8QU)fI zSyh~YvEDf4QbjwqL#jQ?IJR?1)w5JD<7pj}QJnEKc;l3VKUHUkTzitCvvX1POX=BK zIpY~EQv@t_EZqezIrU_o`LVe6^!+7M>B{t#_cy{3HdDglwS|;a{-qQ?*j!vtTW?Ce z&>RX`DLKZkJ$>wakz^1gOqf6u$m{hE?JK|sUD3|!rmx8oo`C;nQ-eykaMsEOw_ZE) zO=meoUQLf|EfsaI7Y(#j?r>s!Xzt~H6@S|=Sr9gdaVU)gsDaCq1bmRS@;wjkX)sUS zNRjq4^`GVeb9a@_407`+6(&1zJgpfL=IHW7fQ1g2>R|}_HSZ1pJ=1)O12&iKa4Vtn zX0XeXhxk=X<-Z=X)42}YO_UfvQ-^86-{qpt&W!!$Q~FW|xX06&kj)D&PlN>$68%iA zFwy5~-tnh)7d4;Kh8gBKd`s@5qFkQj;$^IqfAtBW&eXgcfoGPAa?uoYa1J83D$OR+ zdRd{rGH(;b%g^)x;L0Y_4q&1UY7`F2cRy?{J_a*<;*b*u@MagA1bpE&Ws4p~lv0gC zKinEG8qYGul-Zpb%gv__k>fP{Opm|}vmH45oKfd%4tZDlOr#yr;U?onPeL}`T!eTR zdv=;n<=_jgm3vvMBMwOo6h1#w9XC$#i8K@@TCL^*D`;^M>Vd1`MW3LKyICopa%L`4Vlzr&k)O0}OUgjEbpbHYTc)I3OBp+>t1 z4Z!QlMO$8Q@YhuI!zEp8xOtnKiv?h}6)zjMu?3YL*>qw&7Aa<%<l&y1(jff#3*I@Jd1RVWs86M}O>!ZNF%yk$r1(@^3XT*j zEaB~RW}xkgR1E}GuP48TY;skN%DN}zR$RJ|**$81?2u}ovYp2XSWL3g;S@)jR@J#@ z_}5oNQ+ij{4%#0ZgwMa=?W0*nrYjxxsSLJBezJDH!$kB-BPF7u*Edr9a|v&WGvltE z2c$z#_Ih#yWRs)noQzRYW<_r#W>?7m*&*3Jc{`6&u)QRuYbn+=jjD54eC!p`vfjlt zF8gPL@U9o2kPq8VQ@Z9;dBY|dyQX!|MAW;H5?s+6xGna>y6=5n<;6y(egVJ-HpS z$z63$Ha6*bMejY#uCTq+A>%$(JCBpFy<{aw$_<)!RsA#k{uNP7?~gTZd#6G8zZbki zH0$kjC5TVueVb&_HSND9q7L1k;~D^%TD8NK7f{u0niwAyEB= zJ++A$1*27S%xD7DTi7FFrC!*%g{T$UGk5l_IpoLdGp`tDK8z=yIj?BttzDn(5~fak z0zK0tGd&UHdDp3>eD0dKL5=wDSFY3U4&QILj&lXCBxZlQ9VlCE71TPB{OvrKm0v`? 
zty7o<(N;ZLwUmc=tEjbZPOg#}^s3Ijyz#E@MWsB6@`k%Jl{I>g=gj!Vr<MU?S!!1#TLw0@l7i?zlc+Lz&BIP zOmINFb1&ga4)kz)oyLykd z;+SE9CPvzCew7N}z7V-T80nzNr%Zj9>+i2n2Y(yt_f{!?5^R3a`1p-I(<~eeYd1Xi zy(lV~T>Wu`sZd?(-!+<0bzZ`;>Bh{M17bL~gljVJaBWamlh;cEVecYe)h8#nzWrMR zE@AwC(#0tCA#G)Q!x>Es+;2gZ3O_J?@LQFLv7dScb-V4#{Q=Jdg@hq+RNj@IUCo}S z_*!e>S(Zh1wylA}@0Xy@vdB0iFa$NrtL!X`>?99tF6Zo-#NnH0+<%YoK~};BG~;?M z%e7mTuA`j6(??$w^L&PcDdYZ=dX;k9_tG5rFUg4UD)fYCAor(h5LBh#vHj`U4?V#Rn|m(sEdc~CdjF;4 zWv+7n>IT7!^t(Q|U>U~_6$CE=-@GbQ$K%;q!3#HUJ*98?!pZIb*WAHy<%-#fUpbz_ zmg4d3#oc2mz$FR#-$X6z^UdTkfhd_$dzXwSvVP!CJ}n}AYC50);rQ(m5vRO#kZ=ig zp_>EY5%fZmlbd)7b-g*df8fDduM#TKzrp`f>d-poxe@&gQh|_H+sF738(j$Ow zn&N~Wj(id@sB$Nrc6916R3P$U09hpjWI$8wQ2t14zd|__t9(7dsW#OYf{~8{25LeAXnI?6Zjx(y zQ-!XJANu%LmD`M{{IwjdJ-3tcW~WeI-Fa?iLD`=Ux3&dS>2a-XR#4a*=7Zq5)}Zh= z4sF9>DXtu!>94=OYX6Q(U14wK?&ymd`HlkOX-ahUaAfYE_)iPreTKD?v(jraTc=wG zz9;{2i)hoP#;nQ*J^v&q*d~Lz-_1Qc0sw_0GItDVJ#m8VH`+{A&l!A)?&)Kb4e7dL zHfs|}+DtLyFrvqP&1nC+mbAi=$$hVPFH@gp)oWX;YH)l_aZ5j9upq_9%{WqzsWCehd5;4w4zP|YLH0uI>F>55XD#U7b@c+2}*B7%R(4UzMAnP<)fFYD@|6~cN zpCH`f7BLnA2T&mvv~eDMk?$K=vH95rFZxiWui2V_`qnY4dB+{UbgAJh*4@m-}`3b|fgd z_NylR>lGB~dVt)&@Dm2M08jU;dSH#=f7PE|z_g!mCR?4LpK!joelhC^_mD;SB{l9L zC7dAUvd(3PCgF#=m#@lzTlPlgVSoPR9D4RuxRg?1fgP!+F@1BE1pmY6YJ^g9CzMZ_6aq3HK$l%9E}#yfXVDDwF)CbW>H zi5j05*pRT_4ixxg0k~W0mX8=(O!xAl^BLugCwZKIYM;QH5s$|{i zea6>sI{L(CU%zn{444b#+gdfEALy3n7q91M@hxH|m~NhA6`v)8)^9yg3lROxXX}Hy zdLzofZPOhg%hUhfi6^iBe|^*r^wD|({E#?ihrKhUY zj|u_$=q&xqokVgvt2Y$X$m$Kv7O!W)d2=zJKyL`D60y|LoYR9`fD#`)8yYm!Q&nC- zMFD*zPyh8RjywbdA&pF<@`LtbgW~lxIFBP{fN22=K-Nngjdnfu3yQ?FXVrrmdUZ;Z zs4yUq&e9pb63Epsem7h9#()RyWgW%qI5^)FW{T+oYCOS99j_=oVGF9^(rQyrTSMI) z8MNJB+Qh%ePLjQkbn1n0C2N?mp|A%F*C2i0jzhLw`kHm2H72MXOUj{&~;& zjlv{kkjamtHS{#*vxzhP)rQQ$uDI_@U{)o_><4WPHeL9+&zb&vLx!y@L3+tB0l9kN zhuRagpMeYgN-V$rcx3ZEX_T3SHCI=BfvJeo>0aZPn{C1*& zgzz=s_$w2R|9KeD(-I?`_v7Y#4TJ$N@U{tTghoh=03f^k=do>J=Ebj|1|ukunQYuw zs42r5PWY*_&BrwT8Uio^i4mUr$(?=%@58kFXk`jG8bfgZa zjz)%nEa-7=5@_&9r~>M`MfGGvhUcBpR*b@*|v4~T*y zO?AXFN`9t+1qF$^!vlRhn(kO+;f1gEw97gV-&Qx0vVA-B@(-7fMuveT=z03@wQ)MfQ>TzkQ!4zl$Ym_7(sW=-la9l-R# zU=Tl;)XAB{fk~a2+breO_^BS$aEL0Q*A<8=Ly(*~0i}(cxeVm*oH?1=_y9$s;ZO(W z_2QM<5_W64#1ICDLU4SIMB8Lzh9R&YBGyI?-b&v%&x^ zsAva-SRC$dPvY7Utu0Tipba{hRn4J;ij0u|Do19o7iBgTX>?^ch6cz2+@D5}0kz8- z@L1cRjWIhST3e~*wB!D(BkIqK=xnTF&M=1)9ZfMf1i*rds3RZ(!libnuN~DA^uVgx zte*pq#T3y!AnT(3GHi9d0ThD-gBh?!n7kTW~a1LE6Nv-SU$>Lk_h(%KtRh;Nv-FY zsrP>?N(ok%!VL8*L5R=6pX#qJRVF#}9#r}Be08p6e?`raK!%T+yVOU+v70S9g!6*B z{&|Uv(W_GUPcu*aGsdMgrSM}q_|;ox)w=lAVp9{6_ngqE@4UA9u3{>dm6~3SaN~AZfn1{c_HLWO0kZVJ9QVYEYDs@u}i)d0f;l=SL zX1+fezv!!z3*^s+fqpA)eThqgToM&x)cKBFO}`iYFlC+95>jnqwS?S@XzVUW000VF zLeSDF-?sn6-kNUc5$kd3OZ*bhVqm9rzK2%PJ)_f8()~i*E(JHGSGOm9E}|WEIhs38 zWM=t8@ms!aIjq?OqyNnwa7aK)kRiG;@4)QAo!tj1Lw-?i7iF7; zcTH zA%I%SHyapNJw2NoGDdgUkmNN^u2}{PA3>H~9%~xYDY1P{^z$2%0>-Jgm$mbeL9i~N zHNNSL*waq*@C`|>uB7kFU_dI!x=WGfSkJLnb_=UN8tlKV#k|^8=}gbq$PstS z!L$qSm4QiV@Y4%XLUHeT`vBG~SYTU&Y2KylL@(UPE9*ummp~4T^06!aeK?HhlXaK$ENof z(+=#ayj2%`F@F?wLh5pt9H@M5xbm{Hn^qA}`HUUvE8xYMoA2R^a zT=^Z$?o@sUvpW=l>qX@h0Z3ChC4<$ssRUtsOU_50+Qb#b7GW^Y2G{Qc^KAHVZ!n|d zFWv_Bb0|>Ay0^u5R^2A&<5T2ki^@4FSkHkI8%UoNTl9f;fcr-PxEcRD4#3S8-+KVu zOetiwZ7L0&>76+r`M`LNVvG1t$8-4IK902$_}|`GLAbvxt87Ct0&KIzcfQQ-lAMpZ zNF$5NJu2AFf&XbB4Jfu48?af!{qJLT&fo>&%Bg-9(yVPZ%EdKMwW$Pu2byy9IKgy| zV%VXfdxh}S9=^3xcmXf0Dx7gBlE=M_0vh{B3u&&p?&=)9G~{cGN-ru{&Vdiqlcp5I zq&nDw;S3$j&RM)wTr2gqML{KgZe;J7T7GOO_y) zcB27^sd;1VpelCmk$_~(hy5eMWmjSUl$xZ*pHg5&aGJ9BK~sC+3^z zY+0jQP2UXvJ@kN!-5TT-e0GHXE%>M|>(u3urnc3iZtDenuUghA&U-z@-3v-H0Y3wl zbvpCN`rG5f2l+;m=yl5pfYpi4RP^v%#EB&y)WO 
ziQLP)By&aHLP8Y0z#CS>;XgVD?^4hI^)dm!y1xEO_3#SEC@*fEclEd3#ovFibfJBL z)7=n(``xSWv*CnWfoGQ)1nG-%u}KE6SuS+G8`IbK?V|6$k)c>kF3nd0W7PhD>HNabRmZe`AS;Z??O4BSBTS=1y>3Ff0M=bC1d1hIsz zJYze2#rOrQiBDqg3oWxPs6gX$b1jtW8HSmqLdBZvS}zIhJa-lDOn1F8avcsMVOLDz zvJm|>!Ue(@wOkkY)amsza&SmL=5>H}EUSqNkxW zD`(Z3#rQvC_a4$PWVejJT~Rl2dnMqIZyf)6MQS|7*1oaN@e7~V3j-(Pp{m*m?}yjZ z8r>dMy8jcja-*oBg!;|7sxE*Xxw&i5ZkQ=vr{kz$a{861&V8}?(G}}WR9Ve@irYKs z?yENnf`-Puk7?B?;;wPIfNCdtTJjcv*bVmwF7zO3)!xFRn2M$HPTuy(}z!9VF` zZkNrAam)x;n_?UatAA}*ua-CD+O8&wG-0n$PuhLLUJ8i+`D<@0*FPEZE+F1IahmGV z^zg)KS9Hx~cSpiYq;K z`gYm^dDryqD|q=p9q0~U<_CW%E2G;dWC=9G-aptgf8nB5DTVKJeI`|+; z1gb9jAgqk;L+{a!qaQ)QLslJB?ui#}Jd}Kn?tg5xMk)Eg0r!eXs z=IK+Yjbn!glP_TOr5Bqh12x*R4M`d`+J#8Z^XWI=3rI^ZHdAgvzn5oZK)<{Bo=%7G zHVD!osEy#msI(BO`(e}-{KQ2c3El*OkAziMDKwk5i+C5B?MBaWbQJdgtKi15n3 zi@Y__zKaDh8qx)%YZyc6f@aE0O_Kr`6Rl|~LC&8~m&VA>UPzZlZG;`JPXlu|hwHE4 ze_r&F26rnQe59?q%Aj>L{+K(^IyZW%qpM&d@P?W)kS1r+Uocf;il8BGbh4wXaAJl< zbyF_Vju<`Fb*?DOKv>D69B~|GXiCvqxWY0`Maj=g;u!q+NK_ z#?5!dyLt0)dJmA@=WaUCkTDPbscfMXs)rkZ`$pwm%cu?Mcg3Q=Q(V-;4_U?E|Hq$q z&Ej405W=gXtDUI~R^t|1C==@8Cfl;}kGp=MHpJgu68W8~ryhR5D*oO-GGtKj`J(BM z4|kYH#0bwn>q4S7F2B39Yn{Z&Z@Z1`w!Jw(6J+1{r=XPrR}V+>`aScO1?=WO_5X_`8SJxr~-R56fNsu`en@^W|tr(1?v``s$_u%fUUgabmRe zRamm(k6lq>no;lNgGS6ua}+lZu}0k))q10)6=C>CKV+h;G^0LV3K}^N8l;;iSUT>E z`nJ*1*0A);KVVTBno%DNgGSDpW?$Ys!fJJA)JBh%)`jKF{ZNVu(Tp063mUo5Mp}zi z?#z+t{Kh3LHO%O7erBvZ#~(e`(_$EiwMe+VVz9jMI9wj9nou%t zuslr6-gz|VmfiVP^2evM(XZ!zC^H(|XD^gJZ42u#Tr0P2?hL`g6CTdT26{27Mw${{ zOxl(`(=%Po6a1n7vPVYv>-Qfrj|H)Jx_e<`^!*8U=TjnW73#H`fFZp#?rdvdEdAI;+^VHCgjj(=VXjp*8uQ6x?+7H$kZuBc15lM}` zQXLWAc!#gU65UXT<>9+Xn`efq?0wc1hN=bVoV60#H|&&-rXG$K6st4 z!_wWNI?KcNk;6|7_1H3lPYm@6(AjIVv>ENY^}?U9^dgjS-DrB_*hf*7FTzmRqt7>!%6bv*-$;nU_Yak~r3yYiRJQW4 z4B=<$xSEIXqp*9QLr5b&`p+TFlrIFC-91Ut1ermE%X@c`ZG`hjcM&U(^C9Ix1H1L< zlpFo9yI86hDcD`i4__Cgsels?3Dhh>Hk7{*VT;#4dm&OlEckQ~G>vmV9dx4`xM!y< zkYe4leevIdH22})@`0KUkztQt$gu?#%tMZ%HniQd(?KlFJ^L!&;=Q|qf~ds@cLgh> zTYYq1Dy@(1Mn5W5z}Mf0l`7~(X6O@?ysG~NHL#@(5CK_sW6(3;dy01huz}|JAl~H!zM;ahDeIMQE`ceh_OMN$` z3P^~NiY9Bc@{z|))&(*A=~sD2tkSPGQ?k`i@<%dqqzZ-*dKX<`;D!}=x9ZC8yG}bc zdB&$5g_TVILi&P%qA}>o&3ot>&=|1TKM#t%XGo`qu zRslo_YH9%{SUO2#4n;@VnXYqPf2jsD8f zN$@9$>*z#AtXI{#^Gchw?gi}qPU9qH+JV|yKRn#t$#`2-sj@b<1803*kXTtxOYTq9#~^2NAVr0YuaW zr~M`sJC6T_Sa@3dj49J~!O6;vKI1qpwBFn4I8H$L;r+7dl3a7@l%*AHF^CcPvbpSpuSM&l z0)-1!j4M%1m-aQ^n>uf0v$&TL8r58O)Hk5DxS-^M2E&)tbZKvM_LP;?kwrH~Fsr#t z*Vm=Btia#D?os>)i zhy13@%x6bE%Ab`UlKXiqu{kw|SlDi9^~zdg?b?oirW`grBBvm>*bW@|Bg1s*CW{7| zn@4?!jlT*N{V54U;_fRUeOS})%7(L9oCn2(v5_;>okbbLzAR4h3}I~H44r=wX818n zxTuqGy!Q+(c~QymU6$}ej-%8yrQY6`^uX_Ta8HKzEBGlj3-86=zG-cG)L%PS*LE^B z2mNW1+3YWJyFx|20G*o~O0$!*wRnlb8YxbZWW5Cxrf>Fpc=5^)cg3tq z%q<$2H;Qa58c~7?e&pZaYG`}p-$RI{k9Xnm@Wl^z;Z`LU7K%KO$Teu8{E8gg zi{lgfTCHQ{anj;4EwkxPZbu6xJ#tJDCzSoQ+R)15j72xo&(+$Z8-Ro2FW}&aD*kGv zAS0zjOw*r5O8o`2M|)fLUQ|T|_`~-qZl;uxuN$nqMih?PhqEE|k1OU;Scu|gvR9v_ z!3r7h2zxKKsq0yb1b_|(E5bb~ zd4V8#?cp>|>|3DfbbQDS#Z^l0y0`HvrHQgBB`MaEDJ~^BfY7tER@ml;RTQBMV&q;^ z$YAvBHKmoJqIQTMSSD(Rl95VhkK!@M6;2-oA3No4olo(jp>Lh{#rNA=A5(y<+F2j7 zGD`NnNCl3G??qSod8vC)RkFI&y>|%P@}lEtX`70o;{`GMUOy!-V|KlM+Dg$;J0u7U z6SYIB$kMY%A7D~$y)>1lcN7iMqhlcM+nBHkBD1y`kfp{{gkDF+dkQ(~{B zS|~EN)Kj3rx72;{y04{93%b}_AGPYb4ciAsQ=+juDl@ z9v2-eh!K39PX=bm>-<(qv09}7sLZHUrXc4}9lejym^D9oAGLAfR!zzb4R))>7awhB zt;_RDtaYurvVDW8VZ=<|U{|_vW|o*6rK3v*h}Z z_G7`Dj)M;GhlIYzKm$JJxp69jGbfqC=U8h1awQAG1vsVyZ_1&3;3f=WG z8(MW5%-St}Pb|CEeXystf%I*~B4&v8Em-t-M+bj+Z5dTOGracj&V?Id^UkK^ORxoJ zt6y#7iZL!+fpv7ei{|GAr=Qe>O^vQ4X2Oe-M6dBa;T1jCZ%n}#DZft|r{(3Rz!h;% 
[GIT binary patch data omitted (base85-encoded blob, not human-readable)]

diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/LICENSE
b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/LICENSE new file mode 100644 index 0000000000..a6d7fd364b --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/LICENSE @@ -0,0 +1,384 @@ +Attribution 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. + + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. + Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. More_considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution 4.0 International Public License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution 4.0 International Public License ("Public License"). 
To the +extent this Public License may be interpreted as a contract, You are +granted the Licensed Rights in consideration of Your acceptance of +these terms and conditions, and the Licensor grants You such rights in +consideration of benefits the Licensor receives from making the +Licensed Material available under these terms and conditions. + +Section 1 -- Definitions. + + a. Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + +b. Adapter's License means the license You apply to Your Copyright + and Similar Rights in Your contributions to Adapted Material in + accordance with the terms and conditions of this Public License. + +c. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + +d. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + +e. Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + +f. Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + +g. Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + +h. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + +i. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + +j. Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + +k. You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + +Section 2 -- Scope. + +a. License grant. + + 1. 
Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. reproduce and Share the Licensed Material, in whole or + in part; and + + b. produce, reproduce, and Share Adapted Material. + + 2. Exceptions and Limitations. For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. No downstream restrictions. You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + +b. Other rights. + + 1. Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties. + +Section 3 -- License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + +a. Attribution. + + 1. If You Share the Licensed Material (including in modified + form), You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. 
identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + 4. If You Share Adapted Material You produce, the Adapter's + License You apply must not prevent recipients of the Adapted + Material from complying with this Public License. + +Section 4 -- Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + +a. for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database; + +b. if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material; and + +c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. + +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + +a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + +b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. 
WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + +c. The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + +Section 6 -- Term and Termination. + +a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + +b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. upon express reinstatement by the Licensor. + + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + +c. For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + +d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. + +Section 7 -- Other Terms and Conditions. + +a. The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + +b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + +Section 8 -- Interpretation. + +a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + +b. To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + +c. No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + +d. Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + +======================================================================= + +Creative Commons is not a party to its public licenses. +Notwithstanding, Creative Commons may elect to apply one of its public +licenses to material it publishes and in those instances will be +considered the "Licensor." 
Except for the limited purpose of indicating +that material is shared under a Creative Commons public license or as +otherwise permitted by the Creative Commons policies published at +creativecommons.org/policies, Creative Commons does not authorize the +use of the trademark "Creative Commons" or any other trademark or logo +of Creative Commons without its prior written consent including, +without limitation, in connection with any unauthorized modifications +to any of its public licenses or any other arrangements, +understandings, or agreements concerning use of licensed material. For +the avoidance of doubt, this paragraph does not form part of the public +licenses. + +Creative Commons may be contacted at creativecommons.org.
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/__init__.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/__init__.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/__init__.py
new file mode 100644
index 0000000000..c5503ba361
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/__init__.py
@@ -0,0 +1,150 @@
+import importlib
+import torch.utils.data
+from .base_dataset import BaseDataset
+import math
+from torch.utils.data.sampler import Sampler
+
+
+class dataset_info():
+    def __init__(self):
+        self.prefix = [
+            './algorithm/DDFA/example/Images',
+            'PREFIX-TO-YOUR-DATASET'
+        ]
+        self.file_list = [
+            './algorithm/DDFA/example/file_list.txt',
+            'YOUR-FILE-LIST.txt'
+        ]
+
+        self.land_mark_list = [
+            './algorithm/DDFA/example/realign_lmk',
+            'LANDMARKS-OF-FACES-IN-YOUR-DATASET'
+        ]
+
+        self.params_dir = [
+            './algorithm/DDFA/results',
+            '3DFITTING-RESULTS-HOME-DIR'
+        ]
+        self.dataset_names = {'example': 0, 'YOUR-DATASET': 1}
+        self.folder_level = [1, 2]
+
+    def get_dataset(self, opt):
+        dataset = opt.dataset.split(',')
+        dataset_list = [self.dataset_names[dataset[i].lower()] for i in range(len(dataset))]
+
+        return dataset_list
+
+
+def find_dataset_using_name(dataset_name):
+    # Given the option --dataset [datasetname],
+    # the file "datasets/datasetname_dataset.py"
+    # will be imported.
+    dataset_filename = "algorithm.Rotate_and_Render.data." + dataset_name + "_dataset"
+    datasetlib = importlib.import_module(dataset_filename)
+
+    # In the file, the class called DatasetNameDataset() will
+    # be instantiated. It has to be a subclass of BaseDataset,
+    # and it is case-insensitive.
+    dataset = None
+    target_dataset_name = dataset_name.replace('_', '') + 'dataset'
+    for name, cls in datasetlib.__dict__.items():
+        if name.lower() == target_dataset_name.lower() \
+                and issubclass(cls, BaseDataset):
+            dataset = cls
+
+    if dataset is None:
+        raise ValueError("In %s.py, there should be a subclass of BaseDataset "
+                         "with class name that matches %s in lowercase." %
+                         (dataset_filename, target_dataset_name))
+
+    return dataset
+
+
+def get_option_setter(dataset_name):
+    dataset_class = find_dataset_using_name(dataset_name)
+    return dataset_class.modify_commandline_options
+
+
+def create_dataloader(opt):
+    dataset = find_dataset_using_name(opt.dataset_mode)
+    instance = dataset()
+    instance.initialize(opt)
+    print("dataset [%s] of size %d was created" %
+          (type(instance).__name__, len(instance)))
+    dataloader = torch.utils.data.DataLoader(
+        instance,
+        batch_size=opt.batchSize,
+        shuffle=not opt.serial_batches,
+        num_workers=int(opt.nThreads),
+        drop_last=opt.isTrain
+    )
+    return dataloader
+
+
+class MySampler(Sampler):
+
+    def __init__(self, opt, dataset, render_thread=None, rank=None, round_up=True):
+        self.dataset = dataset
+        self.opt = opt
+        self.render_thread = render_thread
+        self.rank = rank
+        self.round_up = round_up
+        self.epoch = 0
+
+        self.common_num = self.opt.batchSize * self.render_thread
+        if self.round_up:
+            self.total_size = int(math.ceil(len(self.dataset) * 1.0 / self.common_num)) * self.common_num
+        else:
+            self.total_size = len(self.dataset)
+        self.num_samples = int(math.ceil(self.total_size / self.render_thread))
+
+    def __iter__(self):
+        # deterministically shuffle based on epoch
+        g = torch.Generator()
+        g.manual_seed(self.epoch)
+        if self.opt.isTrain:
+            indices = list(torch.randperm(len(self.dataset), generator=g))
+        else:
+            indices = list(torch.arange(len(self.dataset)))
+
+        # add extra samples to make it evenly divisible
+        if self.round_up:
+            indices += indices[:(self.total_size - len(indices))]
+            assert len(indices) == self.total_size, 'indices {} != total_size {}'.format(len(indices), self.total_size)
+
+        # subsample
+        # offset = self.num_samples * self.rank
+        # indices = indices[offset:offset + self.num_samples]
+        indices = indices[self.rank::self.render_thread]
+        if self.round_up or (not self.round_up and self.rank == 0):
+            assert len(indices) == self.num_samples
+
+        return iter(indices)
+
+    def __len__(self):
+        return self.num_samples
+
+    def set_epoch(self, epoch):
+        self.epoch = epoch
+
+
+def create_dataloader_test(opt):
+    dataset = find_dataset_using_name(opt.dataset_mode)
+    instance = dataset()
+    instance.initialize(opt)
+    print("dataset [%s] of size %d was created" %
+          (type(instance).__name__, len(instance)))
+    samplers = [MySampler(opt, instance, render_thread=opt.render_thread, rank=i, round_up=opt.isTrain) for i in
+                range(opt.render_thread)]
+    dataloaders = [
+        torch.utils.data.DataLoader(
+            instance,
+            batch_size=opt.batchSize,
+            shuffle=False,
+            num_workers=int(opt.nThreads),
+            sampler=samplers[i],
+            drop_last=opt.isTrain,
+        )
+        for i in range(opt.render_thread)
+    ]
+    return dataloaders
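To see how MySampler splits an epoch across render threads, consider a small
sketch with assumed values (not part of the patch): a dataset of length 10,
batchSize = 2, render_thread = 2, round_up = True, and no shuffling. The index
list is padded to a multiple of batchSize * render_thread = 4, and each
sampler then takes every render_thread-th index starting at its own rank, so
the per-thread dataloaders consume disjoint, interleaved slices of the same
epoch permutation:

    # Hypothetical walk-through of MySampler.__iter__ (assumed sizes).
    indices = list(range(10))    # torch.arange(len(dataset))
    total_size = 12              # ceil(10 / 4) * 4
    indices += indices[:2]       # padded: [0..9, 0, 1]
    rank0 = indices[0::2]        # [0, 2, 4, 6, 8, 0] -> sampler for thread 0
    rank1 = indices[1::2]        # [1, 3, 5, 7, 9, 1] -> sampler for thread 1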
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/allface_dataset.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/allface_dataset.py
new file mode 100644
index 0000000000..dfbbce280b
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/allface_dataset.py
@@ -0,0 +1,165 @@
+import os
+import math
+import numpy as np
+import skimage.transform as trans
+import cv2
+import torch
+from algorithm.Rotate_and_Render.data import dataset_info
+from algorithm.Rotate_and_Render.data.base_dataset import BaseDataset
+
+dataset_info = dataset_info()
+
+
+class AllFaceDataset(BaseDataset):
+    @staticmethod
+    def modify_commandline_options(parser, is_train):
+        parser.add_argument('--no_pairing_check', action='store_true',
+                            help='If specified, skip sanity check of correct label-image file pairing')
+        return parser
+
+    def cv2_loader(self, img_str):
+        img_array = np.frombuffer(img_str, dtype=np.uint8)
+        return cv2.imdecode(img_array, cv2.IMREAD_COLOR)
+
+    def fill_list(self, tmp_list):
+        length = len(tmp_list)
+        if length % self.opt.batchSize != 0:
+            end = math.ceil(length / self.opt.batchSize) * self.opt.batchSize
+            tmp_list = tmp_list + tmp_list[-1 * (end - length):]
+        return tmp_list
+
+    def initialize(self, opt):
+        self.opt = opt
+        dataset_num = dataset_info.get_dataset(opt)
+        self.prefix = [dataset_info.prefix[num] for num in dataset_num]
+
+        file_list = [dataset_info.file_list[num] for num in dataset_num]
+
+        land_mark_list = [dataset_info.land_mark_list[num] for num in dataset_num]
+
+        self.params_dir = [dataset_info.params_dir[num] for num in dataset_num]
+
+        self.folder_level = [dataset_info.folder_level[num] for num in dataset_num]
+
+        self.num_datasets = len(file_list)
+        assert len(land_mark_list) == self.num_datasets, \
+            'the number of landmark dirs should equal the number of datasets'
+
+        assert len(self.params_dir) == self.num_datasets, \
+            'the number of params_dir entries should equal the number of datasets'
+
+        self.dataset_lists = []
+        self.landmark_paths = []
+        self.sizes = []
+
+        for n in range(self.num_datasets):
+            with open(file_list[n]) as f:
+                img_lists = f.readlines()
+            img_lists = self.fill_list(img_lists)
+            self.sizes.append(len(img_lists))
+            self.dataset_lists.append(sorted(img_lists))
+
+            with open(land_mark_list[n]) as f:
+                landmarks = f.readlines()
+            landmarks = self.fill_list(landmarks)
+            self.landmark_paths.append(sorted(landmarks))
+
+        self.dataset_size = min(self.sizes)
+        self.initialized = False
+
+    def get_landmarks(self, landmark, img_list):
+
+        landmark_split = landmark.strip().split(' ')
+        filename1_without_ext = os.path.basename(img_list.strip())
+        filename2_without_ext = os.path.basename(landmark_split[0])
+        assert (filename1_without_ext == filename2_without_ext), \
+            "The image path %s and the landmark entry %s don't match." % \
+            (img_list, landmark_split[0])
+
+        label = landmark_split[1]
+        landmarks = landmark_split[2:]
+        landmarks = list(map(float, landmarks))
+        landmarks_array = np.array(landmarks).reshape(5, 2)
+        return landmarks_array, label
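(A note on the expected input, since the parsing above is easy to misread:
each landmark-file line is assumed to hold an image name, a label, and five
(x, y) keypoints, i.e. ten floats. A hypothetical line and its parse:

    # '00001.jpg 0 38.3 51.7 73.5 51.5 56.0 71.7 41.5 92.4 70.7 92.2'
    # split(' ') -> [name, label, x1, y1, ..., x5, y5]
    # np.array(floats).reshape(5, 2) -> five (x, y) rows, typically the eye
    # centers, nose tip and mouth corners

The file name and the numeric values are illustrative; only the
[path, label, coordinates...] ordering is fixed by the code.)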
+
+    def get_param_file(self, img_list, dataset_num):
+        img_name = os.path.splitext(img_list)[0]
+        name_split = img_name.split("/")
+
+        folder_level = self.folder_level[dataset_num]
+        param_folder = os.path.join(self.params_dir[dataset_num],
+                                    "/".join([name_split[i] for i in
+                                              range(len(name_split) - folder_level, len(name_split))]) + ".txt")
+        # params = np.loadtxt(param_folder)
+        return param_folder
+
+    def paths_match(self, path1, path2):
+        filename1_without_ext = os.path.splitext(os.path.basename(path1)[-10:])[0]
+        filename2_without_ext = os.path.splitext(os.path.basename(path2)[-10:])[0]
+        return filename1_without_ext == filename2_without_ext
+
+    def affine_align(self, img, landmark=None, **kwargs):
+        M = None
+        h, w, c = img.shape
+        src = np.array([
+            [38.2946, 51.6963],
+            [73.5318, 51.5014],
+            [56.0252, 71.7366],
+            [41.5493, 92.3655],
+            [70.7299, 92.2041]], dtype=np.float32)
+        src = src * 290 / 112
+        src[:, 0] += 50
+        src[:, 1] += 60
+        src = src / 400 * self.opt.crop_size
+        dst = landmark
+        # dst = landmark.astype(np.float32)
+        tform = trans.SimilarityTransform()
+        tform.estimate(dst, src)
+        M = tform.params[0:2, :]
+        warped = cv2.warpAffine(img, M, (self.opt.crop_size, self.opt.crop_size), borderValue=0.0)
+        return warped, M
+
+    def __getitem__(self, index):
+        # Label Image
+
+        # randnum = np.random.randint(sum(self.sizes))
+        dataset_num = np.random.randint(self.num_datasets)
+
+        image_path = self.dataset_lists[dataset_num][index].strip()
+        image_path = os.path.join(self.prefix[dataset_num], image_path)
+
+        img = cv2.imread(image_path)
+        if img is None:
+            raise Exception('Failed to read image: %s' % image_path)
+
+        param_path = self.get_param_file(image_path, dataset_num)
+
+        # img = cv2.imread(image_path)
+        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
+        M = None
+        landmark_path = self.landmark_paths[dataset_num][index].strip()
+        landmarks, label = self.get_landmarks(landmark_path, image_path)
+        wrapped_img, M = self.affine_align(img, landmarks)
+        M = torch.from_numpy(M).float()
+
+        wrapped_img = wrapped_img.transpose(2, 0, 1) / 255.0
+
+        wrapped_img = torch.from_numpy(wrapped_img).float()
+
+        input_dict = {
+            'image': wrapped_img,
+            'param_path': param_path,
+            'M': M,
+            'path': image_path
+        }
+
+        # Give subclasses a chance to modify the final output
+        self.postprocess(input_dict)
+
+        return input_dict
+
+    def postprocess(self, input_dict):
+        return input_dict
+
+    def __len__(self):
+        return self.dataset_size
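The affine_align method above follows the common five-point similarity
alignment: a fixed keypoint template (originally defined on a 112x112 face
crop, here rescaled to the configured crop_size) is the target, and a
similarity transform from the detected landmarks onto that template is
estimated and applied. A minimal standalone sketch of the same
estimate-and-warp step; the function name and arguments are hypothetical:

    import cv2
    import numpy as np
    import skimage.transform as trans

    def align_to_template(img, landmarks, template, out_size):
        # Estimate the scale/rotation/translation mapping landmarks onto the
        # template, then warp the image with the resulting 2x3 affine matrix.
        tform = trans.SimilarityTransform()
        tform.estimate(landmarks.astype(np.float32), template.astype(np.float32))
        M = tform.params[0:2, :]
        return cv2.warpAffine(img, M, (out_size, out_size), borderValue=0.0), M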
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/base_dataset.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/base_dataset.py
new file mode 100644
index 0000000000..b4bb726d96
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/base_dataset.py
@@ -0,0 +1,123 @@
+import torch.utils.data as data
+from PIL import Image
+import torchvision.transforms as transforms
+import numpy as np
+import random
+
+
+class BaseDataset(data.Dataset):
+    def __init__(self):
+        super(BaseDataset, self).__init__()
+
+    @staticmethod
+    def modify_commandline_options(parser, is_train):
+        return parser
+
+    def initialize(self, opt):
+        pass
+
+
+def get_params(opt, size):
+    w, h = size
+    new_h = h
+    new_w = w
+    if opt.preprocess_mode == 'resize_and_crop':
+        new_h = new_w = opt.load_size
+    elif opt.preprocess_mode == 'scale_width_and_crop':
+        new_w = opt.load_size
+        new_h = opt.load_size * h // w
+    elif opt.preprocess_mode == 'scale_shortside_and_crop':
+        ss, ls = min(w, h), max(w, h)  # shortside and longside
+        width_is_shorter = w == ss
+        ls = int(opt.load_size * ls / ss)
+        new_w, new_h = (ss, ls) if width_is_shorter else (ls, ss)
+
+    x = random.randint(0, np.maximum(0, new_w - opt.crop_size))
+    y = random.randint(0, np.maximum(0, new_h - opt.crop_size))
+
+    flip = random.random() > 0.5
+    return {'crop_pos': (x, y), 'flip': flip}
+
+
+def get_transform(opt, params, method=Image.BICUBIC, normalize=True, toTensor=True):
+    transform_list = []
+    if 'resize' in opt.preprocess_mode:
+        osize = [opt.load_size, opt.load_size]
+        transform_list.append(transforms.Resize(osize, interpolation=method))
+    elif 'scale_width' in opt.preprocess_mode:
+        transform_list.append(transforms.Lambda(lambda img: __scale_width(img, opt.load_size, method)))
+    elif 'scale_shortside' in opt.preprocess_mode:
+        transform_list.append(transforms.Lambda(lambda img: __scale_shortside(img, opt.load_size, method)))
+
+    if 'crop' in opt.preprocess_mode:
+        transform_list.append(transforms.Lambda(lambda img: __crop(img, params['crop_pos'], opt.crop_size)))
+
+    if opt.preprocess_mode == 'none':
+        base = 32
+        transform_list.append(transforms.Lambda(lambda img: __make_power_2(img, base, method)))
+
+    if opt.preprocess_mode == 'fixed':
+        w = opt.crop_size
+        h = round(opt.crop_size / opt.aspect_ratio)
+        transform_list.append(transforms.Lambda(lambda img: __resize(img, w, h, method)))
+
+    if opt.isTrain and not opt.no_flip:
+        transform_list.append(transforms.Lambda(lambda img: __flip(img, params['flip'])))
+
+    if toTensor:
+        transform_list += [transforms.ToTensor()]
+
+    if normalize:
+        transform_list += [transforms.Normalize((0.5, 0.5, 0.5),
+                                                (0.5, 0.5, 0.5))]
+    return transforms.Compose(transform_list)
+
+
+def normalize():
+    return transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
+
+
+def __resize(img, w, h, method=Image.BICUBIC):
+    return img.resize((w, h), method)
+
+
+def __make_power_2(img, base, method=Image.BICUBIC):
+    ow, oh = img.size
+    h = int(round(oh / base) * base)
+    w = int(round(ow / base) * base)
+    if (h == oh) and (w == ow):
+        return img
+    return img.resize((w, h), method)
+
+
+def __scale_width(img, target_width, method=Image.BICUBIC):
+    ow, oh = img.size
+    if (ow == target_width):
+        return img
+    w = target_width
+    h = int(target_width * oh / ow)
+    return img.resize((w, h), method)
+
+
+def __scale_shortside(img, target_width, method=Image.BICUBIC):
+    ow, oh = img.size
+    ss, ls = min(ow, oh), max(ow, oh)  # shortside and longside
+    width_is_shorter = ow == ss
+    if (ss == target_width):
+        return img
+    ls = int(target_width * ls / ss)
+    nw, nh = (ss, ls) if width_is_shorter else (ls, ss)
+    return img.resize((nw, nh), method)
+
+
+def __crop(img, pos, size):
+    ow, oh = img.size
+    x1, y1 = pos
+    tw = th = size
+    return img.crop((x1, y1, x1 + tw, y1 + th))
+
+
+def __flip(img, flip):
+    if flip:
+        return img.transpose(Image.FLIP_LEFT_RIGHT)
+    return img
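For orientation, here is how get_params and get_transform above are meant to
compose (a usage sketch with hypothetical option values; the field names
mirror the ones the two functions read):

    from types import SimpleNamespace
    from PIL import Image

    opt = SimpleNamespace(preprocess_mode='resize_and_crop', load_size=286,
                          crop_size=256, aspect_ratio=1.0,
                          isTrain=True, no_flip=False)
    img = Image.new('RGB', (512, 384))   # stand-in for a real photo
    params = get_params(opt, img.size)   # random crop position and flip flag
    transform = get_transform(opt, params)
    tensor = transform(img)              # 3 x 256 x 256 tensor, values in [-1, 1]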
b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/curve.py @@ -0,0 +1,250 @@ +import numpy as np +import cv2 +import math +from numpy import linalg as LA + + +def distance(p1, p2): + return math.sqrt((p1[0] - p2[0]) * (p1[0] - p2[0]) + (p1[1] - p2[1]) * (p1[1] - p2[1])) + + +def curve_interp(src, samples, index): + assert (src.shape[0] > 2) + assert (samples >= 2) + + src_1 = src[0:src.shape[0] - 1, :] + src_2 = src[1:src.shape[0], :] + src_delta = src_1 - src_2 + length = np.sqrt(src_delta[:, 0] ** 2 + src_delta[:, 1] ** 2) + assert (length.shape[0] == src.shape[0] - 1) + + accu_length = np.zeros((src.shape[0])) + for i in range(1, accu_length.shape[0]): + accu_length[i] = accu_length[i - 1] + length[i - 1] + dst = np.zeros((samples, 2)) + pre_raw = 0 + + step_interp = accu_length[accu_length.shape[0] - 1] / float(samples - 1) + dst[0, :] = src[0, :] + dst[dst.shape[0] - 1, :] = src[src.shape[0] - 1, :] + for i in range(1, samples - 1): + covered_interp = step_interp * i + while (covered_interp > accu_length[pre_raw + 1]): + pre_raw += 1 + assert (pre_raw < accu_length.shape[0] - 1) + dx = (covered_interp - accu_length[pre_raw]) / length[pre_raw] + dst[i, :] = src[pre_raw, :] * (1.0 - dx) + src[pre_raw + 1, :] * dx + + return dst + + +def curve_fitting(points, samples, index): + num_points = points.shape[0] + assert (num_points > 1) + valid_points = [points[0]] + for i in range(1, num_points): + if (distance(points[i, :], points[i - 1, :]) > 0.001): + valid_points.append(points[i, :]) + assert (len(valid_points) > 1) + valid_points = np.asarray(valid_points) + functions = np.zeros((valid_points.shape[0] - 1, 9)) + + if valid_points.shape[0] == 2: + functions[0, 0] = LA.norm(valid_points[0, :] - valid_points[1, :]) + functions[0, 1] = valid_points[0, 0] + functions[0, 2] = (valid_points[1, 0] - valid_points[0, 0]) / functions[0, 0] + functions[0, 3] = 0 + functions[0, 4] = 0 + functions[0, 5] = valid_points[0, 1] + functions[0, 6] = (valid_points[1, 1] - valid_points[0, 1]) / functions[0, 0] + functions[0, 7] = 0 + functions[0, 8] = 0 + else: + Mx = np.zeros((valid_points.shape[0])) + My = np.zeros((valid_points.shape[0])) + A = np.zeros((valid_points.shape[0] - 2)) + B = np.zeros((valid_points.shape[0] - 2)) + C = np.zeros((valid_points.shape[0] - 2)) + Dx = np.zeros((valid_points.shape[0] - 2)) + Dy = np.zeros((valid_points.shape[0] - 2)) + for i in range(functions.shape[0]): + functions[i, 0] = LA.norm(valid_points[i, :] - valid_points[i + 1, :]) + for i in range(A.shape[0]): + A[i] = functions[i, 0] + B[i] = 2.0 * (functions[i, 0] + functions[i + 1, 0]) + C[i] = functions[i + 1, 0] + Dx[i] = 6.0 * ((valid_points[i + 2, 0] - valid_points[i + 1, 0]) / functions[i + 1, 0] - ( + valid_points[i + 1, 0] - valid_points[i, 0]) / functions[i, 0]) + Dy[i] = 6.0 * ((valid_points[i + 2, 1] - valid_points[i + 1, 1]) / functions[i + 1, 0] - ( + valid_points[i + 1, 1] - valid_points[i, 1]) / functions[i, 0]) + + C[0] = C[0] / B[0] + Dx[0] = Dx[0] / B[0] + Dy[0] = Dy[0] / B[0] + for i in range(1, A.shape[0]): + tmp = B[i] - A[i] * C[i - 1] + C[i] = C[i] / tmp + Dx[i] = (Dx[i] - A[i] * Dx[i - 1]) / tmp + Dy[i] = (Dy[i] - A[i] * Dy[i - 1]) / tmp + Mx[valid_points.shape[0] - 2] = Dx[valid_points.shape[0] - 3] + My[valid_points.shape[0] - 2] = Dy[valid_points.shape[0] - 3] + for i in range(valid_points.shape[0] - 4, -1, -1): + Mx[i + 1] = Dx[i] - C[i] * Mx[i + 2] + My[i + 1] = Dy[i] - C[i] * My[i + 2] + Mx[0] = 0 + Mx[valid_points.shape[0] - 1] = 0 + 
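+        # (Comment ours, not in the original patch.) The block above is a
+        # Thomas-algorithm solve of the tridiagonal system for the cubic
+        # spline's second derivatives: forward elimination scales C and D by
+        # the pivot B[i] - A[i] * C[i - 1], back-substitution fills in the
+        # interior values, and the endpoints are pinned to zero, i.e. the
+        # "natural" boundary condition:
+        assert Mx[0] == 0 and Mx[valid_points.shape[0] - 1] == 0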
My[0] = 0 + My[valid_points.shape[0] - 1] = 0 + + for i in range(functions.shape[0]): + functions[i, 1] = valid_points[i, 0] + functions[i, 2] = (valid_points[i + 1, 0] - valid_points[i, 0]) / functions[i, 0] - ( + 2.0 * functions[i, 0] * Mx[i] + functions[i, 0] * Mx[i + 1]) / 6.0 + functions[i, 3] = Mx[i] / 2.0 + functions[i, 4] = (Mx[i + 1] - Mx[i]) / (6.0 * functions[i, 0]) + functions[i, 5] = valid_points[i, 1] + functions[i, 6] = (valid_points[i + 1, 1] - valid_points[i, 1]) / functions[i, 0] - ( + 2.0 * functions[i, 0] * My[i] + functions[i, 0] * My[i + 1]) / 6.0 + functions[i, 7] = My[i] / 2.0 + functions[i, 8] = (My[i + 1] - My[i]) / (6.0 * functions[i, 0]) + + samples_per_segment = samples * 1 / functions.shape[0] + 1 + samples_per_segment = int(samples_per_segment) + + rawcurve = np.zeros((functions.shape[0] * samples_per_segment, 2)) + for i in range(functions.shape[0]): + step = functions[i, 0] / samples_per_segment + for j in range(samples_per_segment): + t = step * j + rawcurve[i * samples_per_segment + j, :] = np.asarray( + [functions[i, 1] + functions[i, 2] * t + functions[i, 3] * t * t + functions[i, 4] * t * t * t, + functions[i, 5] + functions[i, 6] * t + functions[i, 7] * t * t + functions[i, 8] * t * t * t]) + + curve_tmp = curve_interp(rawcurve, samples, index) + + return curve_tmp + + +def points_to_heatmap_68points(points, heatmap_num, heatmap_size, sigma): + align_on_curve = [0] * heatmap_num + curves = [0] * heatmap_num + + align_on_curve[0] = np.zeros((3, 2)) # contour + align_on_curve[1] = np.zeros((5, 2)) # left eyebrow + align_on_curve[2] = np.zeros((5, 2)) # right eyebrow + align_on_curve[3] = np.zeros((4, 2)) # nose bridge + align_on_curve[4] = np.zeros((5, 2)) # nose tip + align_on_curve[5] = np.zeros((4, 2)) # left top eye + align_on_curve[6] = np.zeros((4, 2)) # left bottom eye + align_on_curve[7] = np.zeros((4, 2)) # right top eye + align_on_curve[8] = np.zeros((4, 2)) # right bottom eye + align_on_curve[9] = np.zeros((7, 2)) # up up lip + align_on_curve[10] = np.zeros((5, 2)) # up bottom lip + align_on_curve[11] = np.zeros((5, 2)) # bottom up lip + align_on_curve[12] = np.zeros((7, 2)) # bottom bottom lip + + for i in range(3): + align_on_curve[0][i] = points[7 + i] + + for i in range(5): + align_on_curve[1][i] = points[i + 17] + + for i in range(5): + align_on_curve[2][i] = points[i + 22] + + for i in range(4): + align_on_curve[3][i] = points[i + 27] + + for i in range(5): + align_on_curve[4][i] = points[i + 31] + + for i in range(4): + align_on_curve[5][i] = points[i + 36] + + align_on_curve[6][0] = points[36] + align_on_curve[6][1] = points[41] + align_on_curve[6][2] = points[40] + align_on_curve[6][3] = points[39] + + align_on_curve[7][0] = points[42] + align_on_curve[7][1] = points[43] + align_on_curve[7][2] = points[44] + align_on_curve[7][3] = points[45] + + align_on_curve[8][0] = points[42] + align_on_curve[8][1] = points[47] + align_on_curve[8][2] = points[46] + align_on_curve[8][3] = points[45] + + for i in range(7): + align_on_curve[9][i] = points[i + 48] + + for i in range(5): + align_on_curve[10][i] = points[i + 60] + + align_on_curve[11][0] = points[60] + align_on_curve[11][1] = points[67] + align_on_curve[11][2] = points[66] + align_on_curve[11][3] = points[65] + align_on_curve[11][4] = points[64] + + align_on_curve[12][0] = points[48] + align_on_curve[12][1] = points[59] + align_on_curve[12][2] = points[58] + align_on_curve[12][3] = points[57] + align_on_curve[12][4] = points[56] + align_on_curve[12][5] = points[55] + 
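+    # (Comment ours.) The indices follow the iBUG 68-point markup: 0-16 jaw,
+    # 17-26 brows, 27-35 nose, 36-47 eyes, 48-59 outer lip, 60-67 inner lip.
+    # Both bottom-lip curves are traced right to left so that every curve runs
+    # in a consistent direction, hence the descending point indices here.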
align_on_curve[12][6] = points[54] + + heatmap = np.zeros((heatmap_size, heatmap_size, heatmap_num)) + for i in range(heatmap_num): + curve_map = np.full((heatmap_size, heatmap_size), 255, dtype=np.uint8) + + valid_points = [align_on_curve[i][0, :]] + for j in range(1, align_on_curve[i].shape[0]): + if (distance(align_on_curve[i][j, :], align_on_curve[i][j - 1, :]) > 0.001): + valid_points.append(align_on_curve[i][j, :]) + + if len(valid_points) > 1: + curves[i] = curve_fitting(align_on_curve[i], align_on_curve[i].shape[0] * 10, i) + for j in range(curves[i].shape[0]): + if (int(curves[i][j, 0] + 0.5) >= 0 and int(curves[i][j, 0] + 0.5) < heatmap_size and + int(curves[i][j, 1] + 0.5) >= 0 and int(curves[i][j, 1] + 0.5) < heatmap_size): + curve_map[int(curves[i][j, 1] + 0.5), int(curves[i][j, 0] + 0.5)] = 0 + + # distance transform + image_dis = cv2.distanceTransform(curve_map, cv2.DIST_L2, cv2.DIST_MASK_PRECISE) + + # gaussian map generation + image_dis = image_dis.astype(np.float64) + image_gaussian = (1.0 / (2.0 * np.pi * (sigma ** 2))) * np.exp(-1.0 * image_dis ** 2 / (2.0 * sigma ** 2)) + image_gaussian = np.where(image_dis < (3.0 * sigma), image_gaussian, 0) + + # normalised to [0,1] + maxVal = image_gaussian.max() + minVal = image_gaussian.min() + + if maxVal == minVal: + image_gaussian = 0 + else: + image_gaussian = (image_gaussian - minVal) / (maxVal - minVal) + + heatmap[:, :, i] = image_gaussian + + return heatmap + + +def combine_map(heatmap, use_edge=True, no_guassian=False): + left_eye = (heatmap[:, :, 5] + heatmap[:, :, 6]).clip(0, 1) + right_eye = (heatmap[:, :, 7] + heatmap[:, :, 8]).clip(0, 1) + nose = (heatmap[:, :, 3] + heatmap[:, :, 4]).clip(0, 1) + mouth = (heatmap[:, :, 9] + heatmap[:, :, 10] + + heatmap[:, :, 11] + heatmap[:, :, 12]).clip(0, 1) + if use_edge: + edge = heatmap[:, :, 0] + else: + edge = np.zeros_like(heatmap[:, :, 0]) + label_map = np.stack([left_eye, right_eye, nose, mouth, edge], 0) + if no_guassian: + label_map = (label_map > 0).astype(float) + return label_map diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/data_utils.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/data_utils.py new file mode 100644 index 0000000000..cd0cb777ba --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/data_utils.py @@ -0,0 +1,183 @@ +import traceback +from torch.multiprocessing import Process +import numpy as np + +import os +import torch + + +def get_input(data, render): + real_image = data['image'] + input_semantics, rotated_mesh, orig_landmarks, rotate_landmarks, rendered_images_erode, original_angles, \ + Rd_a, rendered_images_rotate_artifacts = render.rotate_render(data['param_path'], real_image, data['M']) + output = {} + real_image = real_image * 2 - 1 + input_semantics = input_semantics * 2 - 1 + rotated_mesh = rotated_mesh * 2 - 1 + rendered_images_erode = rendered_images_erode * 2 - 1 + Rd_a = Rd_a * 2 - 1 + rendered_images_rotate_artifacts = rendered_images_rotate_artifacts * 2 - 1 + output['image'] = real_image.cpu() + output['rendered_images_erode'] = rendered_images_erode.cpu() + output['mesh'] = input_semantics.cpu() + output['rotated_mesh'] = rotated_mesh.cpu() + output['Rd_a'] = Rd_a.cpu() + output['orig_landmarks'] = orig_landmarks.cpu() + output['rotated_landmarks'] = rotate_landmarks.cpu() + output['original_angles'] = original_angles.cpu() + 
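+    # (Comment ours.) Everything was rescaled from [0, 1] to [-1, 1] above and
+    # is moved off the render GPU here; the data_prefetcher below re-uploads
+    # whatever it needs with cuda(non_blocking=True) before queueing.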
output['rendered_images_rotate_artifacts'] = rendered_images_rotate_artifacts.cpu() + output['path'] = data['path'] + return output + + +def get_test_input(data, render): + real_image = data['image'] + rotated_mesh, rotate_landmarks, original_angles \ + = render.rotate_render(data['param_path'], real_image, data['M']) + output = {} + real_image = real_image * 2 - 1 + rotated_mesh = rotated_mesh * 2 - 1 + output['image'] = real_image.cpu() + output['rotated_mesh'] = rotated_mesh.cpu() + output['rotated_landmarks'] = rotate_landmarks.cpu() + output['original_angles'] = original_angles.cpu() + output['path'] = data['path'] + return output + + +def get_multipose_test_input(data, render, yaw_poses, pitch_poses): + real_image = data['image'] + # num_poses = len(yaw_poses) + len(pitch_poses) + rotated_meshs = [] + rotated_landmarks_list = [] + original_angles_list = [] + rotated_landmarks_list_106 = [] + paths = [] + real_images = [] + pose_list = [] + for i in range(2): + prefix = 'yaw' if i == 0 else 'pitch' + poses = yaw_poses if i == 0 else pitch_poses + for pose in poses: + if i == 0: + rotated_mesh, rotate_landmarks, original_angles, rotate_landmarks_106 \ + = render.rotate_render(data['param_path'], real_image, data['M'], yaw_pose=pose) + else: + rotated_mesh, rotate_landmarks, original_angles, rotate_landmarks_106 \ + = render.rotate_render(data['param_path'], real_image, data['M'], pitch_pose=pose) + rotated_meshs.append(rotated_mesh) + rotated_landmarks_list.append(rotate_landmarks) + rotated_landmarks_list_106.append(rotate_landmarks_106) + original_angles_list.append(original_angles) + paths += data['path'] + pose_list += ['{}_{}'.format(prefix, pose) for i in range(len(data['path']))] + real_images.append(real_image) + rotated_meshs = torch.cat(rotated_meshs, 0) + rotated_landmarks_list = torch.cat(rotated_landmarks_list, 0) + rotated_landmarks_list_106 = torch.cat(rotated_landmarks_list_106, 0) + original_angles_list = torch.cat(original_angles_list, 0) + output = {} + real_image = real_image * 2 - 1 + rotated_meshs = rotated_meshs * 2 - 1 + output['image'] = real_image.cpu() + output['rotated_mesh'] = rotated_meshs.cpu() + output['rotated_landmarks'] = rotated_landmarks_list.cpu() + output['rotated_landmarks_106'] = rotated_landmarks_list_106.cpu() + output['original_angles'] = original_angles_list.cpu() + output['path'] = paths + output['pose_list'] = pose_list + return output + + +class data_prefetcher(): + def __init__(self, loader, opt, render_layer): + self.loader = iter(loader) + self.stream = torch.cuda.Stream() + self.opt = opt + self.render_layer = render_layer + self.preload() + + def preload(self): + try: + data = next(self.loader) + except StopIteration: + self.next_input = None + return + if self.opt.isTrain: + self.next_input = get_input(data, self.render_layer) + elif self.opt.yaw_poses is None and self.opt.pitch_poses is None: + self.next_input = get_test_input(data, self.render_layer) + else: + if self.opt.yaw_poses is not None: + if self.opt.posesrandom: + self.opt.yaw_poses = [round(np.random.uniform(-0.5, 0.5, 1)[0], 2) for k in + range(len(self.opt.yaw_poses))] + else: + self.opt.yaw_poses = [] + + if self.opt.pitch_poses is not None: + if self.opt.posesrandom: + self.opt.pitch_poses = [round(np.random.uniform(-0.5, 0.5, 1)[0], 2) for k in + range(len(self.opt.pitch_poses))] + else: + self.opt.pitch_poses = [] + + self.next_input = get_multipose_test_input(data, self.render_layer, self.opt.yaw_poses, + self.opt.pitch_poses) + with 
torch.cuda.stream(self.stream): + for k, v in self.next_input.items(): + if type(v) == torch.Tensor: + self.next_input[k] = v.cuda(non_blocking=True) + + def next(self): + torch.cuda.current_stream().wait_stream(self.stream) + input = self.next_input + + if input is not None: + for k in input.keys(): + if type(input[k]) == torch.Tensor: + input[k].record_stream(torch.cuda.current_stream()) + self.preload() + return input + + +def prefetch_data(queue, dataloader, iter_counter, opt, render_layer): + print("start prefetching data...") + np.random.seed(os.getpid()) + for epoch in iter_counter.training_epochs(): + prefetcher = data_prefetcher(dataloader, opt, render_layer) + input = prefetcher.next() + while input is not None: + try: + queue.put(input) + except Exception as e: + traceback.print_exc() + raise e + input = prefetcher.next() + + +def pin_memory(data_queue, pinned_data_queue, sema): + while True: + data = data_queue.get() + + for k, v in data.items(): + data[k] = v.pin_memory() + + pinned_data_queue.put(data) + + if sema.acquire(blocking=False): + return + + +def init_parallel_jobs(queue, dataloader, iter_counter, opt, render_layer): + if isinstance(dataloader, list): + tasks = [Process(target=prefetch_data, args=(queue, dataloader[i], iter_counter, opt, render_layer[i])) for i in + range(opt.render_thread)] + else: + tasks = [Process(target=prefetch_data, args=(queue, dataloader, iter_counter, opt, render_layer)) for i in + range(opt.render_thread)] + # task.daemon = True + for task in tasks: + task.start() + + return tasks diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/test.sh b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/test.sh new file mode 100644 index 0000000000..fccbb2d378 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/test.sh @@ -0,0 +1,24 @@ +python3 -u test_frontal.py \ + --names rs_ijba3 \ + --dataset megafaceprobe \ + --list_start 0 \ + --list_end 5000 \ + --dataset_mode single \ + --gpu_ids 0,1,2,3,4,5,6,7 \ + --netG rotatespade \ + --norm_G spectralsyncbatch \ + --batchSize 18 \ + --model rotatespade \ + --label_nc 5 \ + --nThreads 3 \ + --heatmap_size 2.5 \ + --chunk_size 40 40\ + --no_gaussian_landmark \ + --multi_gpu \ + --device_count 8\ + --render_thread 6 \ + --label_mask \ + --align \ + #--use_BG \ + #--chunk_size 2 4 4 4 4 4\ + diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/train.sh b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/train.sh new file mode 100644 index 0000000000..2c75e612d7 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/train.sh @@ -0,0 +1,32 @@ +python3 -u train.py \ + --name rotate \ + --dataset_mode allface \ + --load_size 400 \ + --crop_size 256 \ + --netG rotatespade \ + --trainer rotatespade \ + --norm_D spectralsyncbatch \ + --norm_G spectralsyncbatch \ + --model rotatespade \ + --dataset 'example' \ + --gpu_ids 0 \ + --lambda_D 0.75 \ + --lambda_rotate_D 0.001 \ + --D_input concat \ + --netD multiscale \ + --label_nc 5 \ + --nThreads 3 \ + --no_html \ + --display_freq 100 \ + --print_freq 100 \ + --load_separately \ + --heatmap_size 2.5 \ + --device_count 2 \ + --render_thread 1 \ + --chunk_size 1 \ 
+ --no_gaussian_landmark \ + --landmark_align \ + --erode_kernel 19 \ + --pose_noise \ + # --G_pretrain_path ./checkpoints/rs_model/latest_net_G.pth \ + # --D_pretrain_path ./checkpoints/rs_model/latest_net_D.pth \ diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/v100_test.sh b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/v100_test.sh new file mode 100644 index 0000000000..944871a061 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/v100_test.sh @@ -0,0 +1,24 @@ +python3 -u test_multipose.py \ + --names rs_model \ + --dataset example \ + --list_start 0 \ + --list_end 10 \ + --dataset_mode allface \ + --gpu_ids 0 \ + --netG rotatespade \ + --norm_G spectralsyncbatch \ + --model rotatespade \ + --label_nc 5 \ + --nThreads 8 \ + --heatmap_size 1\ + --chunk_size 1 \ + --no_gaussian_landmark \ + --multi_gpu \ + --device_count 2 \ + --render_thread 1 \ + --label_mask \ + --align \ + --erode_kernel 21 \ + --yaw_poses 0 5 10 15 20 25 30 35 40.5 45 50 55 60 \ + --pitch_poses 0 5 10 15 20 25 30 35 40.5 45 \ + diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/__init__.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/__init__.py new file mode 100644 index 0000000000..62c236c018 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/__init__.py @@ -0,0 +1,40 @@ +import importlib +import torch +__all__ = ['torch'] + + +def find_model_using_name(model_name): + # Given the option --model [modelname], + # the file "models/modelname_model.py" + # will be imported. + model_filename = "algorithm.Rotate_and_Render.models." + model_name + "_model" + modellib = importlib.import_module(model_filename) + + # In the file, the class called ModelNameModel() will + # be instantiated. It has to be a subclass of torch.nn.Module, + # and it is case-insensitive. + model = None + target_model_name = model_name.replace('_', '') + 'model' + for name, cls in modellib.__dict__.items(): + if name.lower() == target_model_name.lower(): + model = cls + + if model is None: + print("In %s.py, there should be a subclass of torch.nn.Module with \ + class name that matches %s in lowercase." 
% (model_filename, target_model_name)) + exit(0) + + return model + + +def get_option_setter(model_name): + model_class = find_model_using_name(model_name) + return model_class.modify_commandline_options + + +def create_model(opt): + model = find_model_using_name(opt.model) + instance = model(opt) + print("model [%s] was created" % (type(instance).__name__)) + + return instance diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/__init__.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/__init__.py new file mode 100644 index 0000000000..91e0febc81 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/__init__.py @@ -0,0 +1,60 @@ +import torch +from algorithm.Rotate_and_Render.models.networks.base_network import BaseNetwork +from algorithm.Rotate_and_Render.models.networks import loss +from algorithm.Rotate_and_Render.models.networks import discriminator +from algorithm.Rotate_and_Render.models.networks import generator +from algorithm.Rotate_and_Render.models.networks import encoder +from algorithm.Rotate_and_Render.models.networks.render import Render +import algorithm.Rotate_and_Render.util.util as util +__all__ = ['loss', 'discriminator', 'generator', 'encoder', 'Render'] + + +def find_network_using_name(target_network_name, filename): + target_class_name = target_network_name + filename + module_name = 'algorithm.Rotate_and_Render.models.networks.' + filename + network = util.find_class_in_module(target_class_name, module_name) + + assert issubclass(network, BaseNetwork), \ + "Class %s should be a subclass of BaseNetwork" % network + + return network + + +def modify_commandline_options(parser, is_train): + opt, _ = parser.parse_known_args() + + netG_cls = find_network_using_name(opt.netG, 'generator') + parser = netG_cls.modify_commandline_options(parser, is_train) + if is_train: + netD_cls = find_network_using_name(opt.netD, 'discriminator') + parser = netD_cls.modify_commandline_options(parser, is_train) + netE_cls = find_network_using_name('conv', 'encoder') + parser = netE_cls.modify_commandline_options(parser, is_train) + + return parser + + +def create_network(cls, opt): + net = cls(opt) + net.print_network() + if len(opt.gpu_ids) > 0: + assert(torch.cuda.is_available()) + net.cuda() + net.init_weights(opt.init_type, opt.init_variance) + return net + + +def define_G(opt): + netG_cls = find_network_using_name(opt.netG, 'generator') + return create_network(netG_cls, opt) + + +def define_D(opt): + netD_cls = find_network_using_name(opt.netD, 'discriminator') + return create_network(netD_cls, opt) + + +def define_E(opt): + # there exists only one encoder type + netE_cls = find_network_using_name('conv', 'encoder') + return create_network(netE_cls, opt) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/architecture.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/architecture.py new file mode 100644 index 0000000000..123004ef52 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/architecture.py @@ -0,0 +1,199 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision +import torch.nn.utils.spectral_norm as 
spectral_norm +from .normalization import SPADE +from ...util import util + + +# ResNet block that uses SPADE. +# It differs from the ResNet block of pix2pixHD in that +# it takes in the segmentation map as input and learns the skip connection if necessary. +# (In the forward() actually used here, each convolution is applied first and its SPADE +# normalization second; the normalization-first variant is kept as the unused _forward().) +# This seemed to be a standard architecture for unconditional or +# class-conditional GANs built on residual blocks. +# The code was inspired by https://github.com/LMescheder/GAN_stability. +class SPADEResnetBlock(nn.Module): + def __init__(self, fin, fout, opt): + super().__init__() + # Attributes + self.learned_shortcut = (fin != fout) + fmiddle = min(fin, fout) + + # create conv layers + self.conv_0 = nn.Conv2d(fin, fmiddle, kernel_size=3, padding=1) + self.conv_1 = nn.Conv2d(fmiddle, fout, kernel_size=3, padding=1) + if self.learned_shortcut: + self.conv_s = nn.Conv2d(fin, fout, kernel_size=1, bias=False) + + # apply spectral norm if specified + if 'spectral' in opt.norm_G: + self.conv_0 = spectral_norm(self.conv_0) + self.conv_1 = spectral_norm(self.conv_1) + if self.learned_shortcut: + self.conv_s = spectral_norm(self.conv_s) + + # define normalization layers + spade_config_str = opt.norm_G.replace('spectral', '') + self.norm_0 = SPADE(spade_config_str, fmiddle, opt.semantic_nc) + self.norm_1 = SPADE(spade_config_str, fout, opt.semantic_nc) + if self.learned_shortcut: + self.norm_s = SPADE(spade_config_str, fout, opt.semantic_nc) + + # note the resnet block with SPADE also takes in |seg|, + # the semantic segmentation map as input + def _forward(self, x, seg): + x_s = self.shortcut(x, seg) + + dx = self.conv_0(self.actvn(self.norm_0(x, seg))) + dx = self.conv_1(self.actvn(self.norm_1(dx, seg))) + + out = x_s + dx + + return out + + def forward(self, x, seg): + if self.learned_shortcut: + x_s = self.norm_s(self.conv_s(x), seg) + else: + x_s = x + dx = self.actvn(self.norm_0(self.conv_0(x), seg)) + dx = self.actvn(self.norm_1(self.conv_1(dx), seg)) + + out = x_s + dx + return out + + def shortcut(self, x, seg): + if self.learned_shortcut: + x_s = self.conv_s(self.norm_s(x, seg)) + else: + x_s = x + return x_s + + def actvn(self, x): + return F.leaky_relu(x, 2e-1) + + +# try to put SPADE into pix2pixHD middle layers +class ResnetSPADEBlock(nn.Module): + def __init__(self, dim, semantic_nc, kernel_size=3): + super().__init__() + norm_G = 'spectralspadesyncbatch3x3' + pw = (kernel_size - 1) // 2 + self.conv_0 = nn.Conv2d(dim, dim, kernel_size=kernel_size) + self.conv_1 = nn.Conv2d(dim, dim, kernel_size=kernel_size) + self.padding = nn.ReflectionPad2d(pw) + if 'spectral' in norm_G: + self.add_module('conv_block1', spectral_norm(self.conv_0)) + self.add_module('conv_block4', spectral_norm(self.conv_1)) + + # define normalization layers + spade_config_str = norm_G.replace('spectral', '') + self.norm_0 = SPADE(spade_config_str, dim, semantic_nc) + self.norm_1 = SPADE(spade_config_str, dim, semantic_nc) + + def forward(self, x, seg): + dx = self.padding(x) + dx = self.activation(self.norm_0(self.conv_0(dx), seg)) + dx = self.padding(dx) + dx = self.activation(self.norm_1(self.conv_1(dx), seg)) + out = x + dx + + return out + + def activation(self, x): + return F.leaky_relu(x, 2e-1) + + +# ResNet block used in pix2pixHD +# We keep the same architecture as pix2pixHD.
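+# (Sketch ours, not part of the original patch.) The block is shape-preserving,
+# since each 3x3 convolution is preceded by ReflectionPad2d(kernel_size // 2);
+# with an identity stand-in for norm_layer:
+#
+#     blk = ResnetBlock(64, norm_layer=lambda m: m)
+#     out = blk(torch.randn(1, 64, 32, 32))  # -> torch.Size([1, 64, 32, 32])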
+class ResnetBlock(nn.Module): + def __init__(self, dim, norm_layer, activation=nn.ReLU(False), kernel_size=3): + super().__init__() + + pw = (kernel_size - 1) // 2 + self.conv_block = nn.Sequential( + nn.ReflectionPad2d(pw), + norm_layer(nn.Conv2d(dim, dim, kernel_size=kernel_size)), + activation, + nn.ReflectionPad2d(pw), + norm_layer(nn.Conv2d(dim, dim, kernel_size=kernel_size)), + # add an activation + activation, + ) + + def forward(self, x): + y = self.conv_block(x) + out = x + y + return out + + +# VGG architecture, used for the perceptual loss with a pretrained VGG network +class VGG19(torch.nn.Module): + def __init__(self, requires_grad=False): + super(VGG19, self).__init__() + vgg_pretrained_features = torchvision.models.vgg19(pretrained=True).features + self.slice1 = torch.nn.Sequential() + self.slice2 = torch.nn.Sequential() + self.slice3 = torch.nn.Sequential() + self.slice4 = torch.nn.Sequential() + self.slice5 = torch.nn.Sequential() + for x in range(2): + self.slice1.add_module(str(x), vgg_pretrained_features[x]) + for x in range(2, 7): + self.slice2.add_module(str(x), vgg_pretrained_features[x]) + for x in range(7, 12): + self.slice3.add_module(str(x), vgg_pretrained_features[x]) + for x in range(12, 21): + self.slice4.add_module(str(x), vgg_pretrained_features[x]) + for x in range(21, 30): + self.slice5.add_module(str(x), vgg_pretrained_features[x]) + if not requires_grad: + for param in self.parameters(): + param.requires_grad = False + + def forward(self, X): + h_relu1 = self.slice1(X) + h_relu2 = self.slice2(h_relu1) + h_relu3 = self.slice3(h_relu2) + h_relu4 = self.slice4(h_relu3) + h_relu5 = self.slice5(h_relu4) + out = [h_relu1, h_relu2, h_relu3, h_relu4, h_relu5] + return out + + +class VGGFace19(torch.nn.Module): + def __init__(self, opt, requires_grad=False): + super(VGGFace19, self).__init__() + model = torchvision.models.vgg19_bn(pretrained=False) + ckpt = torch.load(opt.vggface_checkpoint)['state_dict'] + util.copy_state_dict(ckpt, model, 'module.base.') + vgg_pretrained_features = model.features + self.slice1 = torch.nn.Sequential() + self.slice2 = torch.nn.Sequential() + self.slice3 = torch.nn.Sequential() + self.slice4 = torch.nn.Sequential() + self.slice5 = torch.nn.Sequential() + for x in range(2): + self.slice1.add_module(str(x), vgg_pretrained_features[x]) + for x in range(2, 7): + self.slice2.add_module(str(x), vgg_pretrained_features[x]) + for x in range(7, 12): + self.slice3.add_module(str(x), vgg_pretrained_features[x]) + for x in range(12, 21): + self.slice4.add_module(str(x), vgg_pretrained_features[x]) + for x in range(21, 30): + self.slice5.add_module(str(x), vgg_pretrained_features[x]) + if not requires_grad: + for param in self.parameters(): + param.requires_grad = False + + def forward(self, X): + h_relu1 = self.slice1(X) + h_relu2 = self.slice2(h_relu1) + h_relu3 = self.slice3(h_relu2) + h_relu4 = self.slice4(h_relu3) + h_relu5 = self.slice5(h_relu4) + out = [h_relu1, h_relu2, h_relu3, h_relu4, h_relu5] + return out diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/base_network.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/base_network.py new file mode 100644 index 0000000000..2eecf7d67c --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/base_network.py @@ -0,0 +1,54 @@ +import torch.nn as nn +from torch.nn
import init + + +class BaseNetwork(nn.Module): + def __init__(self): + super(BaseNetwork, self).__init__() + + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def print_network(self): + if isinstance(self, list): + self = self[0] + num_params = 0 + for param in self.parameters(): + num_params += param.numel() + print('Network [%s] was created. Total number of parameters: %.1f million. ' + 'To see the architecture, do print(network).' + % (type(self).__name__, num_params / 1000000)) + + def init_weights(self, init_type='normal', gain=0.02): + def init_func(m): + classname = m.__class__.__name__ + if classname.find('BatchNorm2d') != -1: + if hasattr(m, 'weight') and m.weight is not None: + init.normal_(m.weight.data, 1.0, gain) + if hasattr(m, 'bias') and m.bias is not None: + init.constant_(m.bias.data, 0.0) + elif hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1): + if init_type == 'normal': + init.normal_(m.weight.data, 0.0, gain) + elif init_type == 'xavier': + init.xavier_normal_(m.weight.data, gain=gain) + elif init_type == 'xavier_uniform': + init.xavier_uniform_(m.weight.data, gain=1.0) + elif init_type == 'kaiming': + init.kaiming_normal_(m.weight.data, a=0, mode='fan_in') + elif init_type == 'orthogonal': + init.orthogonal_(m.weight.data, gain=gain) + elif init_type == 'none': # uses pytorch's default init method + m.reset_parameters() + else: + raise NotImplementedError('initialization method [%s] is not implemented' % init_type) + if hasattr(m, 'bias') and m.bias is not None: + init.constant_(m.bias.data, 0.0) + + self.apply(init_func) + + # propagate to children + for m in self.children(): + if hasattr(m, 'init_weights'): + m.init_weights(init_type, gain) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/discriminator.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/discriminator.py new file mode 100644 index 0000000000..28dbb5baa7 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/discriminator.py @@ -0,0 +1,308 @@ +import torch.nn as nn +import numpy as np +import torch.nn.utils.spectral_norm as spectral_norm +import torch.nn.functional as F +from .base_network import BaseNetwork +from .sync_batchnorm import SynchronizedBatchNorm2d +from .normalization import get_nonspade_norm_layer +from ...util import util +import torch +from torch.utils.checkpoint import checkpoint + + +class MultiscaleDiscriminator(BaseNetwork): + @staticmethod + def modify_commandline_options(parser, is_train): + parser.add_argument('--netD_subarch', type=str, default='n_layer', + help='architecture of each discriminator') + parser.add_argument('--num_D', type=int, default=2, + help='number of discriminators to be used in multiscale') + opt, _ = parser.parse_known_args() + + # define properties of each discriminator of the multiscale discriminator + subnetD = util.find_class_in_module(opt.netD_subarch + 'discriminator', + 'models.networks.discriminator') + subnetD.modify_commandline_options(parser, is_train) + + return parser + + def __init__(self, opt): + super(MultiscaleDiscriminator, self).__init__() + self.opt = opt + + for i in range(opt.num_D): + subnetD = self.create_single_discriminator(opt) + self.add_module('discriminator_%d' % i, subnetD) + + def create_single_discriminator(self, opt): + 
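+        # (Comment ours.) Only the 'n_layer' PatchGAN sub-architecture is
+        # implemented; e.g. --netD multiscale --num_D 2 builds two
+        # NLayerDiscriminators, where forward() feeds the second one an
+        # average-pooled, half-resolution copy of the input via downsample().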
subarch = opt.netD_subarch + if subarch == 'n_layer': + netD = NLayerDiscriminator(opt) + else: + raise ValueError('unrecognized discriminator subarchitecture %s' % subarch) + return netD + + def downsample(self, input): + return F.avg_pool2d(input, kernel_size=3, + stride=2, padding=[1, 1], + count_include_pad=False) + + # Returns list of lists of discriminator outputs. + # The final result is of size opt.num_D x opt.n_layers_D + def forward(self, input): + result = [] + get_intermediate_features = not self.opt.no_ganFeat_loss + for name, D in self.named_children(): + out = D(input) + if not get_intermediate_features: + out = [out] + result.append(out) + input = self.downsample(input) + + return result + + +# Defines the PatchGAN discriminator with the specified arguments. +class NLayerDiscriminator(BaseNetwork): + @staticmethod + def modify_commandline_options(parser, is_train): + parser.add_argument('--n_layers_D', type=int, default=4, + help='# layers in each discriminator') + return parser + + def __init__(self, opt): + + super(NLayerDiscriminator, self).__init__() + self.opt = opt + + kw = 4 + padw = int(np.ceil((kw - 1.0) / 2)) + nf = opt.ndf + input_nc = self.compute_D_input_nc(opt) + + norm_layer = get_nonspade_norm_layer(opt, opt.norm_D) + sequence = [[nn.Conv2d(input_nc, nf, kernel_size=kw, stride=2, padding=padw), + nn.LeakyReLU(0.2, False)]] + + for n in range(1, opt.n_layers_D): + nf_prev = nf + nf = min(nf * 2, 512) + stride = 1 if n == opt.n_layers_D - 1 else 2 + sequence += [[norm_layer(nn.Conv2d(nf_prev, nf, kernel_size=kw, + stride=stride, padding=padw)), + nn.LeakyReLU(0.2, False) + ]] + + sequence += [[nn.Conv2d(nf, 1, kernel_size=kw, stride=1, padding=padw)]] + + # We divide the layers into groups to extract intermediate layer outputs + for n in range(len(sequence)): + self.add_module('model' + str(n), nn.Sequential(*sequence[n])) + + def compute_D_input_nc(self, opt): + if opt.D_input == "concat": + input_nc = opt.label_nc + opt.output_nc + if opt.contain_dontcare_label: + input_nc += 1 + if not opt.no_instance: + input_nc += 1 + else: + input_nc = 3 + return input_nc + + def forward(self, input): + results = [input] + for submodel in self.children(): + + # intermediate_output = checkpoint(submodel, results[-1]) + intermediate_output = submodel(results[-1]) + results.append(intermediate_output) + + get_intermediate_features = not self.opt.no_ganFeat_loss + if get_intermediate_features: + return results[1:] + else: + return results[-1] + + +class ImageDiscriminator(BaseNetwork): + """Defines a PatchGAN discriminator""" + def modify_commandline_options(parser, is_train): + parser.add_argument('--n_layers_D', type=int, default=4, + help='# layers in each discriminator') + return parser + + def __init__(self, opt, n_layers=3, norm_layer=nn.BatchNorm2d): + """Construct a PatchGAN discriminator + Parameters: + input_nc (int) -- the number of channels in input images + ndf (int) -- the number of filters in the last conv layer + n_layers (int) -- the number of conv layers in the discriminator + norm_layer -- normalization layer + """ + super(ImageDiscriminator, self).__init__() + use_bias = norm_layer == nn.InstanceNorm2d + if opt.D_input == "concat": + input_nc = opt.label_nc + opt.output_nc + else: + input_nc = opt.label_nc + ndf = 64 + kw = 4 + padw = 1 + sequence = [nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), nn.LeakyReLU(0.2, True)] + nf_mult = 1 + nf_mult_prev = 1 + for n in range(1, n_layers): # gradually increase the number of filters + 
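+            # (Comment ours.) The filter count doubles at each stride-2 stage,
+            # capped at 8 * ndf = 512 channels: with ndf=64 and n_layers=3 the
+            # PatchGAN stacks 64 -> 128 -> 256 here, adds one stride-1 stage
+            # to 512 below, and ends in a single-channel patch prediction map.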
nf_mult_prev = nf_mult + nf_mult = min(2 ** n, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, kernel_size=kw, stride=2, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True) + ] + + nf_mult_prev = nf_mult + nf_mult = min(2 ** n_layers, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, kernel_size=kw, stride=1, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True) + ] + + sequence += [nn.Conv2d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw)] # output 1 channel prediction map + self.model = nn.Sequential(*sequence) + + def forward(self, input): + """Standard forward.""" + return self.model(input) + + +class ProjectionDiscriminator(BaseNetwork): + @staticmethod + def modify_commandline_options(parser, is_train): + parser.add_argument('--n_layers_D', type=int, default=4, + help='# layers in each discriminator') + return parser + + def __init__(self, opt): + super().__init__() + self.opt = opt + nf = opt.ndf + input_nc = 3 + label_nc = opt.label_nc + (1 if opt.contain_dontcare_label else 0) + (0 if opt.no_instance else 1) + norm_layer = get_nonspade_norm_layer(opt, opt.norm_D) + + if opt.norm_D.startswith('spectral'): + use_spectral = True + else: + use_spectral = False + + self.enc1 = nn.Sequential(norm_layer(nn.Conv2d(input_nc, nf, kernel_size=3, stride=2, padding=1)), + nn.LeakyReLU(0.2, True)) + + self.relu = nn.LeakyReLU(0.2, True) + for i in range(2, 6): + nf_prev = nf + nf = min(nf * 2, opt.ndf * 8) + enconv = nn.Conv2d(nf_prev, nf, kernel_size=3, stride=2, padding=1) + latconv = nn.Conv2d(nf, opt.ndf * 4, kernel_size=3, stride=2, padding=1) + if use_spectral: + enconv = spectral_norm(enconv) + latconv = spectral_norm(latconv) + + self.add_module('enc' + str(i), enconv) + self.add_module('lat' + str(i), latconv) + self.add_module('norm_enc' + str(i), self.get_norm(enconv)) + self.add_module('norm_lat' + str(i), self.get_norm(latconv)) + + self.up = nn.Upsample(scale_factor=2, mode='bilinear') + + for i in range(2, 5): + finalconv = nn.Conv2d(opt.ndf * 4, opt.ndf, kernel_size=3, padding=1) + if use_spectral: + finalconv = spectral_norm(finalconv) + self.add_module('final' + str(i), finalconv) + self.add_module('norm_final' + str(i), self.get_norm(finalconv)) + + # shared True/False layer + self.tf = nn.Conv2d(opt.ndf, 1, kernel_size=1) + self.seg = nn.Conv2d(opt.ndf, opt.ndf, kernel_size=1) # do not need softmax + self.embedding = nn.Conv2d(label_nc, opt.ndf, kernel_size=1) + + def forward(self, input, segmap): + # feat11 = self.enc1(input) + feat11 = checkpoint(self.enc1, input) + feat12 = self.relu(self.norm_enc2(self.enc2(feat11))) + feat13 = self.relu(self.norm_enc3(self.enc3(feat12))) + feat14 = self.relu(self.norm_enc4(self.enc4(feat13))) + feat15 = self.relu(self.norm_enc5(self.enc5(feat14))) + feat25 = self.relu(self.norm_lat5(self.lat5(feat15))) + feat24 = self.up(feat25) + self.relu(self.norm_lat4(self.lat4(feat14))) + feat23 = self.up(feat24) + self.relu(self.norm_lat3(self.lat3(feat13))) + feat22 = self.up(feat23) + self.relu(self.norm_lat2(self.lat2(feat12))) + feat32 = self.norm_final2(self.final2(feat22)) + feat33 = self.norm_final3(self.final3(feat23)) + feat34 = self.norm_final4(self.final4(feat24)) + + pred2 = self.tf(feat32) + pred3 = self.tf(feat33) + pred4 = self.tf(feat34) + + seg2 = self.seg(feat32) + seg3 = self.seg(feat33) + seg4 = self.seg(feat34) + + if self.opt.gan_matching_feats == 'basic': + feats = [feat12, feat13, feat14, feat15] + elif 
self.opt.gan_matching_feats == 'more': + feats = [feat12, feat13, feat14, feat15, feat25, feat24, feat23, feat22] + elif self.opt.gan_matching_feats == 'chosen': + feats = [feat11, feat12, feat13, feat14, feat15] + else: + feats = [feat12, feat13, feat14, feat15, feat25, feat24, feat23, feat22, feat32, feat33, feat34] + + # calculate segmentation loss + # segmentation map embedding + segemb = self.embedding(segmap) + # downsample + segemb2 = F.adaptive_avg_pool2d(segemb, seg2.size(-1)) + segemb3 = F.adaptive_avg_pool2d(segemb, seg3.size(-1)) + segemb4 = F.adaptive_avg_pool2d(segemb, seg4.size(-1)) + + # product + pred2 += torch.mul(segemb2, seg2).sum(dim=1, keepdim=True) + pred3 += torch.mul(segemb3, seg3).sum(dim=1, keepdim=True) + pred4 += torch.mul(segemb4, seg4).sum(dim=1, keepdim=True) + + results = [pred2, pred3, pred4] + + return feats, results + + def get_out_channel(self, layer): + if hasattr(layer, 'out_channels'): + return getattr(layer, 'out_channels') + return layer.weight.size(0) + + # this function will be returned + def get_norm(self, layer): + norm_type = self.opt.norm_D + if norm_type.startswith('spectral'): + subnorm_type = norm_type[len('spectral'):] + else: + subnorm_type = norm_type + + # remove bias in the previous layer, which is meaningless + # since it has no effect after normalization + if getattr(layer, 'bias', None) is not None: + delattr(layer, 'bias') + layer.register_parameter('bias', None) + if subnorm_type == 'batch': + norm_layer = nn.BatchNorm2d(self.get_out_channel(layer), affine=True) + elif subnorm_type == 'syncbatch': + norm_layer = SynchronizedBatchNorm2d(self.get_out_channel(layer), affine=True) + elif subnorm_type == 'instance': + norm_layer = nn.InstanceNorm2d(self.get_out_channel(layer), affine=False) + else: + raise ValueError('normalization layer %s is not recognized' % subnorm_type) + + return norm_layer diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/encoder.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/encoder.py new file mode 100644 index 0000000000..a3e52242c8 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/encoder.py @@ -0,0 +1,50 @@ +import torch.nn as nn +import numpy as np +import torch.nn.functional as F +from .base_network import BaseNetwork +from .normalization import get_nonspade_norm_layer + + +class ConvEncoder(BaseNetwork): + """ Same architecture as the image discriminator """ + + def __init__(self, opt): + super().__init__() + + kw = 3 + pw = int(np.ceil((kw - 1.0) / 2)) + ndf = opt.ngf + norm_layer = get_nonspade_norm_layer(opt, opt.norm_E) + self.layer1 = norm_layer(nn.Conv2d(3, ndf, kw, stride=2, padding=pw)) + self.layer2 = norm_layer(nn.Conv2d(ndf * 1, ndf * 2, kw, stride=2, padding=pw)) + self.layer3 = norm_layer(nn.Conv2d(ndf * 2, ndf * 4, kw, stride=2, padding=pw)) + self.layer4 = norm_layer(nn.Conv2d(ndf * 4, ndf * 8, kw, stride=2, padding=pw)) + self.layer5 = norm_layer(nn.Conv2d(ndf * 8, ndf * 8, kw, stride=2, padding=pw)) + if opt.crop_size >= 256: + self.layer6 = norm_layer(nn.Conv2d(ndf * 8, ndf * 8, kw, stride=2, padding=pw)) + + self.so = s0 = 4 + self.fc_mu = nn.Linear(ndf * 8 * s0 * s0, 256) + self.fc_var = nn.Linear(ndf * 8 * s0 * s0, 256) + + self.actvn = nn.LeakyReLU(0.2, False) + self.opt = opt + + def forward(self, x): + if x.size(2) != 256 or x.size(3) != 256: + x = 
F.interpolate(x, size=(256, 256), mode='bilinear') + + x = self.layer1(x) + x = self.layer2(self.actvn(x)) + x = self.layer3(self.actvn(x)) + x = self.layer4(self.actvn(x)) + x = self.layer5(self.actvn(x)) + if self.opt.crop_size >= 256: + x = self.layer6(self.actvn(x)) + x = self.actvn(x) + + x = x.view(x.size(0), -1) + mu = self.fc_mu(x) + logvar = self.fc_var(x) + + return mu, logvar diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/generator.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/generator.py new file mode 100644 index 0000000000..ca3356c3b4 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/generator.py @@ -0,0 +1,126 @@ +import torch.nn as nn +from .base_network import BaseNetwork +from .normalization import get_nonspade_norm_layer +from .architecture import ResnetBlock as ResnetBlock +from .architecture import ResnetSPADEBlock +from torch.utils.checkpoint import checkpoint + + +class Interpolate(nn.Module): + def __init__(self, scale_factor=2, size=None, mode='bilinear'): + super(Interpolate, self).__init__() + self.interp = nn.functional.interpolate + self.size = size + self.scale_factor = scale_factor + self.mode = mode + + def forward(self, x): + if self.size is not None: + x = self.interp(x, size=self.size, mode=self.mode, align_corners=False) + else: + x = self.interp(x, scale_factor=self.scale_factor, mode=self.mode, align_corners=False) + return x + + +class RotateGenerator(BaseNetwork): + @staticmethod + def modify_commandline_options(parser, is_train): + parser.add_argument('--resnet_n_downsample', type=int, default=4, + help='number of downsampling layers in netG') + parser.add_argument('--resnet_n_blocks', type=int, default=9, + help='number of residual blocks in the global generator network') + parser.add_argument('--resnet_kernel_size', type=int, default=3, + help='kernel size of the resnet block') + parser.add_argument('--resnet_initial_kernel_size', type=int, default=7, + help='kernel size of the first convolution') + parser.set_defaults(norm_G='spectralsyncbatch') + return parser + + def __init__(self, opt): + super(RotateGenerator, self).__init__() + input_nc = 3 + + norm_layer = get_nonspade_norm_layer(opt, opt.norm_G) + activation = nn.ReLU(False) + # initial conv + self.first_layer = nn.Sequential(nn.ReflectionPad2d(opt.resnet_initial_kernel_size // 2), + norm_layer(nn.Conv2d(input_nc, opt.ngf, + kernel_size=opt.resnet_initial_kernel_size, + padding=0)), + activation) + # downsample + downsample_model = [] + + mult = 1 + for i in range(opt.resnet_n_downsample): + downsample_model += [norm_layer(nn.Conv2d(opt.ngf * mult, opt.ngf * mult * 2, + kernel_size=3, stride=2, padding=1)), + activation] + mult *= 2 + + self.downsample_layers = nn.Sequential(*downsample_model) + + # resnet blocks + resnet_model = [] + + for i in range(opt.resnet_n_blocks): + resnet_model += [ResnetBlock(opt.ngf * mult, + norm_layer=norm_layer, + activation=activation, + kernel_size=opt.resnet_kernel_size)] + + self.resnet_layers = nn.Sequential(*resnet_model) + + # upsample + + upsample_model = [] + + for i in range(opt.resnet_n_downsample): + nc_in = int(opt.ngf * mult) + nc_out = int((opt.ngf * mult) / 2) + upsample_model += [norm_layer(nn.ConvTranspose2d(nc_in, nc_out, + kernel_size=3, stride=2, + padding=1, output_padding=1)), + activation] + mult = 
mult // 2 + + self.upsample_layers = nn.Sequential(*upsample_model) + + # final output conv + self.final_layer = nn.Sequential(nn.ReflectionPad2d(3), + nn.Conv2d(nc_out, opt.output_nc, kernel_size=7, padding=0), + nn.Tanh()) + + def forward(self, input, z=None): + net = self.first_layer(input) + net = self.downsample_layers(net) + net = self.resnet_layers(net) + net = self.upsample_layers(net) + net = self.final_layer(net) + return net + + +class RotateSPADEGenerator(RotateGenerator): + def __init__(self, opt): + super(RotateSPADEGenerator, self).__init__(opt) + del self.resnet_layers + self.resnet_n_blocks = opt.resnet_n_blocks + mult = 1 + for i in range(opt.resnet_n_downsample): + mult *= 2 + for i in range(opt.resnet_n_blocks): + self.add_module('resnet_layers' + str(i), ResnetSPADEBlock(opt.ngf * mult, opt.semantic_nc)) + + def forward(self, input, seg=None): + # net = self.first_layer(input) + net = checkpoint(self.first_layer, input) + # net = self.downsample_layers(net) + net = checkpoint(self.downsample_layers, net) + for i in range(self.resnet_n_blocks): + # net = self._modules['resnet_layers' + str(i)](net, seg) + net = checkpoint(self._modules['resnet_layers' + str(i)], net, seg) + # net = self.upsample_layers(net) + net = checkpoint(self.upsample_layers, net) + # net = self.final_layer(net) + net = checkpoint(self.final_layer, net) + return net diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/loss.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/loss.py new file mode 100644 index 0000000000..17f0506e0c --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/loss.py @@ -0,0 +1,188 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from .architecture import VGG19, VGGFace19 + + +# Defines the GAN loss which uses either LSGAN or the regular GAN. 
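+# (Comment ours.) Supported gan_mode values below are 'ls', 'original', 'w'
+# and 'hinge'. For 'hinge', the discriminator terms -mean(min(D(x) - 1, 0))
+# and -mean(min(-D(x) - 1, 0)) are exactly mean(relu(1 - D(real))) and
+# mean(relu(1 + D(fake))), while the generator minimises -mean(D(fake)).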
+# When LSGAN is used, it is basically the same as MSELoss, +# but it abstracts away the need to create the target label tensor +# that has the same size as the input +class GANLoss(nn.Module): + def __init__(self, gan_mode, target_real_label=1.0, target_fake_label=0.0, + tensor=torch.FloatTensor, opt=None): + super(GANLoss, self).__init__() + self.real_label = target_real_label + self.fake_label = target_fake_label + self.real_label_tensor = None + self.fake_label_tensor = None + self.zero_tensor = None + self.Tensor = tensor + self.gan_mode = gan_mode + self.opt = opt + if gan_mode == 'ls': + pass + elif gan_mode == 'original': + pass + elif gan_mode == 'w': + pass + elif gan_mode == 'hinge': + pass + else: + raise ValueError('Unexpected gan_mode {}'.format(gan_mode)) + + def get_target_tensor(self, input, target_is_real): + if target_is_real: + if self.real_label_tensor is None: + self.real_label_tensor = self.Tensor(1).fill_(self.real_label) + self.real_label_tensor.requires_grad_(False) + return self.real_label_tensor.expand_as(input) + else: + if self.fake_label_tensor is None: + self.fake_label_tensor = self.Tensor(1).fill_(self.fake_label) + self.fake_label_tensor.requires_grad_(False) + return self.fake_label_tensor.expand_as(input) + + def get_zero_tensor(self, input): + if self.zero_tensor is None: + self.zero_tensor = self.Tensor(1).fill_(0) + self.zero_tensor.requires_grad_(False) + return self.zero_tensor.expand_as(input) + + def loss(self, input, target_is_real, for_discriminator=True): + if self.gan_mode == 'original': # cross entropy loss + target_tensor = self.get_target_tensor(input, target_is_real) + loss = F.binary_cross_entropy_with_logits(input, target_tensor) + return loss + elif self.gan_mode == 'ls': + target_tensor = self.get_target_tensor(input, target_is_real) + return F.mse_loss(input, target_tensor) + elif self.gan_mode == 'hinge': + if for_discriminator: + if target_is_real: + minval = torch.min(input - 1, self.get_zero_tensor(input)) + loss = -torch.mean(minval) + else: + minval = torch.min(-input - 1, self.get_zero_tensor(input)) + loss = -torch.mean(minval) + else: + assert target_is_real, "The generator's hinge loss must be aiming for real" + loss = -torch.mean(input) + return loss + else: + # wgan + if target_is_real: + return -input.mean() + else: + return input.mean() + + def __call__(self, input, target_is_real, for_discriminator=True): + # computing the loss is a bit complicated because |input| may not be + # a tensor but a list of tensors, as in the multiscale discriminator case + if isinstance(input, list): + loss = 0 + for pred_i in input: + if isinstance(pred_i, list): + pred_i = pred_i[-1] + loss_tensor = self.loss(pred_i, target_is_real, for_discriminator) + bs = 1 if len(loss_tensor.size()) == 0 else loss_tensor.size(0) + new_loss = torch.mean(loss_tensor.view(bs, -1), dim=1) + loss += new_loss + return loss / len(input) + else: + return self.loss(input, target_is_real, for_discriminator) + + +# Perceptual loss that uses a pretrained VGG network +class VGGLoss(nn.Module): + def __init__(self, opt): + super(VGGLoss, self).__init__() + if opt.face_vgg: + self.vgg = VGGFace19(opt).cuda() + else: + self.vgg = VGG19().cuda() + self.criterion = nn.L1Loss() + self.weights = [1.0 / 32, 1.0 / 16, 1.0 / 8, 1.0 / 4, 1.0] + + def forward(self, x, y, layer=0): + x_vgg, y_vgg = self.vgg(x), self.vgg(y) + loss = 0 + for i in range(len(x_vgg)): + if i >= layer: + loss += self.weights[i] * self.criterion(x_vgg[i], y_vgg[i].detach()) + return loss + + +class
VGGwithContrastiveLoss(VGGLoss): + def __init__(self, opt): + super(VGGwithContrastiveLoss, self).__init__(opt) + self.closs = L2ContrastiveLoss(opt.l2_margin) + + def forward(self, x, y, layer=0): + x_vgg, y_vgg = self.vgg(x), self.vgg(y) + loss = 0 + for i in range(len(x_vgg)): + if i >= layer: + loss += self.weights[i] * self.criterion(x_vgg[i], y_vgg[i].detach()) + + if i == len(x_vgg) - 1: + x_feature = x_vgg[i].view(x_vgg[i].size(0), -1) + y_feature = y_vgg[i].view(y_vgg[i].size(0), -1) + loss += self.closs(x_feature, y_feature.detach())  # accumulate the contrastive term + return loss + + +# KL Divergence loss used in VAE with an image encoder +class KLDLoss(nn.Module): + def forward(self, mu, logvar): + return -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp()) + + +class L2ContrastiveLoss(nn.Module): + """ + Compute L2 contrastive loss + """ + + def __init__(self, margin=1, max_violation=False): + super(L2ContrastiveLoss, self).__init__() + self.margin = margin + + self.sim = self.l2_sim + + self.max_violation = max_violation + + def forward(self, feature1, feature2): + # compute the pairwise L2 distance matrix between the two feature batches + feature1 = self.l2_norm(feature1) + feature2 = self.l2_norm(feature2) + scores = self.sim(feature1, feature2) + # diagonal = scores.diag().view(feature1.size(0), 1) + diagonal_dist = scores.diag() + # d1 = diagonal.expand_as(scores) + + # compare every diagonal (matched-pair) distance to the others in its column + cost_s = (self.margin - scores).clamp(min=0) + + # clear diagonals + mask = torch.eye(scores.size(0)) > .5 + I = mask.clone() + if torch.cuda.is_available(): + I = I.cuda() + cost_s = cost_s.masked_fill_(I, 0) + + # keep the maximum violating negative for each query + if self.max_violation: + cost_s = cost_s.max(1)[0] + + loss = (torch.sum(cost_s ** 2) + torch.sum(diagonal_dist ** 2)) / (2 * feature1.size(0)) + + return loss + + def l2_norm(self, x): + x_norm = F.normalize(x, p=2, dim=1) + return x_norm + + def l2_sim(self, feature1, feature2): + Feature = feature1.expand(feature1.size(0), feature1.size(0), feature1.size(1)).transpose(0, 1) + return torch.norm(Feature - feature2, p=2, dim=2) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/normalization.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/normalization.py new file mode 100644 index 0000000000..cff06f4148 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/normalization.py @@ -0,0 +1,106 @@ +import re +import torch.nn as nn +import torch.nn.functional as F +from .sync_batchnorm import SynchronizedBatchNorm2d +import torch.nn.utils.spectral_norm as spectral_norm + + +# Returns a function that creates a normalization function +# that does not condition on semantic map +def get_nonspade_norm_layer(opt, norm_type='instance'): + # helper function to get # output channels of the previous layer + def get_out_channel(layer): + if hasattr(layer, 'out_channels'): + return getattr(layer, 'out_channels') + return layer.weight.size(0) + + # this function will be returned + def add_norm_layer(layer): + nonlocal norm_type + if norm_type.startswith('spectral'): + layer = spectral_norm(layer) + subnorm_type = norm_type[len('spectral'):] + else: + subnorm_type = norm_type + + if subnorm_type == 'none' or len(subnorm_type) == 0: + return layer + + # remove bias in the previous layer, which is meaningless + # since it has no
effect after normalization + if getattr(layer, 'bias', None) is not None: + delattr(layer, 'bias') + layer.register_parameter('bias', None) + + if subnorm_type == 'batch': + norm_layer = nn.BatchNorm2d(get_out_channel(layer), affine=True) + elif subnorm_type == 'syncbatch': + norm_layer = SynchronizedBatchNorm2d(get_out_channel(layer), affine=True) + elif subnorm_type == 'instance': + norm_layer = nn.InstanceNorm2d(get_out_channel(layer), affine=False) + else: + raise ValueError('normalization layer %s is not recognized' % subnorm_type) + + return nn.Sequential(layer, norm_layer) + + return add_norm_layer + + +# Creates SPADE normalization layer based on the given configuration +# SPADE consists of two steps. First, it normalizes the activations using +# your favorite normalization method, such as Batch Norm or Instance Norm. +# Second, it applies scale and bias to the normalized output, conditioned on +# the segmentation map. +# The format of |config_text| is spade(norm)(ks), where +# (norm) specifies the type of parameter-free normalization. +# (e.g. syncbatch, batch, instance) +# (ks) specifies the size of kernel in the SPADE module (e.g. 3x3) +# Example |config_text| will be spadesyncbatch3x3, or spadeinstance5x5. +# Also, the other arguments are +# |norm_nc|: the #channels of the normalized activations, hence the output dim of SPADE +# |label_nc|: the #channels of the input semantic map, hence the input dim of SPADE +class SPADE(nn.Module): + def __init__(self, config_text, norm_nc, label_nc): + super().__init__() + + assert config_text.startswith('spade') + parsed = re.search('spade(\D+)(\d)x\d', config_text) + param_free_norm_type = str(parsed.group(1)) + ks = int(parsed.group(2)) + + if param_free_norm_type == 'instance': + self.param_free_norm = nn.InstanceNorm2d(norm_nc, affine=False) + elif param_free_norm_type == 'syncbatch': + self.param_free_norm = SynchronizedBatchNorm2d(norm_nc, affine=False) + elif param_free_norm_type == 'batch': + self.param_free_norm = nn.BatchNorm2d(norm_nc, affine=False) + else: + raise ValueError('%s is not a recognized param-free norm type in SPADE' + % param_free_norm_type) + + # The dimension of the intermediate embedding space. Yes, hardcoded. + nhidden = 128 + + pw = ks // 2 + self.mlp_shared = nn.Sequential( + nn.Conv2d(label_nc, nhidden, kernel_size=ks, padding=pw), + nn.ReLU() + ) + self.mlp_gamma = nn.Conv2d(nhidden, norm_nc, kernel_size=ks, padding=pw) + self.mlp_beta = nn.Conv2d(nhidden, norm_nc, kernel_size=ks, padding=pw) + + def forward(self, x, segmap): + + # Part 1. generate parameter-free normalized activations + normalized = self.param_free_norm(x) + + # Part 2. 
+        segmap = F.interpolate(segmap, size=x.size()[2:], mode='nearest')
+        actv = self.mlp_shared(segmap)
+        gamma = self.mlp_gamma(actv)
+        beta = self.mlp_beta(actv)
+
+        # apply scale and bias
+        out = normalized * (1 + gamma) + beta
+
+        return out
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/render.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/render.py
new file mode 100644
index 0000000000..e57350490e
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/render.py
@@ -0,0 +1,584 @@
+import os.path as osp
+import numpy as np
+import torch
+import scipy.io as sio
+import pickle
+from ...data import curve
+import skimage.transform as trans
+from math import cos, sin, atan2, asin
+import neural_renderer as nr
+
+
+def _get_suffix(filename):
+    """a.jpg -> jpg"""
+    pos = filename.rfind('.')
+    if pos == -1:
+        return ''
+    return filename[pos + 1:]
+
+
+def _load(fp):
+    suffix = _get_suffix(fp)
+    if suffix == 'npy':
+        return np.load(fp)
+    elif suffix == 'pkl':
+        return pickle.load(open(fp, 'rb'))
+
+
+def P2sRt(P):
+    ''' decomposing camera matrix P.
+    Args:
+        P: (3, 4). Affine Camera Matrix.
+    Returns:
+        s: scale factor.
+        R: (3, 3). rotation matrix.
+        t3d: (3,). 3d translation.
+    '''
+    t3d = P[:, 3]
+    R1 = P[0:1, :3]
+    R2 = P[1:2, :3]
+    s = (np.linalg.norm(R1) + np.linalg.norm(R2)) / 2.0
+    r1 = R1 / np.linalg.norm(R1)
+    r2 = R2 / np.linalg.norm(R2)
+    r3 = np.cross(r1, r2)
+
+    R = np.concatenate((r1, r2, r3), 0)
+    return s, R, t3d
+
+
+def matrix2angle(R):
+    ''' compute three Euler angles from a Rotation Matrix. Ref: http://www.gregslabaugh.net/publications/euler.pdf
+    Args:
+        R: (3,3). rotation matrix
+    Returns:
+        x: yaw
+        y: pitch
+        z: roll
+    '''
+    if R[2, 0] != 1 and R[2, 0] != -1:
+        x = -asin(max(-1, min(R[2, 0], 1)))
+        y = atan2(R[2, 1] / cos(x), R[2, 2] / cos(x))
+        z = atan2(R[1, 0] / cos(x), R[0, 0] / cos(x))
+
+    else:  # Gimbal lock
+        z = 0  # can be anything
+        if R[2, 0] == -1:
+            x = np.pi / 2
+            y = z + atan2(R[0, 1], R[0, 2])
+        else:
+            x = -np.pi / 2
+            y = -z + atan2(-R[0, 1], -R[0, 2])
+
+    return [x, y, z]
+
+
+def angle2matrix(angles):
+    ''' get rotation matrix from three rotation angles (radian). The same as in 3DDFA.
+    Args:
+        angles: [3,]. x, y, z angles
+            x: yaw.
+            y: pitch.
+            z: roll.
+    Returns:
+        R: 3x3. rotation matrix.
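+        Note: in this implementation angles[0] is applied about the y-axis and
+        angles[1] about the x-axis (see the swapped assignment below), with the
+        composition R = Rz . Ry . Rx.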
+    '''
+    y, x, z = angles[0], angles[1], angles[2]
+
+    # x
+    Rx = np.array([[1, 0, 0],
+                   [0, cos(x), -sin(x)],
+                   [0, sin(x), cos(x)]])
+    # y
+    Ry = np.array([[cos(y), 0, sin(y)],
+                   [0, 1, 0],
+                   [-sin(y), 0, cos(y)]])
+    # z
+    Rz = np.array([[cos(z), -sin(z), 0],
+                   [sin(z), cos(z), 0],
+                   [0, 0, 1]])
+    R = Rz.dot(Ry).dot(Rx)
+    return R.astype(np.float32)
+
+
+class Render(object):
+    def __init__(self, opt):
+        self.opt = opt
+        self.render_size = opt.crop_size
+        self.d = './algorithm/DDFA/train.configs'
+        w_shp = _load(osp.join(self.d, 'w_shp_sim.npy'))
+        w_exp = _load(osp.join(self.d, 'w_exp_sim.npy'))  # simplified version
+        u_shp = _load(osp.join(self.d, 'u_shp.npy'))
+        u_exp = _load(osp.join(self.d, 'u_exp.npy'))
+        self.keypoints = _load(osp.join(self.d, 'keypoints_sim.npy'))
+        self.pose_noise = getattr(opt, 'pose_noise', False)
+        self.large_pose = getattr(opt, 'large_pose', False)
+        u = u_shp + u_exp
+        tri = sio.loadmat('./algorithm/DDFA/visualize/tri.mat')['tri']  # 3 * 53215
+        faces_np = np.expand_dims(tri.T, axis=0).astype(np.int32) - 1
+
+        self.std_size = 120
+
+        # rendering is pinned to GPU 0
+        opt.gpu_ids = 0
+
+        self.current_gpu = opt.gpu_ids
+        with torch.cuda.device(self.current_gpu):
+            self.faces = torch.from_numpy(faces_np).cuda()
+            self.renderer = nr.Renderer(camera_mode='look', image_size=self.render_size, perspective=False,
+                                        light_intensity_directional=0, light_intensity_ambient=1)
+            self.u_cuda = torch.from_numpy(u.astype(np.float32)).cuda()
+            self.w_shp_cuda = torch.from_numpy(w_shp.astype(np.float32)).cuda()
+            self.w_exp_cuda = torch.from_numpy(w_exp.astype(np.float32)).cuda()
+
+    def random_p(self, s, angle):
+
+        if np.random.randint(0, 2) == 0:
+            angle[0] += np.random.uniform(-0.965, -0.342, 1)[0]
+        else:
+            angle[0] += np.random.uniform(0.342, 0.965, 1)[0]
+        angle[0] = max(-1.2, min(angle[0], 1.2))
+        random_2 = np.random.uniform(-0.5, 0.5, 1)[0]
+        angle[1] += random_2
+        angle[1] = max(-1.0, min(angle[1], 1.0))
+        p = angle2matrix(angle) * s
+        return p
+
+    def assign_large(self, s, angle):
+        if np.random.randint(0, 2) == 0:
+            angle[0] = np.random.uniform(-1.05, -0.95, 1)[0]
+        else:
+            angle[0] = np.random.uniform(0.95, 1.05, 1)[0]
+        angle[0] = max(-1.2, min(angle[0], 1.2))
+        random_2 = np.random.uniform(-0.5, 0.5, 1)[0]
+        angle[1] += random_2
+        angle[1] = max(-1.0, min(angle[1], 1.0))
+        p = angle2matrix(angle) * s
+        return p
+
+    def _parse_param(self, param, pose_noise=False, frontal=True,
+                     large_pose=False, yaw_pose=None, pitch_pose=None):
+        """Works for both numpy arrays and tensors."""
+        p_ = param[:12].reshape(3, -1)
+        p = p_[:, :3]
+        s, R, t3d = P2sRt(p_)
+        angle = matrix2angle(R)
+        original_angle = angle[0]
+        if yaw_pose is not None or pitch_pose is not None:
+            if yaw_pose is not None:
+                angle[0] = yaw_pose
+            if pitch_pose is not None:
+                angle[1] = pitch_pose
+            p = angle2matrix(angle) * s
+        else:
+            if frontal:
+                angle[0] = 0
+                if angle[1] < 0:
+                    angle[1] = 0
+                p = angle2matrix(angle) * s
+            if pose_noise:
+                if
frontal: + if np.random.randint(0, 5): + p = self.random_p(s, angle) + else: + p = self.random_p(s, angle) + elif large_pose: + if frontal: + if np.random.randint(0, 5): + p = self.assign_large(s, angle) + else: + p = self.assign_large(s, angle) + + offset = p_[:, -1].reshape(3, 1) + alpha_shp = param[12:52].reshape(-1, 1) + alpha_exp = param[52:-4].reshape(-1, 1) + box = param[-4:] + return p, offset, alpha_shp, alpha_exp, box, original_angle + + def affine_align(self, landmark=None, **kwargs): + # M = None + src = np.array([ + [38.2946, 51.6963], + [73.5318, 51.5014], + [56.0252, 71.7366], + [41.5493, 92.3655], + [70.7299, 92.2041]], dtype=np.float32) + src = src * 290 / 112 + src[:, 0] += 50 + src[:, 1] += 60 + src = src / 400 * self.render_size + dst = landmark.astype(np.float32) + tform = trans.SimilarityTransform() + tform.estimate(dst, src) + M2 = tform.params[0:2, :] + with torch.cuda.device(self.current_gpu): + M2 = torch.from_numpy(M2).float().cuda() + return M2 + + def texture_vertices_to_faces(self, tex_input, faces): + # tex_input: (B, N, 2, 2, 2, C) + # faces: (faceN, 3) + faces = faces.long() + tex_out = tex_input[:, faces[0, :, 0], :] + tex_input[:, faces[0, :, 1], :] + tex_input[:, faces[0, :, 2], :] + return tex_out / 3.0 + + def compute_tri_normal(self, vertex, tri): + # Unit normals to the faces + # vertex : 3xvertex_num + # tri : 3xtri_num + + vt1_indices, vt2_indices, vt3_indices = torch.split(tri.t(), split_size_or_sections=1, dim=1) + + vt1 = vertex[vt1_indices[:, 0], :] + vt2 = vertex[vt2_indices[:, 0], :] + vt3 = vertex[vt3_indices[:, 0], :] + + normalf = (vt2 - vt1).cross(vt3 - vt1) + normalf = torch.nn.functional.normalize(normalf, dim=1, p=2) + + return normalf + + def vertices_rescale(self, v, roi_bbox): + vertices = v.clone() + sx, sy, ex, ey = roi_bbox + scale_x = (ex - sx) / 120 + scale_y = (ey - sy) / 120 + vertices[0, :] = vertices[0, :] * scale_x + sx + vertices[1, :] = vertices[1, :] * scale_y + sy + s = (scale_x + scale_y) / 2 + vertices[2, :] *= s + return vertices + + def get_five_points(self, vertices): + indexs = [4150, 11744, 8191, 5650, 10922] + five_points = np.zeros((5, 2)) + for i, idx in enumerate(indexs): + five_points[i, :] = vertices[0:2, idx] + return five_points + + def get_68_points(self, vertices): + vertixes = vertices.T.flatten() + vertice_68 = vertixes[self.keypoints].reshape(-1, 3) + vertice_68 = vertice_68.astype(np.int) + return vertice_68 + + def torch_get_68_points(self, vertices): + vertixes = vertices.transpose(1, 2).contiguous() + vertixes = vertixes.view(vertixes.size(0), -1) + vertice_68 = vertixes[:, self.keypoints].reshape(vertices.size(0), -1, 3) + return vertice_68 + + def transform_vertices(self, M, vertices): + # M = M.float() + v_size = vertices.size() + # M = torch.Tensor(M).cuda() + with torch.cuda.device(self.current_gpu): + M = M.float().cuda() + R = M[:, :2] + t = M[:, 2] + vertices2 = vertices.clone() + vertices2 = vertices2.float() + vertices2[:2, :] = R.mm(vertices2[:2, :]) + t.repeat(v_size[1], 1).t() + return vertices2 + + def generate_vertices_and_rescale_to_img(self, param, pose_noise=False, + mean_shp=False, mean_exp=False, frontal=True, large_pose=False, + yaw_pose=None, pitch_pose=None): + p, offset, alpha_shp, alpha_exp, roi_bbox, original_angle = self._parse_param(param, pose_noise=pose_noise, + frontal=frontal, + large_pose=large_pose, + yaw_pose=yaw_pose, + pitch_pose=pitch_pose) + if mean_shp: + alpha_shp.fill(0.0) + if mean_exp: + alpha_exp.fill(0.0) + with torch.cuda.device(self.current_gpu): + 
p = torch.from_numpy(p.astype(np.float32)).cuda()
+            alpha_shp = torch.from_numpy(alpha_shp.astype(np.float32)).cuda()
+            alpha_exp = torch.from_numpy(alpha_exp.astype(np.float32)).cuda()
+            offset = torch.from_numpy(offset.astype(np.float32)).cuda()
+
+            vertices = p.matmul(
+                (self.u_cuda + self.w_shp_cuda.matmul(alpha_shp) +
+                 self.w_exp_cuda.matmul(alpha_exp)).view(-1, 3).t()) + offset
+
+        vertices[1, :] = self.std_size + 1 - vertices[1, :]
+        vertices = self.vertices_rescale(vertices, roi_bbox)
+
+        return vertices, original_angle
+
+    def flip_normalize_vertices(self, vertices):
+        # flip and normalize vertices
+        vertices[1, :] = self.render_size - vertices[1, :] - 1
+        vertices[:2, :] = vertices[:2, :] / (self.render_size / 2.0) - 1.0
+        vertices[2, :] = (vertices[2, :] - vertices[2, :].min()) / (vertices[2, :].max() - vertices[2, :].min()) * 2 - 1
+        vertices[2, :] = -1.0 * vertices[2, :]
+        vertices = vertices.t().unsqueeze(0)
+        return vertices
+
+    def get_render_from_vertices(self, img_ori, vertices_in_ori_img):
+        c, h, w = img_ori.size()
+        img_ori = img_ori.clone().permute(1, 2, 0)
+
+        # sample per-vertex colors from the original image at the vertex locations
+        textures = img_ori[vertices_in_ori_img[1, :].round().clamp(0, h - 1).long(),
+                           vertices_in_ori_img[0, :].round().clamp(0, w - 1).long(), :]
+
+        N = textures.shape[0]
+        with torch.cuda.device(self.current_gpu):
+            textures = textures.cuda().view(1, N, 1, 1, 1, 3)
+            textures = textures.expand(1, N, 2, 2, 2, 3)
+            textures = textures.float()
+        tex_a = self.texture_vertices_to_faces(textures, self.faces)
+
+        return tex_a
+
+    def _forward(self, param_file, img_ori, M=None,
+                 pose_noise=True, mean_exp=False, mean_shp=False, align=True, frontal=True,
+                 large_pose=False, yaw_pose=None, pitch_pose=None):
+        '''
+        img_ori: rgb image, normalized within 0-1, h * w * 3
+        return: render image, bgr
+        '''
+        param = np.fromfile(param_file, sep=' ')
+
+        vertices, original_angle = self.generate_vertices_and_rescale_to_img(param, pose_noise=pose_noise,
+                                                                             mean_shp=mean_shp, mean_exp=mean_exp,
+                                                                             frontal=frontal,
+                                                                             large_pose=large_pose, yaw_pose=yaw_pose,
+                                                                             pitch_pose=pitch_pose)
+
+        if not (pose_noise or mean_exp or mean_shp or frontal):
+            if M is not None:
+                vertices = self.transform_vertices(M, vertices)
+            else:
+                five_points = self.get_five_points(vertices.cpu().numpy())
+                M = self.affine_align(five_points)
+                vertices = self.transform_vertices(M, vertices)
+            vertices_in_ori_img = vertices.clone()
+            align_vertices = vertices.clone()
+        else:
+            vertices_in_ori_img, _ = self.generate_vertices_and_rescale_to_img(param, pose_noise=False,
+                                                                               mean_shp=False, mean_exp=False,
+                                                                               frontal=False, large_pose=False)
+            if M is not None:
+                vertices_in_ori_img = self.transform_vertices(M, vertices_in_ori_img)
+            else:
+                five_points = self.get_five_points(vertices_in_ori_img.cpu().numpy())
+                M = self.affine_align(five_points)
+                vertices_in_ori_img = self.transform_vertices(M, vertices_in_ori_img)
+
+            five_points = self.get_five_points(vertices.cpu().numpy())
+            M_0 = self.affine_align(five_points)
+
+            if align:
+                vertices = self.transform_vertices(M_0, vertices)
+                align_vertices = vertices.clone()
+            else:
+                align_vertices = vertices.clone()
+
align_vertices = self.transform_vertices(M_0, align_vertices) + vertices = self.transform_vertices(M, vertices) + + with torch.cuda.device(self.current_gpu): + img_ori = img_ori.cuda() + c, h, w = img_ori.size() + assert h == w + + vertices_in_ori_img[:2, :] = vertices_in_ori_img[:2, :] / self.render_size * h + # original image size is 400 * 400 * 3 + + # original image size is 400 * 400 * 3 + vertices_out = vertices.clone() + tex_a = self.get_render_from_vertices(img_ori, vertices_in_ori_img) + vertices = self.flip_normalize_vertices(vertices) + vertices_in_ori_img[:2, :] = vertices_in_ori_img[:2, :] / h * self.render_size + return tex_a, vertices, vertices_out, vertices_in_ori_img, align_vertices, original_angle + + def rotate_render(self, params, images, M=None, with_BG=False, pose_noise=False, large_pose=False, + align=True, frontal=True, erode=True, grey_background=False, avg_BG=True, + yaw_pose=None, pitch_pose=None): + + bz, c, w, h = images.size() + pose_noise = self.pose_noise + large_pose = self.large_pose + face_size = self.faces.size() + self.faces_use = self.faces.expand(bz, face_size[1], face_size[2]) + + # get render color vertices and normal vertices information, get original texs + vertices = [] + vertices_out = [] + vertices_in_ori_img = [] + vertices_aligned_normal = [] + vertices_aligned_out = [] + vertices_ori_normal = [] + texs = [] + original_angles = torch.zeros(bz) + with torch.no_grad(): + for n in range(bz): + tex_a, vertice, vertice_out, vertice_in_ori_img, align_vertice, original_angle \ + = self._forward(params[n], images[n], M[n], + pose_noise=pose_noise, align=align, frontal=frontal, + large_pose=large_pose, yaw_pose=yaw_pose, pitch_pose=pitch_pose) + vertices.append(vertice) + vertices_out.append(vertice_out) + vertices_in_ori_img.append(vertice_in_ori_img.clone()) + vertice2 = self.flip_normalize_vertices(vertice_in_ori_img.clone()) + vertices_ori_normal.append(vertice2) + vertices_aligned_out.append(align_vertice) + align_vertice_normal = self.flip_normalize_vertices(align_vertice.clone()) + vertices_aligned_normal.append(align_vertice_normal.clone()) + texs.append(tex_a) + original_angles[n] = original_angle + + vertices = torch.cat(vertices, 0) + vertices_aligned_normal = torch.cat(vertices_aligned_normal, 0) + vertices_ori_normal = torch.cat(vertices_ori_normal, 0) + + vertices_in_ori_img = torch.stack(vertices_in_ori_img, 0) + vertices_aligned_out = torch.stack(vertices_aligned_out, 0) + + texs = torch.cat(texs, 0) + texs_old = texs.clone() + + # erode the original mask and render again + rendered_images_erode = None + if erode: + + with torch.cuda.device(self.current_gpu): + rendered_images, depths, masks, = self.renderer(vertices_ori_normal, self.faces_use, texs) + # rendered_images: batch * 3 * h * w, masks: batch * h * w + masks_erode = self.generate_erode_mask(masks, kernal_size=self.opt.erode_kernel) + rendered_images = rendered_images.cpu() + Rd_a = rendered_images.clone() + if grey_background: + rendered_images_erode = masks_erode * rendered_images + else: + + inv_masks_erode = (torch.ones_like(masks_erode) - (masks_erode)).float() + if avg_BG: + contentsum = torch.sum(torch.sum(masks_erode * rendered_images, 3), 2) + sumsum = torch.sum(torch.sum(masks_erode, 3), 2) + contentsum[contentsum == 0] = 0.5 + sumsum[sumsum == 0] = 1 + masked_sum = contentsum / sumsum + masked_BG = masked_sum.unsqueeze(2).unsqueeze(3).expand(rendered_images.size()) + else: + masked_BG = 0.5 + rendered_images_erode = masks_erode * rendered_images + inv_masks_erode * 
masked_BG + + texs_a_crop = [] + for n in range(bz): + tex_a_crop = self.get_render_from_vertices(rendered_images_erode[n], vertices_in_ori_img[n]) + texs_a_crop.append(tex_a_crop) + texs = torch.cat(texs_a_crop, 0) + + # render face to rotated pose + with torch.cuda.device(self.current_gpu): + rendered_images, depths, masks, = self.renderer(vertices, self.faces_use, texs) + + # add mask to rotated + masks_erode = self.generate_erode_mask(masks, kernal_size=5) + inv_masks_erode = (torch.ones_like(masks_erode) - masks_erode).float() + rendered_images = rendered_images.cpu() + if with_BG: + images = torch.nn.functional.interpolate(images, size=(self.render_size)) + rendered_images = masks_erode * rendered_images + inv_masks_erode * images # 3 * h * w + else: + if grey_background: + if np.random.randint(0, 4): + rendered_images = masks_erode * rendered_images + else: + if avg_BG: + contentsum = torch.sum(torch.sum(masks_erode * rendered_images, 3), 2) + sumsum = torch.sum(torch.sum(masks_erode, 3), 2) + contentsum[contentsum == 0] = 0.5 + sumsum[sumsum == 0] = 1 + masked_sum = contentsum / sumsum + masked_BG = masked_sum.unsqueeze(2).unsqueeze(3).expand(rendered_images.size()) + else: + masked_BG = 0.5 + rendered_images = masks_erode * rendered_images + inv_masks_erode * masked_BG + + # get rendered face vertices + texs_b = [] + for n in range(bz): + tex_b = self.get_render_from_vertices(rendered_images[n], vertices_out[n]) + texs_b.append(tex_b) + texs_b = torch.cat(texs_b, 0) + + # render back + with torch.cuda.device(self.current_gpu): + rendered_images_rotate, depths1, masks1, = self.renderer(vertices_ori_normal, self.faces_use, texs_b) + # rendered_images: batch * 3 * h * w, masks: batch * h * w + rendered_images_rotate_artifacts, depths1, masks1, = self.renderer(vertices_aligned_normal, self.faces_use, + texs_old) + # rendered_images: batch * 3 * h * w, masks: batch * h * w + rendered_images_double, depths2, masks2, = self.renderer(vertices_aligned_normal, self.faces_use, texs_b) + # rendered_images: batch * 3 * h * w, masks: batch * h * w + + masks2 = masks2.unsqueeze(1) + inv_masks2 = (torch.ones_like(masks2) - masks2).float().cpu() + # BG = inv_masks2 * images + if grey_background: + masks1 = masks1.unsqueeze(1) + + inv_masks1 = (torch.ones_like(masks1) - masks1).float() + + rendered_images_rotate = (inv_masks1 * 0.5 + rendered_images_rotate).clamp(0, 1) + rendered_images_double = (inv_masks2 * 0.5 + rendered_images_double).clamp(0, 1) + + artifacts = rendered_images_rotate_artifacts + return rendered_images_rotate, rendered_images_double, self.torch_get_68_points( + vertices_in_ori_img), self.torch_get_68_points( + vertices_aligned_out), rendered_images_erode, original_angles, Rd_a, artifacts + + def generate_erode_mask(self, masks, kernal_size=5): + masks = masks.unsqueeze(1) + masks = masks.cpu() + with torch.no_grad(): + Conv = torch.nn.Conv2d(1, 1, kernal_size, padding=(kernal_size // 2), bias=False) + Conv.weight.fill_(1 / (kernal_size * kernal_size)) + masks2 = Conv(masks) + random_start1 = np.random.randint(50, 100) + masks[:, :, random_start1:self.render_size - 10, :] = masks2[:, :, random_start1:self.render_size - 10, :] + masks = (masks > np.random.uniform(0.8, 0.99)).float() + return masks + + def get_seg_map(self, vertices, no_guassian=False, size=256): + landmarks = self.torch_get_68_points(vertices) + landmarks = landmarks[:, :, :2].cpu().numpy().astype(np.float) + all_heatmap = [] + all_orig_heatmap = [] + for i in range(landmarks.shape[0]): + heatmap = 
curve.points_to_heatmap_68points(landmarks[i], 13, size, self.opt.heatmap_size) + heatmap2 = curve.combine_map(heatmap, no_guassian=no_guassian) + all_heatmap.append(heatmap2) + all_orig_heatmap.append(heatmap) + all_heatmap = np.stack(all_heatmap, axis=0) + all_orig_heatmap = np.stack(all_orig_heatmap, axis=0) + all_heatmap = torch.from_numpy(all_heatmap.astype(np.float32)).cuda() + all_orig_heatmap = torch.from_numpy(all_orig_heatmap.astype(np.float32)).cuda() + all_orig_heatmap = all_orig_heatmap.permute(0, 3, 1, 2) + all_orig_heatmap[all_orig_heatmap > 0] = 1.0 + return all_heatmap, all_orig_heatmap diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/rotate_render.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/rotate_render.py new file mode 100644 index 0000000000..c32239200d --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/rotate_render.py @@ -0,0 +1,115 @@ +import torch +import pickle +import numpy as np +from .render import osp, Render + + +def _get_suffix(filename): + """a.jpg -> jpg""" + pos = filename.rfind('.') + if pos == -1: + return '' + return filename[pos + 1:] + + +def _load(fp): + suffix = _get_suffix(fp) + if suffix == 'npy': + return np.load(fp) + elif suffix == 'pkl': + return pickle.load(open(fp, 'rb')) + + +class TestRender(Render): + + def __init__(self, opt): + super(TestRender, self).__init__(opt) + self.keypoints_106 = _load(osp.join(self.d, '106_index.npy')) + + def torch_get_106_points(self, vertices): + vertixes = vertices.transpose(1, 2).contiguous() + vertixes = vertixes.view(vertixes.size(0), -1) + vertice_106 = vertixes[:, self.keypoints_106].reshape(vertices.size(0), -1, 3) + return vertice_106 + + def rotate_render(self, params, images, M=None, with_BG=False, pose_noise=False, large_pose=False, + align=True, frontal=True, erode=True, grey_background=False, avg_BG=True, + yaw_pose=None, pitch_pose=None): + + bz, c, w, h = images.size() + + face_size = self.faces.size() + self.faces_use = self.faces.expand(bz, face_size[1], face_size[2]) + + # get render color vertices and normal vertices information, get original texs + vertices = [] + vertices_out = [] + vertices_in_ori_img = [] + vertices_aligned_normal = [] + vertices_aligned_out = [] + vertices_ori_normal = [] + texs = [] + original_angles = torch.zeros(bz) + with torch.no_grad(): + for n in range(bz): + tex_a, vertice, vertice_out, vertice_in_ori_img, align_vertice, original_angle \ + = self._forward(params[n], images[n], M[n], + pose_noise=pose_noise, align=align, frontal=frontal, + yaw_pose=yaw_pose, pitch_pose=pitch_pose) + vertices.append(vertice) + vertices_out.append(vertice_out) + vertices_in_ori_img.append(vertice_in_ori_img.clone()) + vertice2 = self.flip_normalize_vertices(vertice_in_ori_img.clone()) + vertices_ori_normal.append(vertice2) + vertices_aligned_out.append(align_vertice) + align_vertice_normal = self.flip_normalize_vertices(align_vertice.clone()) + vertices_aligned_normal.append(align_vertice_normal.clone()) + texs.append(tex_a) + original_angles[n] = original_angle + + vertices = torch.cat(vertices, 0) + vertices_aligned_normal = torch.cat(vertices_aligned_normal, 0) + vertices_ori_normal = torch.cat(vertices_ori_normal, 0) + + vertices_in_ori_img = torch.stack(vertices_in_ori_img, 0) + vertices_aligned_out = torch.stack(vertices_aligned_out, 0) + + 
texs = torch.cat(texs, 0) + + # erode the original mask and render again + rendered_images_erode = None + if erode: + with torch.cuda.device(self.current_gpu): + rendered_images, depths, masks, = self.renderer(vertices_ori_normal, self.faces_use, + texs) + # rendered_images: batch * 3 * h * w, masks: batch * h * w + masks_erode = self.generate_erode_mask(masks, kernal_size=15) + rendered_images = rendered_images.cpu() + if grey_background: + rendered_images_erode = masks_erode * rendered_images + else: + inv_masks_erode = (torch.ones_like(masks_erode) - (masks_erode)).float() + if avg_BG: + contentsum = torch.sum(torch.sum(masks_erode * rendered_images, 3), 2) + sumsum = torch.sum(torch.sum(masks_erode, 3), 2) + contentsum[contentsum == 0] = 0.5 + sumsum[sumsum == 0] = 1 + masked_sum = contentsum / sumsum + masked_BG = masked_sum.unsqueeze(2).unsqueeze(3).expand(rendered_images.size()) + else: + masked_BG = 0.5 + rendered_images_erode = masks_erode * rendered_images + inv_masks_erode * masked_BG + + texs_a_crop = [] + for n in range(bz): + tex_a_crop = self.get_render_from_vertices(rendered_images_erode[n], vertices_in_ori_img[n]) + texs_a_crop.append(tex_a_crop) + texs = torch.cat(texs_a_crop, 0) + + # render face to rotated pose + with torch.no_grad(): + with torch.cuda.device(self.current_gpu): + rendered_images, depths, masks, = self.renderer(vertices, self.faces_use, texs) + + return rendered_images, self.torch_get_68_points( + vertices_aligned_out), original_angles, self.torch_get_106_points(vertices_aligned_out) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/__init__.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/__init__.py new file mode 100644 index 0000000000..0c6b2d893b --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/__init__.py @@ -0,0 +1,6 @@ +from .batchnorm import SynchronizedBatchNorm1d, SynchronizedBatchNorm2d, SynchronizedBatchNorm3d +from .batchnorm import patch_sync_batchnorm, convert_model +from .replicate import DataParallelWithCallback, patch_replication_callback + +__all__ = ['SynchronizedBatchNorm1d', 'SynchronizedBatchNorm2d', 'SynchronizedBatchNorm3d', 'patch_sync_batchnorm', + 'convert_model', 'DataParallelWithCallback', 'patch_replication_callback'] diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm.py new file mode 100644 index 0000000000..be9ef149c5 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm.py @@ -0,0 +1,384 @@ +import collections +import contextlib + +import torch +import torch.nn.functional as F + +from torch.nn.modules.batchnorm import _BatchNorm + +try: + from torch.nn.parallel._functions import ReduceAddCoalesced, Broadcast +except ImportError: + ReduceAddCoalesced = Broadcast = None + +try: + from jactorch.parallel.comm import SyncMaster + from jactorch.parallel.data_parallel import JacDataParallel as DataParallelWithCallback +except ImportError: + from .comm import SyncMaster + from .replicate import 
DataParallelWithCallback + +__all__ = [ + 'SynchronizedBatchNorm1d', 'SynchronizedBatchNorm2d', 'SynchronizedBatchNorm3d', + 'patch_sync_batchnorm', 'convert_model' +] + + +def _sum_ft(tensor): + """sum over the first and last dimention""" + return tensor.sum(dim=0).sum(dim=-1) + + +def _unsqueeze_ft(tensor): + """add new dimensions at the front and the tail""" + return tensor.unsqueeze(0).unsqueeze(-1) + + +_ChildMessage = collections.namedtuple('_ChildMessage', ['sum', 'ssum', 'sum_size']) +_MasterMessage = collections.namedtuple('_MasterMessage', ['sum', 'inv_std']) + + +class _SynchronizedBatchNorm(_BatchNorm): + def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True): + assert ReduceAddCoalesced is not None, 'Can not use Synchronized Batch Normalization without CUDA support.' + + super(_SynchronizedBatchNorm, self).__init__(num_features, eps=eps, momentum=momentum, affine=affine) + + self._sync_master = SyncMaster(self._data_parallel_master) + + self._is_parallel = False + self._parallel_id = None + self._slave_pipe = None + + def forward(self, input): + # If it is not parallel computation or is in evaluation mode, use PyTorch's implementation. + if not (self._is_parallel and self.training): + return F.batch_norm( + input, self.running_mean, self.running_var, self.weight, self.bias, + self.training, self.momentum, self.eps) + + # Resize the input to (B, C, -1). + input_shape = input.size() + input = input.view(input.size(0), self.num_features, -1) + + # Compute the sum and square-sum. + sum_size = input.size(0) * input.size(2) + input_sum = _sum_ft(input) + input_ssum = _sum_ft(input ** 2) + + # Reduce-and-broadcast the statistics. + if self._parallel_id == 0: + mean, inv_std = self._sync_master.run_master(_ChildMessage(input_sum, input_ssum, sum_size)) + else: + mean, inv_std = self._slave_pipe.run_slave(_ChildMessage(input_sum, input_ssum, sum_size)) + + # Compute the output. + if self.affine: + # MJY:: Fuse the multiplication for speed. + output = (input - _unsqueeze_ft(mean)) * _unsqueeze_ft(inv_std * self.weight) + _unsqueeze_ft(self.bias) + else: + output = (input - _unsqueeze_ft(mean)) * _unsqueeze_ft(inv_std) + + # Reshape it. + return output.view(input_shape) + + def __data_parallel_replicate__(self, ctx, copy_id): + self._is_parallel = True + self._parallel_id = copy_id + + # parallel_id == 0 means master device. + if self._parallel_id == 0: + ctx.sync_master = self._sync_master + else: + self._slave_pipe = ctx.sync_master.register_slave(copy_id) + + def _data_parallel_master(self, intermediates): + """Reduce the sum and square-sum, compute the statistics, and broadcast it.""" + + # Always using same "device order" makes the ReduceAdd operation faster. + # Thanks to:: Tete Xiao (http://tetexiao.com/) + intermediates = sorted(intermediates, key=lambda i: i[1].sum.get_device()) + + to_reduce = [i[1][:2] for i in intermediates] + to_reduce = [j for i in to_reduce for j in i] # flatten + target_gpus = [i[1].sum.get_device() for i in intermediates] + + sum_size = sum([i[1].sum_size for i in intermediates]) + sum_, ssum = ReduceAddCoalesced.apply(target_gpus[0], 2, *to_reduce) + mean, inv_std = self._compute_mean_std(sum_, ssum, sum_size) + + broadcasted = Broadcast.apply(target_gpus, mean, inv_std) + + outputs = [] + for i, rec in enumerate(intermediates): + outputs.append((rec[0], _MasterMessage(*broadcasted[i*2:i*2+2]))) + + return outputs + + def _compute_mean_std(self, sum_, ssum, size): + """Compute the mean and standard-deviation with sum and square-sum. 
This method + also maintains the moving average on the master device.""" + assert size > 1, 'BatchNorm computes unbiased standard-deviation, which requires size > 1.' + mean = sum_ / size + sumvar = ssum - sum_ * mean + unbias_var = sumvar / (size - 1) + bias_var = sumvar / size + + if hasattr(torch, 'no_grad'): + with torch.no_grad(): + self.running_mean = (1 - self.momentum) * self.running_mean + self.momentum * mean.data + self.running_var = (1 - self.momentum) * self.running_var + self.momentum * unbias_var.data + else: + self.running_mean = (1 - self.momentum) * self.running_mean + self.momentum * mean.data + self.running_var = (1 - self.momentum) * self.running_var + self.momentum * unbias_var.data + + return mean, bias_var.clamp(self.eps) ** -0.5 + + +class SynchronizedBatchNorm1d(_SynchronizedBatchNorm): + r"""Applies Synchronized Batch Normalization over a 2d or 3d input that is seen as a + mini-batch. + + .. math:: + + y = \frac{x - mean[x]}{ \sqrt{Var[x] + \epsilon}} * gamma + beta + + This module differs from the built-in PyTorch BatchNorm1d as the mean and + standard-deviation are reduced across all devices during training. + + For example, when one uses `nn.DataParallel` to wrap the network during + training, PyTorch's implementation normalize the tensor on each device using + the statistics only on that device, which accelerated the computation and + is also easy to implement, but the statistics might be inaccurate. + Instead, in this synchronized version, the statistics will be computed + over all training samples distributed on multiple devices. + + Note that, for one-GPU or CPU-only case, this module behaves exactly same + as the built-in PyTorch implementation. + + The mean and standard-deviation are calculated per-dimension over + the mini-batches and gamma and beta are learnable parameter vectors + of size C (where C is the input size). + + During training, this layer keeps a running estimate of its computed mean + and variance. The running sum is kept with a default momentum of 0.1. + + During evaluation, this running mean/variance is used for normalization. + + Because the BatchNorm is done over the `C` dimension, computing statistics + on `(N, L)` slices, it's common terminology to call this Temporal BatchNorm + + Args: + num_features: num_features from an expected input of size + `batch_size x num_features [x width]` + eps: a value added to the denominator for numerical stability. + Default: 1e-5 + momentum: the value used for the running_mean and running_var + computation. Default: 0.1 + affine: a boolean value that when set to ``True``, gives the layer learnable + affine parameters. Default: ``True`` + + Shape:: + - Input: :math:`(N, C)` or :math:`(N, C, L)` + - Output: :math:`(N, C)` or :math:`(N, C, L)` (same shape as input) + + Examples: + >>> # With Learnable Parameters + >>> m = SynchronizedBatchNorm1d(100) + >>> # Without Learnable Parameters + >>> m = SynchronizedBatchNorm1d(100, affine=False) + >>> input = torch.autograd.Variable(torch.randn(20, 100)) + >>> output = m(input) + """ + + def _check_input_dim(self, input): + if input.dim() != 2 and input.dim() != 3: + raise ValueError('expected 2D or 3D input (got {}D input)' + .format(input.dim())) + super(SynchronizedBatchNorm1d, self)._check_input_dim(input) + + +class SynchronizedBatchNorm2d(_SynchronizedBatchNorm): + r"""Applies Batch Normalization over a 4d input that is seen as a mini-batch + of 3d inputs + + .. 
math:: + + y = \frac{x - mean[x]}{ \sqrt{Var[x] + \epsilon}} * gamma + beta + + This module differs from the built-in PyTorch BatchNorm2d as the mean and + standard-deviation are reduced across all devices during training. + + For example, when one uses `nn.DataParallel` to wrap the network during + training, PyTorch's implementation normalize the tensor on each device using + the statistics only on that device, which accelerated the computation and + is also easy to implement, but the statistics might be inaccurate. + Instead, in this synchronized version, the statistics will be computed + over all training samples distributed on multiple devices. + + Note that, for one-GPU or CPU-only case, this module behaves exactly same + as the built-in PyTorch implementation. + + The mean and standard-deviation are calculated per-dimension over + the mini-batches and gamma and beta are learnable parameter vectors + of size C (where C is the input size). + + During training, this layer keeps a running estimate of its computed mean + and variance. The running sum is kept with a default momentum of 0.1. + + During evaluation, this running mean/variance is used for normalization. + + Because the BatchNorm is done over the `C` dimension, computing statistics + on `(N, H, W)` slices, it's common terminology to call this Spatial BatchNorm + + Args: + num_features: num_features from an expected input of + size batch_size x num_features x height x width + eps: a value added to the denominator for numerical stability. + Default: 1e-5 + momentum: the value used for the running_mean and running_var + computation. Default: 0.1 + affine: a boolean value that when set to ``True``, gives the layer learnable + affine parameters. Default: ``True`` + + Shape:: + - Input: :math:`(N, C, H, W)` + - Output: :math:`(N, C, H, W)` (same shape as input) + + Examples: + >>> # With Learnable Parameters + >>> m = SynchronizedBatchNorm2d(100) + >>> # Without Learnable Parameters + >>> m = SynchronizedBatchNorm2d(100, affine=False) + >>> input = torch.autograd.Variable(torch.randn(20, 100, 35, 45)) + >>> output = m(input) + """ + + def _check_input_dim(self, input): + if input.dim() != 4: + raise ValueError('expected 4D input (got {}D input)' + .format(input.dim())) + super(SynchronizedBatchNorm2d, self)._check_input_dim(input) + + +class SynchronizedBatchNorm3d(_SynchronizedBatchNorm): + r"""Applies Batch Normalization over a 5d input that is seen as a mini-batch + of 4d inputs + + .. math:: + + y = \frac{x - mean[x]}{ \sqrt{Var[x] + \epsilon}} * gamma + beta + + This module differs from the built-in PyTorch BatchNorm3d as the mean and + standard-deviation are reduced across all devices during training. + + For example, when one uses `nn.DataParallel` to wrap the network during + training, PyTorch's implementation normalize the tensor on each device using + the statistics only on that device, which accelerated the computation and + is also easy to implement, but the statistics might be inaccurate. + Instead, in this synchronized version, the statistics will be computed + over all training samples distributed on multiple devices. + + Note that, for one-GPU or CPU-only case, this module behaves exactly same + as the built-in PyTorch implementation. + + The mean and standard-deviation are calculated per-dimension over + the mini-batches and gamma and beta are learnable parameter vectors + of size C (where C is the input size). + + During training, this layer keeps a running estimate of its computed mean + and variance. 
The running sum is kept with a default momentum of 0.1. + + During evaluation, this running mean/variance is used for normalization. + + Because the BatchNorm is done over the `C` dimension, computing statistics + on `(N, D, H, W)` slices, it's common terminology to call this Volumetric BatchNorm + or Spatio-temporal BatchNorm + + Args: + num_features: num_features from an expected input of + size batch_size x num_features x depth x height x width + eps: a value added to the denominator for numerical stability. + Default: 1e-5 + momentum: the value used for the running_mean and running_var + computation. Default: 0.1 + affine: a boolean value that when set to ``True``, gives the layer learnable + affine parameters. Default: ``True`` + + Shape:: + - Input: :math:`(N, C, D, H, W)` + - Output: :math:`(N, C, D, H, W)` (same shape as input) + + Examples: + >>> # With Learnable Parameters + >>> m = SynchronizedBatchNorm3d(100) + >>> # Without Learnable Parameters + >>> m = SynchronizedBatchNorm3d(100, affine=False) + >>> input = torch.autograd.Variable(torch.randn(20, 100, 35, 45, 10)) + >>> output = m(input) + """ + + def _check_input_dim(self, input): + if input.dim() != 5: + raise ValueError('expected 5D input (got {}D input)' + .format(input.dim())) + super(SynchronizedBatchNorm3d, self)._check_input_dim(input) + + +@contextlib.contextmanager +def patch_sync_batchnorm(): + import torch.nn as nn + + backup = nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d + + nn.BatchNorm1d = SynchronizedBatchNorm1d + nn.BatchNorm2d = SynchronizedBatchNorm2d + nn.BatchNorm3d = SynchronizedBatchNorm3d + + yield + + nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d = backup + + +def convert_model(module): + """Traverse the input module and its child recursively + and replace all instance of torch.nn.modules.batchnorm.BatchNorm*N*d + to SynchronizedBatchNorm*N*d + + Args: + module: the input module needs to be convert to SyncBN model + + Examples: + >>> import torch.nn as nn + >>> import torchvision + >>> # m is a standard pytorch model + >>> m = torchvision.models.resnet18(True) + >>> m = nn.DataParallel(m) + >>> # after convert, m is using SyncBN + >>> m = convert_model(m) + """ + if isinstance(module, torch.nn.DataParallel): + mod = module.module + mod = convert_model(mod) + mod = DataParallelWithCallback(mod) + return mod + + mod = module + for pth_module, sync_module in zip([torch.nn.modules.batchnorm.BatchNorm1d, + torch.nn.modules.batchnorm.BatchNorm2d, + torch.nn.modules.batchnorm.BatchNorm3d], + [SynchronizedBatchNorm1d, + SynchronizedBatchNorm2d, + SynchronizedBatchNorm3d]): + if isinstance(module, pth_module): + mod = sync_module(module.num_features, module.eps, module.momentum, module.affine) + mod.running_mean = module.running_mean + mod.running_var = module.running_var + if module.affine: + mod.weight.data = module.weight.data.clone().detach() + mod.bias.data = module.bias.data.clone().detach() + + for name, child in module.named_children(): + mod.add_module(name, convert_model(child)) + + return mod diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm_reimpl.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm_reimpl.py new file mode 100644 index 0000000000..6f9a5565a6 --- /dev/null +++ 
b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm_reimpl.py
@@ -0,0 +1,57 @@
+import torch
+import torch.nn as nn
+import torch.nn.init as init
+
+__all__ = ['BatchNorm2dReimpl']
+
+
+class BatchNorm2dReimpl(nn.Module):
+    """
+    A re-implementation of batch normalization, used for testing the numerical
+    stability.
+
+    Author: acgtyrant
+    See also:
+    https://github.com/vacancy/Synchronized-BatchNorm-PyTorch/issues/14
+    """
+
+    def __init__(self, num_features, eps=1e-5, momentum=0.1):
+        super().__init__()
+
+        self.num_features = num_features
+        self.eps = eps
+        self.momentum = momentum
+        self.weight = nn.Parameter(torch.empty(num_features))
+        self.bias = nn.Parameter(torch.empty(num_features))
+        self.register_buffer('running_mean', torch.zeros(num_features))
+        self.register_buffer('running_var', torch.ones(num_features))
+        self.reset_parameters()
+
+    def reset_running_stats(self):
+        self.running_mean.zero_()
+        self.running_var.fill_(1)
+
+    def reset_parameters(self):
+        self.reset_running_stats()
+        init.uniform_(self.weight)
+        init.zeros_(self.bias)
+
+    def forward(self, input_):
+        batchsize, channels, height, width = input_.size()
+        numel = batchsize * height * width
+        input_ = input_.permute(1, 0, 2, 3).contiguous().view(channels, numel)
+        sum_ = input_.sum(1)
+        sum_of_square = input_.pow(2).sum(1)
+        mean = sum_ / numel
+        sumvar = sum_of_square - sum_ * mean
+
+        self.running_mean = ((1 - self.momentum) * self.running_mean + self.momentum * mean.detach())
+        unbias_var = sumvar / (numel - 1)
+        self.running_var = ((1 - self.momentum) * self.running_var + self.momentum * unbias_var.detach())
+
+        bias_var = sumvar / numel
+        inv_std = 1 / (bias_var + self.eps).pow(0.5)
+        output = ((input_ - mean.unsqueeze(1)) * inv_std.unsqueeze(1) * self.weight.unsqueeze(1) +
+                  self.bias.unsqueeze(1))
+
+        return output.view(channels, batchsize, height, width).permute(1, 0, 2, 3).contiguous()
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/comm.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/comm.py
new file mode 100644
index 0000000000..0e159b3f53
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/comm.py
@@ -0,0 +1,127 @@
+import queue
+import collections
+import threading
+
+__all__ = ['FutureResult', 'SlavePipe', 'SyncMaster']
+
+
+class FutureResult(object):
+    """A thread-safe future implementation. Used only as one-to-one pipe."""
+
+    def __init__(self):
+        self._result = None
+        self._lock = threading.Lock()
+        self._cond = threading.Condition(self._lock)
+
+    def put(self, result):
+        with self._lock:
+            assert self._result is None, 'Previous result hasn\'t been fetched.'
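+            # store the result and wake up the consumer blocked in get()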
+            self._result = result
+            self._cond.notify()
+
+    def get(self):
+        with self._lock:
+            if self._result is None:
+                self._cond.wait()
+
+            res = self._result
+            self._result = None
+            return res
+
+
+_MasterRegistry = collections.namedtuple('MasterRegistry', ['result'])
+_SlavePipeBase = collections.namedtuple('_SlavePipeBase', ['identifier', 'queue', 'result'])
+
+
+class SlavePipe(_SlavePipeBase):
+    """Pipe for master-slave communication."""
+
+    def run_slave(self, msg):
+        self.queue.put((self.identifier, msg))
+        ret = self.result.get()
+        self.queue.put(True)
+        return ret
+
+
+class SyncMaster(object):
+    """An abstract `SyncMaster` object.
+
+    - During replication, as data parallel triggers a callback on each module, all slave devices should
+      call `register(id)` and obtain a `SlavePipe` to communicate with the master.
+    - During the forward pass, the master device invokes `run_master`; all messages from the slave devices
+      are collected and passed to a registered callback.
+    - After receiving the messages, the master device gathers the information and determines the message
+      to pass back to each slave device.
+    """
+
+    def __init__(self, master_callback):
+        """
+
+        Args:
+            master_callback: a callback to be invoked after having collected messages from slave devices.
+        """
+        self._master_callback = master_callback
+        self._queue = queue.Queue()
+        self._registry = collections.OrderedDict()
+        self._activated = False
+
+    def __getstate__(self):
+        return {'master_callback': self._master_callback}
+
+    def __setstate__(self, state):
+        self.__init__(state['master_callback'])
+
+    def register_slave(self, identifier):
+        """
+        Register a slave device.
+
+        Args:
+            identifier: an identifier, usually the device id.
+
+        Returns: a `SlavePipe` object that can be used to communicate with the master device.
+
+        """
+        if self._activated:
+            assert self._queue.empty(), 'Queue is not clean before next initialization.'
+            self._activated = False
+            self._registry.clear()
+        future = FutureResult()
+        self._registry[identifier] = _MasterRegistry(future)
+        return SlavePipe(identifier, self._queue, future)
+
+    def run_master(self, master_msg):
+        """
+        Main entry for the master device in each forward pass.
+        The messages are first collected from each device (including the master device), and then
+        a callback is invoked to compute the message to be sent back to each device
+        (including the master device).
+
+        Args:
+            master_msg: the message that the master wants to send to itself. This will be placed as the first
+            message when calling `master_callback`. For detailed usage, see `_SynchronizedBatchNorm` for an example.
+
+        Returns: the message to be sent back to the master device.
+
+        """
+        self._activated = True
+
+        intermediates = [(0, master_msg)]
+        for i in range(self.nr_slaves):
+            intermediates.append(self._queue.get())
+
+        results = self._master_callback(intermediates)
+        assert results[0][0] == 0, 'The first result should belong to the master.'
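+        # deliver each slave's portion of the result through its FutureResult pipe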
+ + for i, res in results: + if i == 0: + continue + self._registry[i].result.put(res) + + for i in range(self.nr_slaves): + assert self._queue.get() is True + + return results[0][1] + + @property + def nr_slaves(self): + return len(self._registry) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/replicate.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/replicate.py new file mode 100644 index 0000000000..63feed8971 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/replicate.py @@ -0,0 +1,119 @@ +import torch.multiprocessing as multiprocessing +import functools +import torch +from torch.nn.parallel.data_parallel import DataParallel +from .scatter_gather import scatter_kwargs +multiprocessing.set_start_method('spawn', force=True) +__all__ = [ + 'CallbackContext', + 'execute_replication_callbacks', + 'DataParallelWithCallback', + 'patch_replication_callback' +] + + +class CallbackContext(object): + pass + + +def execute_replication_callbacks(modules): + """ + Execute an replication callback `__data_parallel_replicate__` on each module created by original replication. + + The callback will be invoked with arguments `__data_parallel_replicate__(ctx, copy_id)` + + Note that, as all modules are isomorphism, we assign each sub-module with a context + (shared among multiple copies of this module on different devices). + Through this context, different copies can share some information. + + We guarantee that the callback on the master copy (the first copy) will be called ahead of calling the callback + of any slave copies. + """ + master_copy = modules[0] + nr_modules = len(list(master_copy.modules())) + ctxs = [CallbackContext() for _ in range(nr_modules)] + + for i, module in enumerate(modules): + for j, m in enumerate(module.modules()): + if hasattr(m, '__data_parallel_replicate__'): + m.__data_parallel_replicate__(ctxs[j], i) + + +class DataParallelWithCallback(DataParallel): + """ + Data Parallel with a replication callback. + + An replication callback `__data_parallel_replicate__` of each module will be invoked after being created by + original `replicate` function. + The callback will be invoked with arguments `__data_parallel_replicate__(ctx, copy_id)` + + Examples: + > sync_bn = SynchronizedBatchNorm1d(10, eps=1e-5, affine=False) + > sync_bn = DataParallelWithCallback(sync_bn, device_ids=[0, 1]) + # sync_bn.__data_parallel_replicate__ will be invoked. 
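+    Compared with the stock `DataParallel`, this subclass also accepts a `chunk_size`
+    argument, which is forwarded to `scatter_kwargs` to control how the inputs are
+    split across devices.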
+ """ + def __init__(self, module, device_ids=None, output_device=None, dim=0, chunk_size=None): + super(DataParallelWithCallback, self).__init__(module) + + if not torch.cuda.is_available(): + self.module = module + self.device_ids = [] + return + + if device_ids is None: + device_ids = list(range(torch.cuda.device_count())) + if output_device is None: + output_device = device_ids[0] + self.dim = dim + self.module = module + self.device_ids = device_ids + self.output_device = output_device + self.chunk_size = chunk_size + + if len(self.device_ids) == 1: + self.module.cuda(device_ids[0]) + + def forward(self, *inputs, **kwargs): + if not self.device_ids: + return self.module(*inputs, **kwargs) + inputs, kwargs = self.scatter(inputs, kwargs, self.device_ids, self.chunk_size) + if len(self.device_ids) == 1: + return self.module(*inputs[0], **kwargs[0]) + replicas = self.replicate(self.module, self.device_ids[:len(inputs)]) + outputs = self.parallel_apply(replicas, inputs, kwargs) + return self.gather(outputs, self.output_device) + + def scatter(self, inputs, kwargs, device_ids, chunk_size): + return scatter_kwargs(inputs, kwargs, device_ids, dim=self.dim, chunk_size=self.chunk_size) + + def replicate(self, module, device_ids): + modules = super(DataParallelWithCallback, self).replicate(module, device_ids) + execute_replication_callbacks(modules) + return modules + + +def patch_replication_callback(data_parallel): + """ + Monkey-patch an existing `DataParallel` object. Add the replication callback. + Useful when you have customized `DataParallel` implementation. + + Examples: + > sync_bn = SynchronizedBatchNorm1d(10, eps=1e-5, affine=False) + > sync_bn = DataParallel(sync_bn, device_ids=[0, 1]) + > patch_replication_callback(sync_bn) + # this is equivalent to + > sync_bn = SynchronizedBatchNorm1d(10, eps=1e-5, affine=False) + > sync_bn = DataParallelWithCallback(sync_bn, device_ids=[0, 1]) + """ + + assert isinstance(data_parallel, DataParallel) + + old_replicate = data_parallel.replicate + + @functools.wraps(old_replicate) + def new_replicate(module, device_ids): + modules = old_replicate(module, device_ids) + execute_replication_callbacks(modules) + return modules + + data_parallel.replicate = new_replicate diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/scatter_gather.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/scatter_gather.py new file mode 100644 index 0000000000..ad6fff3a54 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/scatter_gather.py @@ -0,0 +1,44 @@ +import torch +from torch.nn.parallel._functions import Scatter + + +def scatter(inputs, target_gpus, dim=0, chunk_size=None): + r""" + Slices tensors into approximately equal chunks and + distributes them across given GPUs. Duplicates + references to objects that are not tensors. 
+ """ + def scatter_map(obj): + if isinstance(obj, torch.Tensor): + return Scatter.apply(target_gpus, chunk_size, dim, obj) + if isinstance(obj, tuple) and len(obj) > 0: + return list(zip(*map(scatter_map, obj))) + if isinstance(obj, list) and len(obj) > 0: + return list(map(list, zip(*map(scatter_map, obj)))) + if isinstance(obj, dict) and len(obj) > 0: + return list(map(type(obj), zip(*map(scatter_map, obj.items())))) + return [obj for targets in target_gpus] + + # After scatter_map is called, a scatter_map cell will exist. This cell + # has a reference to the actual function scatter_map, which has references + # to a closure that has a reference to the scatter_map cell (because the + # fn is recursive). To avoid this reference cycle, we set the function to + # None, clearing the cell + try: + res = scatter_map(inputs) + finally: + scatter_map = None + return res + + +def scatter_kwargs(inputs, kwargs, target_gpus, dim=0, chunk_size=None): + r"""Scatter with support for kwargs dictionary""" + inputs = scatter(inputs, target_gpus, dim, chunk_size) if inputs else [] + kwargs = scatter(kwargs, target_gpus, dim, chunk_size) if kwargs else [] + if len(inputs) < len(kwargs): + inputs.extend([() for _ in range(len(kwargs) - len(inputs))]) + elif len(kwargs) < len(inputs): + kwargs.extend([{} for _ in range(len(inputs) - len(kwargs))]) + inputs = tuple(inputs) + kwargs = tuple(kwargs) + return inputs, kwargs diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/unittest.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/unittest.py new file mode 100644 index 0000000000..dde4f6e12c --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/unittest.py @@ -0,0 +1,18 @@ +import unittest +import torch + + +class TorchTestCase(unittest.TestCase): + def assertTensorClose(self, x, y): + adiff = float((x - y).abs().max()) + if (y == 0).all(): + rdiff = 'NaN' + else: + rdiff = float((adiff / y).abs().max()) + + message = ( + 'Tensor close check failed\n' + 'adiff={}\n' + 'rdiff={}\n' + ).format(adiff, rdiff) + self.assertTrue(torch.allclose(x, y), message) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/test_render.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/test_render.py new file mode 100644 index 0000000000..a0f64f32ca --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/test_render.py @@ -0,0 +1,152 @@ +import torch +import pickle +import numpy as np +from models.networks.render import Render, angle2matrix, matrix2angle, P2sRt + + +def _get_suffix(filename): + """a.jpg -> jpg""" + pos = filename.rfind('.') + if pos == -1: + return '' + return filename[pos + 1:] + + +def _load(fp): + suffix = _get_suffix(fp) + if suffix == 'npy': + return np.load(fp) + elif suffix == 'pkl': + return pickle.load(open(fp, 'rb')) + + +class TestRender(Render): + + def __init__(self, opt): + super(TestRender, self).__init__(opt) + + def _parse_param(self, param, pose_noise=False, frontal=True, large_pose=False, pose=None): + """Work for both numpy and tensor""" + p_ = param[:12].reshape(3, -1) + p = p_[:, :3] + s, R, t3d = P2sRt(p_) + 
angle = matrix2angle(R) + original_angle = angle[0] + if frontal: + angle[0] = 0 + if angle[1] < 0: + angle[1] = 0 + p = angle2matrix(angle) * s + if pose_noise: + angle[0] = np.random.uniform(-0.258, 0.258, 1)[0] + p = angle2matrix(angle) * s + + if large_pose: + if original_angle < 0: + angle[0] = np.random.uniform(-1, -0.955, 1)[0] + else: + angle[0] = np.random.uniform(0.955, 1, 1)[0] + if angle[1] < 0: + angle[1] = 0 + p = angle2matrix(angle) * s + + offset = p_[:, -1].reshape(3, 1) + alpha_shp = param[12:52].reshape(-1, 1) + alpha_exp = param[52:-4].reshape(-1, 1) + box = param[-4:] + return p, offset, alpha_shp, alpha_exp, box, original_angle + + def rotate_render(self, params, images, M=None, with_BG=False, + pose_noise=False, large_pose=False, align=True, frontal=True, erode=True, grey_background=False, + avg_BG=True, pose=None): + + bz, c, w, h = images.size() + + face_size = self.faces.size() + self.faces_use = self.faces.expand(bz, face_size[1], face_size[2]) + + # get render color vertices and normal vertices information, get original texs + vertices = [] + vertices_out = [] + vertices_in_ori_img = [] + vertices_aligned_normal = [] + vertices_aligned_out = [] + vertices_ori_normal = [] + texs = [] + original_angles = torch.zeros(bz) + with torch.no_grad(): + for n in range(bz): + tex_a, vertice, vertice_out, vertice_in_ori_img, align_vertice, original_angle \ + = self._forward(params[n], images[n], M[n], + pose_noise=pose_noise, align=align, frontal=frontal) + vertices.append(vertice) + vertices_out.append(vertice_out) + vertices_in_ori_img.append(vertice_in_ori_img.clone()) + vertice2 = self.flip_normalize_vertices(vertice_in_ori_img.clone()) + vertices_ori_normal.append(vertice2) + vertices_aligned_out.append(align_vertice) + align_vertice_normal = self.flip_normalize_vertices(align_vertice.clone()) + vertices_aligned_normal.append(align_vertice_normal.clone()) + texs.append(tex_a) + original_angles[n] = original_angle + + vertices = torch.cat(vertices, 0) + vertices_aligned_normal = torch.cat(vertices_aligned_normal, 0) + vertices_ori_normal = torch.cat(vertices_ori_normal, 0) + + vertices_in_ori_img = torch.stack(vertices_in_ori_img, 0) + vertices_aligned_out = torch.stack(vertices_aligned_out, 0) + + texs = torch.cat(texs, 0) + + # erode the original mask and render again + rendered_images_erode = None + if erode: + with torch.cuda.device(self.current_gpu): + rendered_images, depths, masks, = self.renderer(vertices_ori_normal, self.faces_use, + texs) + # rendered_images: batch * 3 * h * w, masks: batch * h * w + masks_erode = self.generate_erode_mask(masks, kernal_size=15) + rendered_images = rendered_images.cpu() + if grey_background: + rendered_images_erode = masks_erode * rendered_images + else: + inv_masks_erode = (torch.ones_like(masks_erode) - (masks_erode)).float() + if avg_BG: + contentsum = torch.sum(torch.sum(masks_erode * rendered_images, 3), 2) + sumsum = torch.sum(torch.sum(masks_erode, 3), 2) + contentsum[contentsum == 0] = 0.5 + sumsum[sumsum == 0] = 1 + masked_sum = contentsum / sumsum + masked_BG = masked_sum.unsqueeze(2).unsqueeze(3).expand(rendered_images.size()) + else: + masked_BG = 0.5 + rendered_images_erode = masks_erode * rendered_images + inv_masks_erode * masked_BG + + texs_a_crop = [] + for n in range(bz): + tex_a_crop = self.get_render_from_vertices(rendered_images_erode[n], vertices_in_ori_img[n]) + texs_a_crop.append(tex_a_crop) + texs = torch.cat(texs_a_crop, 0) + + # render face to rotated pose + with torch.no_grad(): + with 
torch.cuda.device(self.current_gpu): + rendered_images, depths, masks, = self.renderer(vertices, self.faces_use, texs) + + rendered_images = rendered_images.cpu() + + # get rendered face vertices + texs_b = [] + for n in range(bz): + tex_b = self.get_render_from_vertices(rendered_images[n], vertices_out[n]) + texs_b.append(tex_b) + texs_b = torch.cat(texs_b, 0) + + with torch.cuda.device(self.current_gpu): + + rendered_images_double, depths2, masks2, = self.renderer(vertices_aligned_normal, self.faces_use, + texs_b) + # rendered_images: batch * 3 * h * w, masks: batch * h * w + + return rendered_images_double, self.torch_get_68_points(vertices_aligned_out), original_angles diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/util.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/util.py new file mode 100644 index 0000000000..2411fa9ba3 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/util.py @@ -0,0 +1,176 @@ +"""This module contains simple helper functions """ +from __future__ import print_function +import torch +import numpy as np +from PIL import Image +import os +from math import sin, cos, atan2, asin + + +def P2sRt(P): + ''' decompositing camera matrix P. + Args: + P: (3, 4). Affine Camera Matrix. + Returns: + s: scale factor. + R: (3, 3). rotation matrix. + t2d: (2,). 2d translation. + ''' + t3d = P[:, 3] + R1 = P[0:1, :3] + R2 = P[1:2, :3] + s = (np.linalg.norm(R1) + np.linalg.norm(R2)) / 2.0 + r1 = R1 / np.linalg.norm(R1) + r2 = R2 / np.linalg.norm(R2) + r3 = np.cross(r1, r2) + + R = np.concatenate((r1, r2, r3), 0) + return s, R, t3d + + +def matrix2angle(R): + ''' compute three Euler angles from a Rotation Matrix. Ref: http://www.gregslabaugh.net/publications/euler.pdf + Args: + R: (3,3). rotation matrix + Returns: + x: yaw + y: pitch + z: roll + ''' + # assert(isRotationMatrix(R)) + + if R[2, 0] != 1 and R[2, 0] != -1: + x = -asin(max(-1, min(R[2, 0], 1))) + y = atan2(R[2, 1] / cos(x), R[2, 2] / cos(x)) + z = atan2(R[1, 0] / cos(x), R[0, 0] / cos(x)) + + else: # Gimbal lock + z = 0 # can be anything + if R[2, 0] == -1: + x = np.pi / 2 + y = z + atan2(R[0, 1], R[0, 2]) + else: + x = -np.pi / 2 + y = -z + atan2(-R[0, 1], -R[0, 2]) + + return [x, y, z] + + +def angle2matrix(angles): + ''' get rotation matrix from three rotation angles(radian). The same as in 3DDFA. + Args: + angles: [3,]. x, y, z angles + x: yaw. + y: pitch. + z: roll. + Returns: + R: 3x3. rotation matrix. + ''' + # x, y, z = np.deg2rad(angles[0]), np.deg2rad(angles[1]), np.deg2rad(angles[2]) + # x, y, z = angles[0], angles[1], angles[2] + y, x, z = angles[0], angles[1], angles[2] + + # x + Rx = np.array([[1, 0, 0], + [0, cos(x), -sin(x)], + [0, sin(x), cos(x)]]) + # y + Ry = np.array([[cos(y), 0, sin(y)], + [0, 1, 0], + [-sin(y), 0, cos(y)]]) + # z + Rz = np.array([[cos(z), -sin(z), 0], + [sin(z), cos(z), 0], + [0, 0, 1]]) + R = Rz.dot(Ry).dot(Rx) + return R.astype(np.float32) + + +def tensor2im(input_image, imtype=np.uint8): + """"Converts a Tensor array into a numpy image array. 
+ + Parameters: + input_image (tensor) -- the input image tensor array + imtype (type) -- the desired type of the converted numpy array + """ + if not isinstance(input_image, np.ndarray): + if isinstance(input_image, torch.Tensor): # get the data from a variable + image_tensor = input_image.data + else: + return input_image + image_numpy = image_tensor[0].cpu().float().numpy() # convert it into a numpy array + if image_numpy.shape[0] == 1: # grayscale to RGB + image_numpy = np.tile(image_numpy, (3, 1, 1)) + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 # post-processing: tranpose and scaling + else: # if it is a numpy array, do nothing + image_numpy = input_image + return image_numpy.astype(imtype) + + +def diagnose_network(net, name='network'): + """Calculate and print the mean of average absolute(gradients) + + Parameters: + net (torch network) -- Torch network + name (str) -- the name of the network + """ + mean = 0.0 + count = 0 + for param in net.parameters(): + if param.grad is not None: + mean += torch.mean(torch.abs(param.grad.data)) + count += 1 + if count > 0: + mean = mean / count + print(name) + print(mean) + + +def save_image(image_numpy, image_path): + """Save a numpy image to the disk + + Parameters: + image_numpy (numpy array) -- input numpy array + image_path (str) -- the path of the image + """ + image_pil = Image.fromarray(image_numpy) + image_pil.save(image_path) + + +def print_numpy(x, val=True, shp=False): + """Print the mean, min, max, median, std, and size of a numpy array + + Parameters: + val (bool) -- if print the values of the numpy array + shp (bool) -- if print the shape of the numpy array + """ + x = x.astype(np.float64) + if shp: + print('shape,', x.shape) + if val: + x = x.flatten() + print('mean = %3.3f, min = %3.3f, max = %3.3f, median = %3.3f, std=%3.3f' % ( + np.mean(x), np.min(x), np.max(x), np.median(x), np.std(x))) + + +def mkdirs(paths): + """create empty directories if they don't exist + + Parameters: + paths (str list) -- a list of directory paths + """ + if isinstance(paths, list) and not isinstance(paths, str): + for path in paths: + mkdir(path) + else: + mkdir(paths) + + +def mkdir(path): + """create a single empty directory if it didn't exist + + Parameters: + path (str) -- a single directory path + """ + if not os.path.exists(path): + os.makedirs(path) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotate_model.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotate_model.py new file mode 100644 index 0000000000..6b32f129a6 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotate_model.py @@ -0,0 +1,330 @@ +import torch +import algorithm.Rotate_and_Render.models.networks as networks +import algorithm.Rotate_and_Render.util.util as util +import os + + +class RotateModel(torch.nn.Module): + @staticmethod + def modify_commandline_options(parser, is_train): + networks.modify_commandline_options(parser, is_train) + return parser + + def __init__(self, opt): + super(RotateModel, self).__init__() + self.opt = opt + self.save_dir = os.path.join(opt.checkpoints_dir, opt.name) + self.FloatTensor = torch.cuda.FloatTensor if self.use_gpu() \ + else torch.FloatTensor + self.ByteTensor = torch.cuda.ByteTensor if self.use_gpu() \ + else torch.ByteTensor + self.real_image = torch.zeros(opt.batchSize, 3, opt.crop_size, 
opt.crop_size) + self.input_semantics = torch.zeros(opt.batchSize, 3, opt.crop_size, opt.crop_size) + + self.netG, self.netD, self.netE, self.netD_rotate = self.initialize_networks(opt) + + # set loss functions + if opt.isTrain: + self.criterionGAN = networks.GANLoss( + opt.gan_mode, tensor=self.FloatTensor, opt=self.opt) + self.criterionFeat = torch.nn.L1Loss() + if not opt.no_vgg_loss: + self.criterionVGG = networks.VGGLoss(self.opt) + if opt.use_vae: + self.KLDLoss = networks.KLDLoss() + + # Entry point for all calls involving forward pass + # of deep networks. We used this approach since DataParallel module + # can't parallelize custom functions, we branch to different + # routines based on |mode|. + # |data|: dictionary of the input data + + def forward(self, data, mode): + real_image = data['image'] + input_semantics = data['mesh'] + if self.opt.use_rotated_mesh: + rotated_mesh = data['rotated_mesh'] + else: + rotated_mesh = None + if mode == 'generator': + g_loss, generated = self.compute_generator_loss( + input_semantics, real_image, netD=self.netD, mode=mode, no_ganFeat_loss=self.opt.no_ganFeat_loss, + no_vgg_loss=self.opt.no_vgg_loss, lambda_D=self.opt.lambda_D) + return g_loss, generated + if mode == 'generator_rotated': + g_loss, generated = self.compute_generator_loss( + rotated_mesh, real_image, netD=self.netD_rotate, mode=mode, no_ganFeat_loss=True, + no_vgg_loss=self.opt.no_vgg_loss, lambda_D=self.opt.lambda_rotate_D) + return g_loss, generated + elif mode == 'discriminator': + d_loss = self.compute_discriminator_loss( + input_semantics, real_image, netD=self.netD, lambda_D=self.opt.lambda_D) + return d_loss + elif mode == 'discriminator_rotated': + d_loss = self.compute_discriminator_loss( + rotated_mesh, real_image, self.netD_rotate, lambda_D=self.opt.lambda_rotate_D) + return d_loss + elif mode == 'encode_only': + z, mu, logvar = self.encode_z(real_image) + return mu, logvar + elif mode == 'inference': + with torch.no_grad(): + fake_image, _ = self.generate_fake(input_semantics, real_image) + fake_rotate, _ = self.generate_fake(rotated_mesh, real_image) + return fake_image, fake_rotate + else: + raise ValueError("|mode| is invalid") + + def create_optimizers(self, opt): + G_params = list(self.netG.parameters()) + if opt.use_vae: + G_params += list(self.netE.parameters()) + if opt.isTrain: + if opt.train_rotate: + D_params = list(self.netD.parameters()) + list(self.netD_rotate.parameters()) + else: + D_params = self.netD.parameters() + + if opt.no_TTUR: + beta1, beta2 = opt.beta1, opt.beta2 + G_lr, D_lr = opt.lr, opt.lr + else: + beta1, beta2 = 0, 0.9 + G_lr, D_lr = opt.lr / 2, opt.lr * 2 + + optimizer_G = torch.optim.Adam(G_params, lr=G_lr, betas=(beta1, beta2)) + optimizer_D = torch.optim.Adam(D_params, lr=D_lr, betas=(beta1, beta2)) + + return optimizer_G, optimizer_D + + def save(self, epoch): + util.save_network(self.netG, 'G', epoch, self.opt) + util.save_network(self.netD, 'D', epoch, self.opt) + if self.opt.train_rotate: + util.save_network(self.netD_rotate, 'D_rotate', epoch, self.opt) + if self.opt.use_vae: + util.save_network(self.netE, 'E', epoch, self.opt) + + ############################################################################ + # Private helper methods + ############################################################################ + + def initialize_networks(self, opt): + + netG = networks.define_G(opt) + netD = networks.define_D(opt) if opt.isTrain else None + netD_rotate = networks.define_D(opt) if opt.isTrain else None + netE = 
networks.define_E(opt) if opt.use_vae else None + pretrained_path = '' + if not opt.isTrain or opt.continue_train: + self.load_network(netG, 'G', opt.which_epoch, pretrained_path) + if opt.isTrain and not opt.noload_D: + self.load_network(netD, 'D', opt.which_epoch, pretrained_path) + self.load_network(netD_rotate, 'D_rotate', opt.which_epoch, pretrained_path) + if opt.use_vae: + self.load_network(netE, 'E', opt.which_epoch, pretrained_path) + else: + + if opt.load_separately: + netG = self.load_separately(netG, 'G', opt) + if not opt.noload_D: + netD = self.load_separately(netD, 'D', opt) + netD_rotate = self.load_separately(netD_rotate, 'D_rotate', opt) + if opt.use_vae: + netE = self.load_separately(netE, 'E', opt) + + return netG, netD, netE, netD_rotate + + # preprocess the input, such as moving the tensors to GPUs and + # transforming the label map to one-hot encoding + + def compute_generator_loss(self, input_semantics, real_image, netD, mode, no_ganFeat_loss=False, no_vgg_loss=False, + lambda_D=1): + G_losses = {} + + fake_image, KLD_loss = self.generate_fake( + input_semantics, real_image, compute_kld_loss=self.opt.use_vae) + + if self.opt.use_vae: + G_losses['KLD'] = KLD_loss + + pred_fake, pred_real = self.discriminate( + input_semantics, fake_image, real_image, netD) + + G_losses['GAN'] = self.criterionGAN(pred_fake, True, + for_discriminator=False) * lambda_D + + if not no_ganFeat_loss: + num_D = len(pred_fake) + GAN_Feat_loss = self.FloatTensor(1).fill_(0) + for i in range(num_D): # for each discriminator + # last output is the final prediction, so we exclude it + num_intermediate_outputs = len(pred_fake[i]) - 1 + for j in range(num_intermediate_outputs): # for each layer output + unweighted_loss = self.criterionFeat( + pred_fake[i][j], pred_real[i][j].detach()) + if j == 0: + unweighted_loss *= self.opt.lambda_image + GAN_Feat_loss += unweighted_loss * self.opt.lambda_feat / num_D + G_losses['GAN_Feat'] = GAN_Feat_loss + + if not no_vgg_loss: + if mode == 'generator_rotated': + num = 2 + else: + num = 0 + G_losses['VGG'] = self.criterionVGG(fake_image, real_image, num) * self.opt.lambda_vgg + + return G_losses, fake_image + + def compute_discriminator_loss(self, input_semantics, real_image, netD, lambda_D=1): + D_losses = {} + with torch.no_grad(): + fake_image, _ = self.generate_fake(input_semantics, real_image) + fake_image = fake_image.detach() + fake_image.requires_grad_() + + pred_fake, pred_real = self.discriminate( + input_semantics, fake_image, real_image, netD) + + D_losses['D_Fake'] = self.criterionGAN(pred_fake, False, + for_discriminator=True) * lambda_D + + D_losses['D_real'] = self.criterionGAN(pred_real, True, + for_discriminator=True) * lambda_D + + return D_losses + + def encode_z(self, real_image): + mu, logvar = self.netE(real_image) + z = self.reparameterize(mu, logvar) + return z, mu, logvar + + def generate_fake(self, input_semantics, real_image, compute_kld_loss=False): + z = None + KLD_loss = None + if self.opt.use_vae: + z, mu, logvar = self.encode_z(real_image) + if compute_kld_loss: + KLD_loss = self.KLDLoss(mu, logvar) * self.opt.lambda_kld + + fake_image = self.netG(input_semantics, z=z) + + assert (not compute_kld_loss) or self.opt.use_vae, \ + "You cannot compute KLD loss if opt.use_vae == False" + + return fake_image, KLD_loss + + # Given fake and real image, return the prediction of discriminator + # for each fake and real image. 
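Editorial note, not part of the original patch: the `discriminate` method below feeds the fake and the real batch through the discriminator as one concatenated batch, so that any (synchronized) batch-norm layer inside `netD` normalizes both halves with identical statistics; `divide_pred` then splits the stacked predictions apart again. A minimal self-contained sketch of that pattern (the helper name `discriminate_sketch` is hypothetical), assuming `netD` returns either a single tensor or, for multiscale discriminators, a list of per-scale feature lists:

import torch

def discriminate_sketch(netD, fake_images, real_images):
    # One forward pass over the stacked (2B, C, H, W) batch.
    out = netD(torch.cat([fake_images, real_images], dim=0))
    if isinstance(out, list):
        # Multiscale case: a list of per-scale lists of feature tensors;
        # every tensor is split at the batch midpoint.
        pred_fake = [[t[:t.size(0) // 2] for t in scale] for scale in out]
        pred_real = [[t[t.size(0) // 2:] for t in scale] for scale in out]
    else:
        pred_fake = out[:out.size(0) // 2]
        pred_real = out[out.size(0) // 2:]
    return pred_fake, pred_real

The per-layer feature lists kept by the multiscale branch are what `compute_generator_loss` above consumes for the GAN feature-matching loss.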
+ + def discriminate(self, input_semantics, fake_image, real_image, netD): + if self.opt.D_input == "concat": + fake_concat = torch.cat([input_semantics, fake_image], dim=1) + real_concat = torch.cat([input_semantics, real_image], dim=1) + else: + fake_concat = fake_image + real_concat = real_image + + # In Batch Normalization, the fake and real images are + # recommended to be in the same batch to avoid disparate + # statistics in fake and real images. + # So both fake and real images are fed to D all at once. + fake_and_real = torch.cat([fake_concat, real_concat], dim=0) + + discriminator_out = netD(fake_and_real) + + pred_fake, pred_real = self.divide_pred(discriminator_out) + + return pred_fake, pred_real + + # Take the prediction of fake and real images from the combined batch + def divide_pred(self, pred): + # the prediction contains the intermediate outputs of multiscale GAN, + # so it's usually a list + if type(pred) == list: + fake = [] + real = [] + for p in pred: + fake.append([tensor[:tensor.size(0) // 2] for tensor in p]) + real.append([tensor[tensor.size(0) // 2:] for tensor in p]) + else: + fake = pred[:pred.size(0) // 2] + # rotate_fake = pred[pred.size(0) // 3: pred.size(0) * 2 // 3] + real = pred[pred.size(0) // 2:] + + return fake, real + + def get_edges(self, t): + edge = self.ByteTensor(t.size()).zero_() + edge[:, :, :, 1:] = edge[:, :, :, 1:] | (t[:, :, :, 1:] != t[:, :, :, :-1]) + edge[:, :, :, :-1] = edge[:, :, :, :-1] | (t[:, :, :, 1:] != t[:, :, :, :-1]) + edge[:, :, 1:, :] = edge[:, :, 1:, :] | (t[:, :, 1:, :] != t[:, :, :-1, :]) + edge[:, :, :-1, :] = edge[:, :, :-1, :] | (t[:, :, 1:, :] != t[:, :, :-1, :]) + return edge.float() + + def load_separately(self, network, network_label, opt): + load_path = None + if network_label == 'G': + load_path = opt.G_pretrain_path + elif network_label == 'D': + + load_path = opt.D_pretrain_path + elif network_label == 'D_rotate': + load_path = opt.D_rotate_pretrain_path + elif network_label == 'E': + load_path = opt.E_pretrain_path + + if load_path is not None: + if os.path.isfile(load_path): + print("=> loading checkpoint '{}'".format(load_path)) + checkpoint = torch.load(load_path) + util.copy_state_dict(checkpoint, network) + else: + print("no load_path") + return network + + def load_network(self, network, network_label, epoch_label, save_dir=''): + save_filename = '%s_net_%s.pth' % (epoch_label, network_label) + if not save_dir: + save_dir = self.save_dir + save_path = os.path.join(save_dir, save_filename) + if not os.path.isfile(save_path): + print('%s not exists yet!' 
% save_path) + if network_label == 'G': + raise ('Generator must exist!') + else: + # network.load_state_dict(torch.load(save_path)) + try: + network.load_state_dict(torch.load(save_path)) + except: + pretrained_dict = torch.load(save_path) + model_dict = network.state_dict() + try: + pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict} + network.load_state_dict(pretrained_dict) + if self.opt.verbose: + print( + 'Pretrained network %s has excessive layers; Only loading layers that are used' % network_label) + except: + print('Pretrained network %s has fewer layers; The following are not initialized:' % network_label) + for k, v in pretrained_dict.items(): + if v.size() == model_dict[k].size(): + model_dict[k] = v + + not_initialized = set() + + for k, v in model_dict.items(): + if k not in pretrained_dict or v.size() != pretrained_dict[k].size(): + not_initialized.add(k.split('.')[0]) + + print(sorted(not_initialized)) + network.load_state_dict(model_dict) + + def reparameterize(self, mu, logvar): + std = torch.exp(0.5 * logvar) + eps = torch.randn_like(std) + return eps.mul(std) + mu + + def use_gpu(self): + return len(self.opt.gpu_ids) > 0 diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotatespade_model.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotatespade_model.py new file mode 100644 index 0000000000..826ae6a46d --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotatespade_model.py @@ -0,0 +1,391 @@ +import torch +from . import networks +from ..util import util +from ..data import curve +import numpy as np +import os + + +class RotateSPADEModel(torch.nn.Module): + @staticmethod + def modify_commandline_options(parser, is_train): + networks.modify_commandline_options(parser, is_train) + return parser + + def __init__(self, opt): + super(RotateSPADEModel, self).__init__() + self.opt = opt + self.save_dir = os.path.join(opt.checkpoints_dir, opt.name) + self.FloatTensor = torch.cuda.FloatTensor if self.use_gpu() \ + else torch.FloatTensor + self.ByteTensor = torch.cuda.ByteTensor if self.use_gpu() \ + else torch.ByteTensor + self.real_image = torch.zeros(opt.batchSize, 3, opt.crop_size, opt.crop_size) + self.input_semantics = torch.zeros(opt.batchSize, 3, opt.crop_size, opt.crop_size) + + self.netG, self.netD, self.netE, self.netD_rotate = self.initialize_networks(opt) + + # set loss functions + if opt.isTrain: + self.criterionGAN = networks.GANLoss( + opt.gan_mode, tensor=self.FloatTensor, opt=self.opt) + self.criterionFeat = torch.nn.L1Loss() + if not opt.no_vgg_loss: + self.criterionVGG = networks.VGGLoss(self.opt) + if opt.use_vae: + self.KLDLoss = networks.KLDLoss() + + def landmark_68_to_5(self, t68): + le = t68[36:42, :].mean(axis=0, keepdims=True) + re = t68[42:48, :].mean(axis=0, keepdims=True) + no = t68[31:32, :] + lm = t68[48:49, :] + rm = t68[54:55, :] + t5 = np.concatenate([le, re, no, lm, rm], axis=0) + t5 = t5.reshape(10) + t5 = torch.from_numpy(t5).unsqueeze(0).cuda() + return t5 + + def get_seg_map(self, landmarks, no_guassian=False, size=256, original_angles=None): + landmarks = landmarks[:, :, :2].cpu().numpy().astype(np.float) + all_heatmap = [] + all_orig_heatmap = [] + if original_angles is None: + original_angles = torch.zeros(landmarks.shape[0]) + # key_points = [] + for i in range(landmarks.shape[0]): + heatmap = 
curve.points_to_heatmap_68points(landmarks[i], 13, size, self.opt.heatmap_size) + + heatmap2 = curve.combine_map(heatmap, no_guassian=no_guassian) + if self.opt.isTrain: + if np.random.randint(2): + heatmap = np.zeros_like(heatmap) + else: + if torch.abs(original_angles[i]) < 0.255: + heatmap = np.zeros_like(heatmap) + + all_heatmap.append(heatmap2) + all_orig_heatmap.append(heatmap) + # key_points.append(self.landmark_68_to_5(landmarks[i])) + all_heatmap = np.stack(all_heatmap, axis=0) + all_orig_heatmap = np.stack(all_orig_heatmap, axis=0) + all_heatmap = torch.from_numpy(all_heatmap.astype(np.float32)).cuda() + all_orig_heatmap = torch.from_numpy(all_orig_heatmap.astype(np.float32)).cuda() + all_orig_heatmap = all_orig_heatmap.permute(0, 3, 1, 2) + all_orig_heatmap[all_orig_heatmap > 0] = 2.0 + return all_heatmap, all_orig_heatmap + + # Entry point for all calls involving forward pass + # of deep networks. We used this approach since DataParallel module + # can't parallelize custom functions, we branch to different + # routines based on |mode|. + # |data|: dictionary of the input data + + def forward(self, data, mode): + real_image = data['image'] + + orig_landmarks = data['orig_landmarks'] + rotated_landmarks = data['rotated_landmarks'] + original_angles = data['original_angles'] + self.orig_seg, orig_seg_all = \ + self.get_seg_map(orig_landmarks, self.opt.no_gaussian_landmark, self.opt.crop_size, original_angles) + self.rotated_seg, rotated_seg_all = \ + self.get_seg_map(rotated_landmarks, self.opt.no_gaussian_landmark, self.opt.crop_size, original_angles) + + input_semantics = data['mesh'] + rotated_mesh = data['rotated_mesh'] + if self.opt.label_mask: + input_semantics = (input_semantics + orig_seg_all[:, 4].unsqueeze(1) + orig_seg_all[:, 0].unsqueeze(1)) + rotated_mesh = (rotated_mesh + rotated_seg_all[:, 4].unsqueeze(1) + rotated_seg_all[:, 0].unsqueeze(1)) + input_semantics[input_semantics >= 1] = 0 + rotated_mesh[rotated_mesh >= 1] = 0 + + if mode == 'generator': + g_loss, generated = self.compute_generator_loss( + input_semantics, real_image, self.orig_seg, netD=self.netD, mode=mode, + no_ganFeat_loss=self.opt.no_ganFeat_loss, + no_vgg_loss=self.opt.no_vgg_loss, lambda_D=self.opt.lambda_D) + return g_loss, generated, input_semantics + if mode == 'generator_rotated': + g_loss, generated = self.compute_generator_loss( + rotated_mesh, real_image, self.rotated_seg, netD=self.netD_rotate, mode=mode, no_ganFeat_loss=True, + no_vgg_loss=self.opt.no_vgg_loss, lambda_D=self.opt.lambda_rotate_D) + return g_loss, generated, rotated_mesh + elif mode == 'discriminator': + d_loss = self.compute_discriminator_loss( + input_semantics, real_image, self.orig_seg, netD=self.netD, lambda_D=self.opt.lambda_D) + return d_loss + elif mode == 'discriminator_rotated': + d_loss = self.compute_discriminator_loss( + rotated_mesh, real_image, self.rotated_seg, self.netD_rotate, lambda_D=self.opt.lambda_rotate_D) + return d_loss + elif mode == 'encode_only': + z, mu, logvar = self.encode_z(real_image) + return mu, logvar + elif mode == 'inference': + with torch.no_grad(): + if self.opt.label_mask: + rotated_mesh = ( + rotated_mesh + rotated_seg_all[:, 4].unsqueeze(1) + rotated_seg_all[:, 0].unsqueeze(1)) + rotated_mesh[rotated_mesh >= 1] = 0 + fake_image, _ = self.generate_fake(input_semantics, real_image, self.orig_seg) + fake_rotate, _ = self.generate_fake(rotated_mesh, real_image, self.rotated_seg) + + return fake_image, fake_rotate + else: + raise ValueError("|mode| is invalid") + + def 
create_optimizers(self, opt): + G_params = list(self.netG.parameters()) + if opt.use_vae: + G_params += list(self.netE.parameters()) + if opt.isTrain: + if opt.train_rotate: + D_params = list(self.netD.parameters()) + list(self.netD_rotate.parameters()) + else: + D_params = self.netD.parameters() + + if opt.no_TTUR: + beta1, beta2 = opt.beta1, opt.beta2 + G_lr, D_lr = opt.lr, opt.lr + else: + beta1, beta2 = 0, 0.9 + G_lr, D_lr = opt.lr / 2, opt.lr * 2 + + optimizer_G = torch.optim.Adam(G_params, lr=G_lr, betas=(beta1, beta2)) + optimizer_D = torch.optim.Adam(D_params, lr=D_lr, betas=(beta1, beta2)) + + return optimizer_G, optimizer_D + + def save(self, epoch): + util.save_network(self.netG, 'G', epoch, self.opt) + util.save_network(self.netD, 'D', epoch, self.opt) + if self.opt.train_rotate: + util.save_network(self.netD_rotate, 'D_rotate', epoch, self.opt) + if self.opt.use_vae: + util.save_network(self.netE, 'E', epoch, self.opt) + + ############################################################################ + # Private helper methods + ############################################################################ + + def initialize_networks(self, opt): + + netG = networks.define_G(opt) + netD = networks.define_D(opt) if opt.isTrain else None + netD_rotate = networks.define_D(opt) if opt.isTrain else None + netE = networks.define_E(opt) if opt.use_vae else None + pretrained_path = '' + if not opt.isTrain or opt.continue_train: + self.load_network(netG, 'G', opt.which_epoch, pretrained_path) + if opt.isTrain and not opt.noload_D: + self.load_network(netD, 'D', opt.which_epoch, pretrained_path) + self.load_network(netD_rotate, 'D_rotate', opt.which_epoch, pretrained_path) + if opt.use_vae: + self.load_network(netE, 'E', opt.which_epoch, pretrained_path) + else: + + if opt.load_separately: + netG = self.load_separately(netG, 'G', opt) + if not opt.noload_D: + netD = self.load_separately(netD, 'D', opt) + netD_rotate = self.load_separately(netD_rotate, 'D_rotate', opt) + if opt.use_vae: + netE = self.load_separately(netE, 'E', opt) + + return netG, netD, netE, netD_rotate + + # preprocess the input, such as moving the tensors to GPUs and + # transforming the label map to one-hot encoding + + def compute_generator_loss(self, input_semantics, real_image, seg, netD, mode, no_ganFeat_loss=False, + no_vgg_loss=False, lambda_D=1): + G_losses = {} + + fake_image, KLD_loss = self.generate_fake( + input_semantics, real_image, seg, compute_kld_loss=self.opt.use_vae) + + if self.opt.use_vae: + G_losses['KLD'] = KLD_loss + + pred_fake, pred_real = self.discriminate( + input_semantics, fake_image, real_image, seg, netD) + + G_losses['GAN'] = self.criterionGAN(pred_fake, True, + for_discriminator=False) * lambda_D + + if not no_ganFeat_loss: + num_D = len(pred_fake) + GAN_Feat_loss = self.FloatTensor(1).fill_(0) + for i in range(num_D): # for each discriminator + # last output is the final prediction, so we exclude it + num_intermediate_outputs = len(pred_fake[i]) - 1 + for j in range(num_intermediate_outputs): # for each layer output + unweighted_loss = self.criterionFeat( + pred_fake[i][j], pred_real[i][j].detach()) + if j == 0: + unweighted_loss *= self.opt.lambda_image + GAN_Feat_loss += unweighted_loss * self.opt.lambda_feat / num_D + G_losses['GAN_Feat'] = GAN_Feat_loss + + if not no_vgg_loss: + if mode == 'generator_rotated': + num = 2 + else: + num = 0 + G_losses['VGG'] = self.criterionVGG(fake_image, real_image, num) * self.opt.lambda_vgg + + return G_losses, fake_image + + def 
compute_discriminator_loss(self, input_semantics, real_image, seg, netD, lambda_D=1): + D_losses = {} + with torch.no_grad(): + fake_image, _ = self.generate_fake(input_semantics, real_image, seg) + fake_image = fake_image.detach() + fake_image.requires_grad_() + + pred_fake, pred_real = self.discriminate( + input_semantics, fake_image, real_image, seg, netD) + + D_losses['D_Fake'] = self.criterionGAN(pred_fake, False, + for_discriminator=True) * lambda_D + + D_losses['D_real'] = self.criterionGAN(pred_real, True, + for_discriminator=True) * lambda_D + + return D_losses + + def encode_z(self, real_image): + mu, logvar = self.netE(real_image) + z = self.reparameterize(mu, logvar) + return z, mu, logvar + + def generate_fake(self, input_semantics, real_image, seg, compute_kld_loss=False): + z = None + KLD_loss = None + if self.opt.use_vae: + z, mu, logvar = self.encode_z(real_image) + if compute_kld_loss: + KLD_loss = self.KLDLoss(mu, logvar) * self.opt.lambda_kld + + fake_image = self.netG(input_semantics, seg) + + assert (not compute_kld_loss) or self.opt.use_vae, \ + "You cannot compute KLD loss if opt.use_vae == False" + + return fake_image, KLD_loss + + # Given fake and real image, return the prediction of discriminator + # for each fake and real image. + + def discriminate(self, input_semantics, fake_image, real_image, seg, netD): + if self.opt.D_input == "concat": + fake_concat = torch.cat([seg, fake_image], dim=1) + real_concat = torch.cat([self.orig_seg, real_image], dim=1) + else: + fake_concat = fake_image + real_concat = real_image + + # In Batch Normalization, the fake and real images are + # recommended to be in the same batch to avoid disparate + # statistics in fake and real images. + # So both fake and real images are fed to D all at once. 
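+        # Editorial note, not part of the original patch: for fake_concat and
+        # real_concat of shape (B, C, H, W), the torch.cat below produces a
+        # single (2B, C, H, W) batch; divide_pred() splits every prediction
+        # tensor back at size(0) // 2, so the first half is always the fake
+        # prediction and the second half the real one.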
+ fake_and_real = torch.cat([fake_concat, real_concat], dim=0) + + discriminator_out = netD(fake_and_real) + + pred_fake, pred_real = self.divide_pred(discriminator_out) + + return pred_fake, pred_real + + # Take the prediction of fake and real images from the combined batch + def divide_pred(self, pred): + # the prediction contains the intermediate outputs of multiscale GAN, + # so it's usually a list + if type(pred) == list: + fake = [] + real = [] + for p in pred: + fake.append([tensor[:tensor.size(0) // 2] for tensor in p]) + real.append([tensor[tensor.size(0) // 2:] for tensor in p]) + else: + fake = pred[:pred.size(0) // 2] + # rotate_fake = pred[pred.size(0) // 3: pred.size(0) * 2 // 3] + real = pred[pred.size(0) // 2:] + + return fake, real + + def get_edges(self, t): + edge = self.ByteTensor(t.size()).zero_() + edge[:, :, :, 1:] = edge[:, :, :, 1:] | (t[:, :, :, 1:] != t[:, :, :, :-1]) + edge[:, :, :, :-1] = edge[:, :, :, :-1] | (t[:, :, :, 1:] != t[:, :, :, :-1]) + edge[:, :, 1:, :] = edge[:, :, 1:, :] | (t[:, :, 1:, :] != t[:, :, :-1, :]) + edge[:, :, :-1, :] = edge[:, :, :-1, :] | (t[:, :, 1:, :] != t[:, :, :-1, :]) + return edge.float() + + def load_separately(self, network, network_label, opt): + load_path = None + if network_label == 'G': + load_path = opt.G_pretrain_path + elif network_label == 'D': + + load_path = opt.D_pretrain_path + elif network_label == 'D_rotate': + load_path = opt.D_rotate_pretrain_path + elif network_label == 'E': + load_path = opt.E_pretrain_path + + if load_path is not None: + if os.path.isfile(load_path): + print("=> loading checkpoint '{}'".format(load_path)) + checkpoint = torch.load(load_path) + util.copy_state_dict(checkpoint, network) + else: + print("no load_path") + return network + + def load_network(self, network, network_label, epoch_label, save_dir=''): + save_filename = '%s_net_%s.pth' % (epoch_label, network_label) + if not save_dir: + save_dir = self.save_dir + save_path = os.path.join(save_dir, save_filename) + if not os.path.isfile(save_path): + print('%s not exists yet!' 
% save_path) + if network_label == 'G': + raise ('Generator must exist!') + else: + # network.load_state_dict(torch.load(save_path)) + try: + network.load_state_dict(torch.load(save_path)) + except: + pretrained_dict = torch.load(save_path) + model_dict = network.state_dict() + try: + pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict} + network.load_state_dict(pretrained_dict) + if self.opt.verbose: + print( + 'Pretrained network %s has excessive layers; Only loading layers that are used' % network_label) + except: + print('Pretrained network %s has fewer layers; The following are not initialized:' % network_label) + + for k, v in pretrained_dict.items(): + if v.size() == model_dict[k].size(): + model_dict[k] = v + + not_initialized = set() + + for k, v in model_dict.items(): + if k not in pretrained_dict or v.size() != pretrained_dict[k].size(): + not_initialized.add(k.split('.')[0]) + + print(sorted(not_initialized)) + network.load_state_dict(model_dict) + + def reparameterize(self, mu, logvar): + std = torch.exp(0.5 * logvar) + eps = torch.randn_like(std) + return eps.mul(std) + mu + + def use_gpu(self): + return len(self.opt.gpu_ids) > 0 diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/test_model.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/test_model.py new file mode 100644 index 0000000000..c9e9a17605 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/test_model.py @@ -0,0 +1,58 @@ +import torch +from . import networks +from .rotatespade_model import RotateSPADEModel + + +class TestModel(RotateSPADEModel): + @staticmethod + def modify_commandline_options(parser, is_train): + networks.modify_commandline_options(parser, is_train) + return parser + + def __init__(self, opt): + super(TestModel, self).__init__(opt) + + def forward(self, data, mode): + if mode == 'single': + real_image = data['image'] + rotated_landmarks = data['rotated_landmarks'] + original_angles = data['original_angles'] + self.rotated_seg, rotated_seg_all = \ + self.get_seg_map(rotated_landmarks, self.opt.no_gaussian_landmark, self.opt.crop_size, original_angles) + rotated_mesh = data['rotated_mesh'] + if self.opt.label_mask: + rotated_mesh = (rotated_mesh + rotated_seg_all[:, 4].unsqueeze(1) + rotated_seg_all[:, 0].unsqueeze(1)) + rotated_mesh[rotated_mesh >= 1] = 0 + with torch.no_grad(): + fake_rotate, _ = self.generate_fake(rotated_mesh, real_image, self.rotated_seg) + + return fake_rotate + + else: + real_image = data['image'] + + orig_landmarks = data['orig_landmarks'] + rotated_landmarks = data['rotated_landmarks'] + orig_seg, orig_seg_all = self.get_seg_map(orig_landmarks, self.opt.no_gaussian_landmark, self.opt.crop_size) + rotated_seg, rotated_seg_all = self.get_seg_map(rotated_landmarks, self.opt.no_gaussian_landmark, + self.opt.crop_size) + + input_semantics = data['mesh'] + rotated_mesh = data['rotated_mesh'] + # BG = data['BG'] + + if self.opt.label_mask: + input_semantics = (input_semantics + orig_seg_all[:, 4].unsqueeze(1) + orig_seg_all[:, 0].unsqueeze(1)) + rotated_mesh = (rotated_mesh + rotated_seg_all[:, 4].unsqueeze(1) + rotated_seg_all[:, 0].unsqueeze(1)) + input_semantics[input_semantics >= 1] = 0 + rotated_mesh[rotated_mesh >= 1] = 0 + + with torch.no_grad(): + if self.opt.label_mask: + rotated_mesh = ( + rotated_mesh + rotated_seg_all[:, 4].unsqueeze(1) + 
rotated_seg_all[:, 0].unsqueeze(1)) + rotated_mesh[rotated_mesh >= 1] = 0 + fake_image, _ = self.generate_fake(input_semantics, real_image, self.orig_seg) + fake_rotate, _ = self.generate_fake(rotated_mesh, real_image, self.rotated_seg) + + return fake_image, fake_rotate diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/__init__.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/base_options.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/base_options.py new file mode 100644 index 0000000000..8528a58620 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/base_options.py @@ -0,0 +1,243 @@ +import sys +import argparse +import math +import os +from ..util import util +import torch +from algorithm.Rotate_and_Render import models +from algorithm.Rotate_and_Render import data +import pickle +__all__ = ['math'] + + +class BaseOptions(): + def __init__(self): + self.initialized = False + # self.isTrain=False + + def initialize(self, parser): + # experiment specifics + parser.add_argument('--name', type=str, default='mesh2face', + help='name of the experiment. It decides where to store samples and models') + + parser.add_argument('--gpu_ids', type=str, default='0', nargs='+', help='useless') + parser.add_argument('--checkpoints_dir', type=str, default='./checkpoints', help='models are saved here') + parser.add_argument('--model', type=str, default='rotate', help='which model to use, rotate|rotatespade') + parser.add_argument('--trainer', type=str, default='rotate', help='which trainer to use, rotate|rotatespade') + parser.add_argument('--norm_G', type=str, default='spectralsyncbatch', + help='instance normalization or batch normalization') + parser.add_argument('--norm_D', type=str, default='spectralinstance', + help='instance normalization or batch normalization') + parser.add_argument('--norm_E', type=str, default='spectralinstance', + help='instance normalization or batch normalization') + parser.add_argument('--phase', type=str, default='test', help='train, val, test, etc') + parser.add_argument('--device_count', type=int, default=1, help='the total number of gpus to use') # 2 + parser.add_argument('--render_thread', type=int, default=1, help='number of gpus used for rendering') # 1 + parser.add_argument('--chunk_size', default=1, type=int, nargs='+', + help='specify the batch size on each training gpu. Training gpu # = device_count - render_thread') + # input/output sizes + parser.add_argument('--batchSize', type=int, default=4, help='input batch size') + parser.add_argument('--preprocess_mode', type=str, default='resize_and_crop', + help='scaling and cropping of images at load time.', choices=("resize_and_crop", "crop", + "scale_width", + "scale_width_and_crop", + "scale_shortside", + "scale_shortside_and_crop", + "fixed", + "none")) + parser.add_argument('--load_size', type=int, default=400, + help='Scale images to this size. 
The final image will be cropped to --crop_size.') + parser.add_argument('--crop_size', type=int, default=256, + help='Crop to the width of crop_size (after initially scaling the images to load_size.)') + parser.add_argument('--aspect_ratio', type=float, default=1.0, + help='The ratio width/height. The final height of the load image will be crop_size/aspect_ratio') + parser.add_argument('--label_nc', type=int, default=5, + help='# of input label classes without unknown class. If you have unknown class as class label, ' + 'specify --contain_dopntcare_label.') + parser.add_argument('--contain_dontcare_label', action='store_true', + help='if the label map contains dontcare label (dontcare=255)') + parser.add_argument('--output_nc', type=int, default=3, help='# of output image channels') + parser.add_argument('--use_BG', action='store_true', help='') + parser.add_argument('--use_vae', action='store_true', help='') + # for setting inputs + parser.add_argument('--dataset', type=str, default='example', help='dataset') + parser.add_argument('--dataset_mode', type=str, default='allface') + parser.add_argument('--landmark_align', action='store_true', help='wether there is landmark_align') + parser.add_argument('--serial_batches', action='store_true', + help='if true, takes images in order to make batches, otherwise takes them randomly') + parser.add_argument('--no_flip', action='store_true', + help='if specified, do not flip the images for data argumentation') + parser.add_argument('--nThreads', default=8, type=int, help='# threads for loading data') + parser.add_argument('--max_dataset_size', type=int, default=sys.maxsize, + help='Maximum number of samples allowed per dataset. If the dataset directory contains more ' + 'than max_dataset_size, only a subset is loaded.') + parser.add_argument('--load_from_opt_file', action='store_true', + help='load the options from checkpoints and use that as default') + parser.add_argument('--cache_filelist_write', action='store_true', + help='saves the current filelist into a text file, so that it loads faster') + parser.add_argument('--cache_filelist_read', action='store_true', help='reads from the file list cache') + + # for displays + parser.add_argument('--display_winsize', type=int, default=400, help='display window size') + + # for generator + parser.add_argument('--netG', type=str, default='rotatespade', + help='selects model to use for netG (pix2pixhd | spade)') + parser.add_argument('--ngf', type=int, default=64, help='# of gen filters in first conv layer') + parser.add_argument('--init_type', type=str, default='xavier', + help='network initialization [normal|xavier|kaiming|orthogonal]') + parser.add_argument('--init_variance', type=float, default=0.02, + help='variance of the initialization distribution') + # parser.add_argument('--z_dim', type=int, default=256, help='dimension of the latent z vector') + + # for instance-wise features + parser.add_argument('--no_instance', action='store_true', default='True', + help='if specified, do *not* add instance map as input') + parser.add_argument('--nef', type=int, default=16, help='# of encoder filters in the first conv layer') + + parser.add_argument('--no_gaussian_landmark', action='store_false', + help='whether to use no_gaussian_landmark (1.0 landmark) for rotatespade model') + parser.add_argument('--label_mask', action='store_false', help='whether to use face mask') + parser.add_argument('--heatmap_size', type=float, default=1, + help='the size of the heatmap, used in rotatespade model') + 
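+        # Editorial note, not part of the original patch: the -val_yaw and
+        # -val_pitch flags declared below use string defaults ("10 20",
+        # "30 40") together with type=float. CPython's argparse type-converts
+        # string defaults as if they came from the command line, so omitting
+        # either flag triggers float("10 20") and an "invalid float value"
+        # parse error; a list default avoids this, e.g. (hypothetical):
+        #     parser.add_argument('-val_yaw', default=[10.0, 20.0], nargs='+',
+        #                         type=float, help='yaw poses list between [-90,90]')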
parser.add_argument('--erode_kernel', type=int, default=21, help='erode kernel size, used in renderer') + parser.add_argument("-device", default="cuda", type=str, help="choose between cuda or cpu ") + parser.add_argument("-path_in", default=os.path.join("opendr_internal", "projects", + "data_generation", + "", + "demos", "imgs_input"), + type=str, help='Give the path of image folder') + parser.add_argument('-path_3ddfa', default=os.path.join("opendr_internal", "projects", + "data_generation", + "", + "algorithm", "DDFA"), + type=str, help='Give the path of DDFA folder') + parser.add_argument('-save_path', default=os.path.join("opendr_internal", "projects", + "data_generation", + "", + "results"), + type=str, help='Give the path of results folder') + parser.add_argument('-val_yaw', default="10 20", nargs='+', type=float, help='yaw poses list between [-90,90] ') + parser.add_argument('-val_pitch', default="30 40", nargs='+', type=float, + help='pitch poses list between [-90,90] ') + + self.initialized = True + return parser + + def gather_options(self): + # initialize parser with basic options + if not self.initialized: + parser = argparse.ArgumentParser( + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser = self.initialize(parser) + + # get the basic options + opt, unknown = parser.parse_known_args() + + # modify model-related parser options + model_name = opt.model + model_option_setter = models.get_option_setter(model_name) + parser = model_option_setter(parser, self.isTrain) + + # modify dataset-related parser options + dataset_mode = opt.dataset_mode + dataset_option_setter = data.get_option_setter(dataset_mode) + parser = dataset_option_setter(parser, self.isTrain) + + opt, unknown = parser.parse_known_args() + + # if there is opt_file, load it. 
+ # The previous default options will be overwritten + if opt.load_from_opt_file: + parser = self.update_options_from_file(parser, opt) + + opt = parser.parse_args() + self.parser = parser + return opt + + def print_options(self, opt): + message = '' + message += '----------------- Options ---------------\n' + for k, v in sorted(vars(opt).items()): + comment = '' + default = self.parser.get_default(k) + if v != default: + comment = '\t[default: %s]' % str(default) + message += '{:>25}: {:<30}{}\n'.format(str(k), str(v), comment) + message += '----------------- End -------------------' + print(message) + + def option_file_path(self, opt, makedir=False): + expr_dir = os.path.join(opt.checkpoints_dir, opt.name) + if makedir: + util.mkdirs(expr_dir) + file_name = os.path.join(expr_dir, 'opt') + return file_name + + def save_options(self, opt): + file_name = self.option_file_path(opt, makedir=True) + with open(file_name + '.txt', 'wt') as opt_file: + for k, v in sorted(vars(opt).items()): + comment = '' + default = self.parser.get_default(k) + if v != default: + comment = '\t[default: %s]' % str(default) + opt_file.write('{:>25}: {:<30}{}\n'.format(str(k), str(v), comment)) + + with open(file_name + '.pkl', 'wb') as opt_file: + pickle.dump(opt, opt_file) + + def update_options_from_file(self, parser, opt): + new_opt = self.load_options(opt) + for k, v in sorted(vars(opt).items()): + if hasattr(new_opt, k) and v != getattr(new_opt, k): + new_val = getattr(new_opt, k) + parser.set_defaults(**{k: new_val}) + return parser + + def load_options(self, opt): + file_name = self.option_file_path(opt, makedir=False) + new_opt = pickle.load(open(file_name + '.pkl', 'rb')) + return new_opt + + def parse(self, save=False): + + opt = self.gather_options() + opt.isTrain = self.isTrain # train or test + + self.print_options(opt) + if opt.isTrain: + self.save_options(opt) + ''' + if not opt.isTrain: + # change radian to angle + if opt.yaw_poses is not None: + for pose in opt.yaw_poses: + assert abs(pose) <= 90, "yaw pose must be between [-90, 90]" + opt.yaw_poses = [round(x / 180.0 * math.pi, 2) for x in opt.yaw_poses] + if opt.pitch_poses is not None: + for pose in opt.pitch_poses: + assert abs(pose) <= 90, "pitch pose must be between [-90, 90]" + opt.pitch_poses = [round(x / 180.0 * math.pi, 2) for x in opt.pitch_poses] + ''' + # Set semantic_nc based on the option. + # This will be convenient in many places + opt.semantic_nc = opt.label_nc + (3 if opt.use_BG else 0) + + # set gpu ids + str_ids = opt.gpu_ids.split(',') + opt.gpu_ids = [] + for str_id in str_ids: + id = int(str_id) + if id >= 0: + opt.gpu_ids.append(id) + if len(opt.gpu_ids) > 0: + torch.cuda.set_device(opt.gpu_ids[0]) + + assert len(opt.gpu_ids) == 0 or opt.batchSize % len(opt.gpu_ids) == 0, \ + "Batch size %d is wrong. It must be a multiple of # GPUs %d." 
\ + % (opt.batchSize, len(opt.gpu_ids)) + # opt.batchSize = sum(opt.chunk_size) + + self.opt = opt + return self.opt diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/test_options.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/test_options.py new file mode 100644 index 0000000000..6f860bc59d --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/test_options.py @@ -0,0 +1,29 @@ +from .base_options import BaseOptions + + +class TestOptions(BaseOptions): + def initialize(self, parser): + BaseOptions.initialize(self, parser) + parser.add_argument('--results_dir', type=str, default='./results/', help='saves results here.') + parser.add_argument('--list_num', type=int, default=0, help='list num') + parser.add_argument('--which_epoch', type=str, default='latest', + help='which epoch to load? set to latest to use latest cached model') + parser.add_argument('--how_many', type=int, default=float("inf"), help='how many test images to run') + parser.add_argument('--list_start', type=int, default=0, help='which num in the list to start') + parser.add_argument('--list_end', type=int, default=10, help='how many test images to run') + # parser.add_argument('--save_path', type=str, default='./results/', help='where to save data') + parser.add_argument('--names', type=str, default='rs_model', help='dataset') + parser.add_argument('--multi_gpu', action='store_false', help='whether to use multi gpus') + parser.add_argument('--align', action='store_false', help='whether to save align') + # parser.add_argument('--yaw_poses', type=str, default='30,40', nargs='+', + # help='yaw poses list during testing') + # parser.add_argument('--pitch_poses', type=str, default='10,20', nargs='+', + # help='pitch poses list during testing') + parser.add_argument('--posesrandom', action='store_true', help='whether to random the poses') + + parser.set_defaults(preprocess_mode='scale_width_and_crop', crop_size=256, load_size=256, display_winsize=256) + parser.set_defaults(serial_batches=True) + parser.set_defaults(no_flip=True) + parser.set_defaults(phase='test') + self.isTrain = False + return parser diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/train_options.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/train_options.py new file mode 100644 index 0000000000..a845eea1fe --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/train_options.py @@ -0,0 +1,73 @@ +from .base_options import BaseOptions + + +class TrainOptions(BaseOptions): + def initialize(self, parser): + BaseOptions.initialize(self, parser) + # for displays + parser.add_argument('--display_freq', type=int, default=100, + help='frequency of showing training results on screen') + parser.add_argument('--print_freq', type=int, default=100, + help='frequency of showing training results on console') + parser.add_argument('--save_latest_freq', type=int, default=5000, help='frequency of saving the latest results') + parser.add_argument('--save_epoch_freq', type=int, default=5, + help='frequency of saving checkpoints at the end of epochs') + parser.add_argument('--no_html', action='store_true', + help='do not save intermediate training results to 
[opt.checkpoints_dir]/[opt.name]/web/') + parser.add_argument('--debug', action='store_true', help='only do one epoch and displays at each iteration') + parser.add_argument('--tf_log', action='store_true', + help='if specified, use tensorboard logging. Requires tensorflow installed') + parser.add_argument('--tensorboard', default=True, + help='if specified, use tensorboard logging. Requires tensorflow installed') + parser.add_argument('--load_pretrain', type=str, default='', + help='load the pretrained model from the specified location') + parser.add_argument('--train_rotate', action='store_true', + help='whether train rotated mesh') + parser.add_argument('--lambda_rotate_D', type=float, default='0.1', + help='rotated D loss weight') + parser.add_argument('--lambda_D', type=float, default='1', + help='D loss weight') + # for training + parser.add_argument('--continue_train', action='store_true', help='continue training: load the latest model') + parser.add_argument('--which_epoch', type=str, default='latest', + help='which epoch to load? set to latest to use latest cached model') + parser.add_argument('--noload_D', action='store_true', help='whether to load D when continue training') + parser.add_argument('--large_pose', action='store_true', help='whether to use large pose training') + parser.add_argument('--pose_noise', action='store_true', help='whether to use pose noise training') + parser.add_argument('--load_separately', action='store_true', + help='whether to continue train by loading separate models') + parser.add_argument('--niter', type=int, default=50, + help='# of iter at starting learning rate. This is NOT the total #epochs. Totla #epochs is ' + 'niter + niter_decay') + parser.add_argument('--niter_decay', type=int, default=1000, + help='# of iter to linearly decay learning rate to zero') + parser.add_argument('--optimizer', type=str, default='adam') + parser.add_argument('--beta1', type=float, default=0.5, help='momentum term of adam') + parser.add_argument('--beta2', type=float, default=0.999, help='momentum term of adam') + parser.add_argument('--lr', type=float, default=0.0002, help='initial learning rate for adam') + parser.add_argument('--D_steps_per_G', type=int, default=1, + help='number of discriminator iterations per generator iterations.') + parser.add_argument('--D_input', type=str, default='single', help='(concat|single|hinge)') + parser.add_argument('--gan_matching_feats', type=str, default='more', help='(concat|single|hinge)') + parser.add_argument('--G_pretrain_path', type=str, default='./checkpoints/100_net_G.pth', + help='G pretrain path') + parser.add_argument('--D_pretrain_path', type=str, default='', help='D pretrain path') + parser.add_argument('--E_pretrain_path', type=str, default='', help='E pretrain path') + parser.add_argument('--D_rotate_pretrain_path', type=str, default='', help='D_rotate pretrain path') + + # for discriminators + parser.add_argument('--ndf', type=int, default=64, help='# of discrim filters in first conv layer') + parser.add_argument('--lambda_feat', type=float, default=10.0, help='weight for feature matching loss') + parser.add_argument('--lambda_image', type=float, default=1.0, help='weight for image reconstruction') + parser.add_argument('--lambda_vgg', type=float, default=10.0, help='weight for vgg loss') + parser.add_argument('--no_ganFeat_loss', action='store_true', + help='if specified, do *not* use discriminator feature matching loss') + parser.add_argument('--no_vgg_loss', action='store_true', + help='if specified, do *not* 
use VGG feature matching loss') + parser.add_argument('--face_vgg', action='store_true', help='if specified, use VGG feature matching loss') + parser.add_argument('--vggface_checkpoint', type=str, default='', help='pth to vggface ckpt') + parser.add_argument('--gan_mode', type=str, default='hinge', help='(ls|original|hinge)') + parser.add_argument('--netD', type=str, default='multiscale', help='(n_layers|multiscale|image|projection)') + parser.add_argument('--no_TTUR', action='store_true', help='Use TTUR training scheme') + self.isTrain = True + return parser diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_frontal.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_frontal.py new file mode 100644 index 0000000000..f35985c1d8 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_frontal.py @@ -0,0 +1,205 @@ +from models.test_model import TestModel +from options.test_options import TestOptions +from util.iter_counter import IterationCounter +from util.visualizer import Visualizer +from util import util +from data.data_utils import init_parallel_jobs +from models.networks.test_render import TestRender +import torch.multiprocessing as multiprocessing +import numpy as np +import os +import data +from torch.multiprocessing import Queue +from skimage import transform as trans +import cv2 +import time +import torch + +multiprocessing.set_start_method('spawn', force=True) + + +def create_path(a_path, b_path): + name_id_path = os.path.join(a_path, b_path) + if not os.path.exists(name_id_path): + os.makedirs(name_id_path) + return name_id_path + + +def create_paths(save_path, img_path, foldername='orig', folderlevel=2): + save_rotated_path_name = create_path(save_path, foldername) + + path_split = img_path.split('/') + rotated_file_savepath = save_rotated_path_name + for level in range(len(path_split) - folderlevel, len(path_split)): + file_name = path_split[level] + rotated_file_savepath = os.path.join(rotated_file_savepath, file_name) + return rotated_file_savepath + + +def affine_align(img, landmark=None, **kwargs): + M = None + src = np.array([ + [38.2946, 51.6963], + [73.5318, 51.5014], + [56.0252, 71.7366], + [41.5493, 92.3655], + [70.7299, 92.2041]], dtype=np.float32) + src = src * 224 / 112 + + dst = landmark.astype(np.float32) + tform = trans.SimilarityTransform() + tform.estimate(dst, src) + M = tform.params[0:2, :] + warped = cv2.warpAffine(img, M, (224, 224), borderValue=0.0) + return warped + + +def landmark_68_to_5(t68): + le = t68[36:42, :].mean(axis=0, keepdims=True) + re = t68[42:48, :].mean(axis=0, keepdims=True) + no = t68[31:32, :] + lm = t68[48:49, :] + rm = t68[54:55, :] + t5 = np.concatenate([le, re, no, lm, rm], axis=0) + t5 = t5.reshape(10) + return t5 + + +def save_img(img, save_path): + image_numpy = util.tensor2im(img) + util.save_image(image_numpy, save_path, create_dir=True) + return image_numpy + + +if __name__ == '__main__': + + opt = TestOptions().parse() + + data_info = data.dataset_info() + datanum = data_info.get_dataset(opt) + folderlevel = data_info.folder_level[datanum] + + dataloaders = data.create_dataloader_test(opt) + + visualizer = Visualizer(opt) + iter_counter = IterationCounter(opt, len(dataloaders[0]) * opt.render_thread) + # create a webpage that summarizes the all results + + testing_queue = Queue(10) + + ngpus = opt.device_count + + render_gpu_ids = 
list(range(ngpus - opt.render_thread, ngpus)) + render_layer_list = [] + for gpu in render_gpu_ids: + opt.gpu_ids = gpu + render_layer = TestRender(opt) + render_layer_list.append(render_layer) + + opt.gpu_ids = list(range(0, ngpus - opt.render_thread)) + print('Testing gpu ', opt.gpu_ids) + if opt.names is None: + model = TestModel(opt) + model.eval() + model = torch.nn.DataParallel(model.cuda(), + device_ids=opt.gpu_ids, + output_device=opt.gpu_ids[-1], + ) + models = [model] + names = [opt.name] + save_path = create_path(create_path(opt.save_path, opt.name), opt.dataset) + save_paths = [save_path] + f = [open( + os.path.join(save_path, opt.dataset + str(opt.list_start) + str(opt.list_end) + '_rotate_lmk.txt'), 'w')] + else: + models = [] + names = [] + save_paths = [] + f = [] + for name in opt.names.split(','): + opt.name = name + model = TestModel(opt) + model.eval() + model = torch.nn.DataParallel(model.cuda(), + device_ids=opt.gpu_ids, + output_device=opt.gpu_ids[-1], + ) + models.append(model) + names.append(name) + save_path = create_path(create_path(opt.save_path, opt.name), opt.dataset) + save_paths.append(save_path) + f_rotated = open( + os.path.join(save_path, opt.dataset + str(opt.list_start) + str(opt.list_end) + '_rotate_lmk.txt'), 'w') + f.append(f_rotated) + + test_tasks = init_parallel_jobs(testing_queue, dataloaders, iter_counter, opt, render_layer_list) + # test + landmarks = [] + + process_num = opt.list_start + first_time = time.time() + try: + for i, data_i in enumerate(range(len(dataloaders[0]) * opt.render_thread)): + # if i * opt.batchSize >= opt.how_many: + # break + # data = trainer.get_input(data_i) + start_time = time.time() + data = testing_queue.get(block=True) + + current_time = time.time() + time_per_iter = (current_time - start_time) / opt.batchSize + message = '(************* each image render time: %.3f *****************) ' % (time_per_iter) + print(message) + + img_path = data['path'] + rotated_landmarks = data['rotated_landmarks'][:, :, :2].cpu().numpy().astype(np.float) + + generate_rotateds = [] + for model in models: + generate_rotated = model.forward(data, mode='single') + generate_rotateds.append(generate_rotated) + + for n, name in enumerate(names): + opt.name = name + for b in range(generate_rotateds[n].shape[0]): + # get 5 key points + rotated_keypoints = landmark_68_to_5(rotated_landmarks[b]) + # get savepaths + rotated_file_savepath = create_paths(save_paths[n], img_path[b], folderlevel=folderlevel) + + image_numpy = save_img(generate_rotateds[n][b], rotated_file_savepath) + rotated_keypoints_str = rotated_file_savepath + ' 1 ' + ' '.join( + [str(int(n)) for n in rotated_keypoints]) + '\n' + print('process image...' 
+ rotated_file_savepath) + f[n].write(rotated_keypoints_str) + + current_time = time.time() + if n == 0: + process_num += 1 + print('processed num ' + str(process_num)) + if opt.align: + aligned_file_savepath = create_paths(save_paths[n], img_path[b], 'aligned', + folderlevel=folderlevel) + warped = affine_align(image_numpy, rotated_keypoints.reshape(5, 2)) + util.save_image(warped, aligned_file_savepath, create_dir=True) + + current_time = time.time() + time_per_iter = (current_time - start_time) / opt.batchSize + message = '(************* each image time total: %.3f *****************) ' % (time_per_iter) + print(message) + + except KeyboardInterrupt: + print("Interrupted!") + for fs in f: + fs.close() + pass + + except Exception as e: + print(e) + for fs in f: + fs.close() + + else: + print('finished') + for fs in f: + fs.close() diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_multipose.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_multipose.py new file mode 100644 index 0000000000..8e8bf09b58 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_multipose.py @@ -0,0 +1,241 @@ +import torch.multiprocessing as multiprocessing +import numpy as np +import os +from .util.iter_counter import IterationCounter +from .options.test_options import TestOptions +from .models.test_model import TestModel +from .util.visualizer import Visualizer +from .util import util +from torch.multiprocessing import Queue +from .data.data_utils import init_parallel_jobs +from skimage import transform as trans +import cv2 +import time +import torch +import math +from .models.networks.rotate_render import TestRender +from algorithm.Rotate_and_Render.data import dataset_info +multiprocessing.set_start_method('spawn', force=True) +__all__ = ['dataset_info'] + + +def create_path(a_path, b_path): + name_id_path = os.path.join(a_path, b_path) + if not os.path.exists(name_id_path): + os.makedirs(name_id_path) + return name_id_path + + +def create_paths(save_path, img_path, foldername='orig', folderlevel=2, pose='0'): + save_rotated_path_name = create_path(save_path, foldername) + + path_split = img_path.split('/') + rotated_file_savepath = save_rotated_path_name + for level in range(len(path_split) - folderlevel, len(path_split)): + file_name = path_split[level] + if level == len(path_split) - 1: + fname, ext = os.path.splitext(file_name) + if ext == ".jpg": + file_name = fname + '_' + str(pose) + '.jpg' + # elif ext == ".png": + # file_name = filename + '_' + str(pose) + '.png' + rotated_file_savepath = os.path.join(rotated_file_savepath, file_name) + return rotated_file_savepath + + +def affine_align(img, landmark=None, **kwargs): + M = None + src = np.array([ + [38.2946, 51.6963], + [73.5318, 51.5014], + [56.0252, 71.7366], + [41.5493, 92.3655], + [70.7299, 92.2041]], dtype=np.float32) + src = src * 224 / 112 + + dst = landmark.astype(np.float32) + tform = trans.SimilarityTransform() + tform.estimate(dst, src) + M = tform.params[0:2, :] + warped = cv2.warpAffine(img, M, (224, 224), borderValue=0.0) + return warped + + +def landmark_68_to_5(t68): + le = t68[36:42, :].mean(axis=0, keepdims=True) + re = t68[42:48, :].mean(axis=0, keepdims=True) + no = t68[31:32, :] + lm = t68[48:49, :] + rm = t68[54:55, :] + t5 = np.concatenate([le, re, no, lm, rm], axis=0) + t5 = t5.reshape(10) + return t5 + + +def save_img(img, save_path): + 
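+    # convert the output tensor to a uint8 numpy image and write it to disk
+    # (directories are created as needed); the array is returned so callers can
+    # reuse it, e.g. for the optional affine alignment step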
image_numpy = util.tensor2im(img) + util.save_image(image_numpy, save_path, create_dir=True) + return image_numpy + + +def main(save_path, val_yaw, val_pitch): + import algorithm.Rotate_and_Render.data as data + + opt = TestOptions().parse() + data_info = data.dataset_info() + print(val_yaw) + opt.yaw_poses = [float(x) for x in val_yaw] + opt.pitch_poses = [float(x) for x in val_pitch] + opt.save_path = save_path + if not opt.isTrain: + # change radian to angle + if opt.yaw_poses is not None: + for pose in opt.yaw_poses: + assert abs(pose) <= 90, "yaw pose must be between [-90, 90]" + opt.yaw_poses = [round(x / 180.0 * math.pi, 2) for x in opt.yaw_poses] + if opt.pitch_poses is not None: + for pose in opt.pitch_poses: + assert abs(pose) <= 90, "pitch pose must be between [-90, 90]" + opt.pitch_poses = [round(x / 180.0 * math.pi, 2) for x in opt.pitch_poses] + datanum = data_info.get_dataset(opt)[0] + folderlevel = data_info.folder_level[datanum] + dataloaders = data.create_dataloader_test(opt) + Visualizer(opt) + iter_counter = IterationCounter(opt, len(dataloaders[0]) * opt.render_thread) + # create a webpage that summarizes the all results + + testing_queue = Queue(10) + + ngpus = opt.device_count + + render_gpu_ids = list(range(ngpus - opt.render_thread, ngpus)) + render_layer_list = [] + for gpu in render_gpu_ids: + opt.gpu_ids = gpu + render_layer = TestRender(opt) + render_layer_list.append(render_layer) + + opt.gpu_ids = list(range(0, ngpus - opt.render_thread + 1)) + print('Testing gpu ', opt.gpu_ids) + if opt.names is None: + model = TestModel(opt) + model.eval() + model = torch.nn.DataParallel(model.cuda(), + device_ids=opt.gpu_ids, + output_device=opt.gpu_ids[-1], + ) + models = [model] + names = [opt.name] + save_path = create_path(create_path(opt.save_path, opt.name), opt.dataset) + save_paths = [save_path] + f = [open( + os.path.join(save_path, opt.dataset + str(opt.list_start) + str(opt.list_end) + '_rotate_lmk.txt'), 'w')] + else: + models = [] + names = [] + save_paths = [] + f = [] + for name in opt.names.split(','): + opt.name = name + model = TestModel(opt) + model.eval() + model = torch.nn.DataParallel(model.cuda(), + device_ids=opt.gpu_ids, + output_device=opt.gpu_ids[-1], + ) + models.append(model) + names.append(name) + save_path = create_path(create_path(opt.save_path, opt.name), opt.dataset) + save_paths.append(save_path) + f_rotated = open( + os.path.join(save_path, opt.dataset + str(opt.list_start) + str(opt.list_end) + '_rotate_lmk.txt'), 'w') + f.append(f_rotated) + + init_parallel_jobs(testing_queue, dataloaders, iter_counter, opt, render_layer_list) + # test + # landmarks = [] + + process_num = opt.list_start + # first_time = time.time() + try: + for i, data_i in enumerate(range(len(dataloaders[0]) * opt.render_thread)): + # if i * opt.batchSize >= opt.how_many: + # break + # data = trainer.get_input(data_i) + start_time = time.time() + data = testing_queue.get(block=True) + + current_time = time.time() + time_per_iter = (current_time - start_time) / opt.batchSize + message = '(************* each image render time: %.3f *****************) ' % (time_per_iter) + print(message) + + img_path = data['path'] + # print(img_path) + poses = data['pose_list'] + rotated_landmarks = data['rotated_landmarks'][:, :, :2].cpu().numpy().astype(np.float) + # rotated_landmarks_106 = data['rotated_landmarks_106'][:, :, :2].cpu().numpy().astype(np.float) + + generate_rotateds = [] + for model in models: + generate_rotated = model.forward(data, mode='single') + 
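+                # run each loaded model on the same batch; the outputs are
+                # written out per model further below, under each model's own
+                # save path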
+                generate_rotateds.append(generate_rotated)
+
+            for n, name in enumerate(names):
+                opt.name = name
+                for b in range(generate_rotateds[n].shape[0]):
+                    # get 5 key points
+                    rotated_keypoints = landmark_68_to_5(rotated_landmarks[b])
+                    # get savepaths (the path-depth index and the FID split below
+                    # are dataset-specific; adjust them for a new folder layout)
+                    IDname = img_path[b].split("/")[4]
+                    FID = IDname.split("_")[0]  # CelebA
+                    # FID = IDname.split("_")[0] + "_" + IDname.split("_")[1]  # LFW
+                    rotated_file_savepath = create_paths(save_paths[n], img_path[b], FID + '_orig',
+                                                         folderlevel=folderlevel, pose=poses[b])
+
+                    image_numpy = save_img(generate_rotateds[n][b], rotated_file_savepath)
+                    rotated_keypoints_str = rotated_file_savepath + ' 1 ' + ' '.join(
+                        [str(int(k)) for k in rotated_keypoints]) + '\n'
+                    print('process image...' + rotated_file_savepath)
+                    f[n].write(rotated_keypoints_str)
+
+                    current_time = time.time()
+                    if n == 0:
+                        if b <= opt.batchSize:
+                            process_num += 1
+                            print('processed num ' + str(process_num))
+                    if opt.align:
+                        aligned_file_savepath = create_paths(save_paths[n], img_path[b], FID + '_aligned',
+                                                             folderlevel=folderlevel, pose=poses[b])
+                        warped = affine_align(image_numpy, rotated_keypoints.reshape(5, 2))
+                        util.save_image(warped, aligned_file_savepath, create_dir=True)
+
+                    # save 106 landmarks
+                    # rotated_keypoints_106 = rotated_landmarks_106[b]  # shape: 106 * 2
+
+            current_time = time.time()
+            time_per_iter = (current_time - start_time) / opt.batchSize
+            message = '(************* each image time total: %.3f *****************) ' % (time_per_iter)
+            print(message)
+
+    except KeyboardInterrupt:
+        print("Interrupted!")
+        for fs in f:
+            fs.close()
+
+    except Exception as e:
+        print(e)
+        for fs in f:
+            fs.close()
+
+    else:
+        print('finished')
+        for fs in f:
+            fs.close()
+
+
+# Note: this module relies on relative imports, so it cannot be executed directly;
+# use tool_synthetic_facial_generation.py (or call main() with explicit arguments).
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/train.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/train.py
new file mode 100644
index 0000000000..f76a1fe8b4
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/train.py
@@ -0,0 +1,99 @@
+import torch.multiprocessing as multiprocessing
+import sys
+from options.train_options import TrainOptions
+import data
+from trainers import create_trainer
+from util.iter_counter import IterationCounter
+from util.visualizer import Visualizer
+from torch.multiprocessing import Queue
+from data.data_utils import init_parallel_jobs
+from models.networks.render import Render
+
+multiprocessing.set_start_method('spawn', force=True)
+
+
+if __name__ == '__main__':
+
+    # parse options
+    opt = TrainOptions().parse()
+
+    # print options to help debugging
+    print(' '.join(sys.argv))
+
+    # load the dataset
+    dataloader = data.create_dataloader_test(opt)
+
+    # create tool for counting iterations
+    if type(dataloader) == list:
+        data_loader_size = len(dataloader[0]) * opt.render_thread
+    else:
+        data_loader_size = len(dataloader)
+    iter_counter = IterationCounter(opt, data_loader_size)
+
+    ngpus = opt.device_count
+
+    training_queue = Queue(10)
+
+    # render layers: the last opt.render_thread GPUs are dedicated to rendering,
+    # the remaining ones to training
+    render_gpu_ids = list(range(ngpus - opt.render_thread, ngpus))
+    render_layer_list = []
+    for gpu in render_gpu_ids:
+        opt.gpu_ids = gpu
+        render_layer = Render(opt)
+        render_layer_list.append(render_layer)
+
+    training_tasks = init_parallel_jobs(training_queue, dataloader, iter_counter, opt, render_layer_list)
+
+    opt.gpu_ids = list(range(0, ngpus - opt.render_thread))
+    print('Training gpu ', opt.gpu_ids)
+    # create trainer for our model
+    trainer = create_trainer(opt)
+    # create tool for visualization
+    visualizer = Visualizer(opt)
+
+    for epoch in iter_counter.training_epochs():
+        iter_counter.record_epoch_start(epoch)
+        for i, data_i in enumerate(range(data_loader_size), start=iter_counter.epoch_iter):
+            iter_counter.record_one_iteration()
+
+            data = training_queue.get(block=True)
+
+            # Training
+            # train generator
+            if i % opt.D_steps_per_G == 0:
+                trainer.run_generator_one_step(data)
+
+            # train discriminator
+            trainer.run_discriminator_one_step(data)
+
+            # Visualizations
+            if iter_counter.needs_printing():
+                losses = trainer.get_latest_losses()
+                visualizer.print_current_errors(epoch, iter_counter.epoch_iter,
+                                                losses, iter_counter.time_per_iter)
+                visualizer.plot_current_errors(losses, iter_counter.total_steps_so_far)
+
+            if iter_counter.needs_displaying():
+                visuals = trainer.get_current_visuals(data)
+                visualizer.display_current_results(visuals, epoch, iter_counter.total_steps_so_far)
+
+            if iter_counter.needs_saving():
+                print('saving the latest model (epoch %d, total_steps %d)' %
+                      (epoch, iter_counter.total_steps_so_far))
+                trainer.save('latest')
+                iter_counter.record_current_iter()
+
+        trainer.update_learning_rate(epoch)
+        iter_counter.record_epoch_end()
+
+        if epoch % opt.save_epoch_freq == 0 or \
+                epoch == iter_counter.total_epochs:
+            print('saving the model at the end of epoch %d, iters %d' %
+                  (epoch, iter_counter.total_steps_so_far))
+            trainer.save('latest')
+            trainer.save(epoch)
+
+    for training_task in training_tasks:
+        training_task.terminate()
+    print('Training was successfully finished.')
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/__init__.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/__init__.py
new file mode 100644
index 0000000000..6170993aa3
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/__init__.py
@@ -0,0 +1,36 @@
+import importlib
+
+
+def find_trainer_using_name(trainer_name):
+    # Given the option --trainer [trainername],
+    # the file "trainers/trainername_trainer.py"
+    # will be imported.
+    trainer_filename = "trainers." + trainer_name + "_trainer"
+    modellib = importlib.import_module(trainer_filename)
+
+    # In that file, the class named TrainerNameTrainer() will be
+    # instantiated; the lookup is case-insensitive.
+    trainer = None
+    target_model_name = trainer_name.replace('_', '') + 'trainer'
+    for name, cls in modellib.__dict__.items():
+        if name.lower() == target_model_name.lower():
+            trainer = cls
+
+    if trainer is None:
+        print("In %s.py, there should be a trainer name that matches %s in lowercase."
+              % (trainer_filename, target_model_name))
+        exit(0)
+
+    return trainer
+
+
+def get_option_setter(trainer_name):
+    model_class = find_trainer_using_name(trainer_name)
+    return model_class.modify_commandline_options
+
+
+def create_trainer(opt):
+    trainer = find_trainer_using_name(opt.trainer)
+    instance = trainer(opt)
+    print("trainer was created")
+    return instance
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotate_trainer.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotate_trainer.py
new file mode 100644
index 0000000000..e337ce7538
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotate_trainer.py
@@ -0,0 +1,110 @@
+import torch
+from models.networks.sync_batchnorm import DataParallelWithCallback
+from models import create_model
+from collections import OrderedDict
+
+
+class RotateTrainer(object):
+    """
+    Trainer creates the model and optimizers, and uses them to
+    update the weights of the network while reporting losses
+    and the latest visuals to visualize the progress in training.
+    """
+
+    def __init__(self, opt):
+        self.opt = opt
+        self.pix2pix_model = create_model(opt)
+        if len(opt.gpu_ids) > 0:
+            self.pix2pix_model = DataParallelWithCallback(self.pix2pix_model,
+                                                          device_ids=opt.gpu_ids,
+                                                          output_device=opt.gpu_ids[-1],
+                                                          chunk_size=opt.chunk_size)
+            self.pix2pix_model_on_one_gpu = self.pix2pix_model.module
+        else:
+            self.pix2pix_model_on_one_gpu = self.pix2pix_model
+
+        self.generated = None
+        if opt.isTrain:
+            self.optimizer_G, self.optimizer_D = \
+                self.pix2pix_model_on_one_gpu.create_optimizers(opt)
+            self.old_lr = opt.lr
+
+    def use_gpu(self):
+        return len(self.opt.gpu_ids) > 0
+
+    def run_generator_one_step(self, data):
+        self.optimizer_G.zero_grad()
+        g_losses, generated = self.pix2pix_model.forward(data=data, mode='generator')
+        if not self.opt.train_rotate:
+            # the rotated branch is still run (without gradients) so that its
+            # outputs can be visualized, but it does not contribute to the loss
+            with torch.no_grad():
+                g_rotate_losses, generated_rotate = self.pix2pix_model.forward(data=data, mode='generator_rotated')
+        else:
+            g_rotate_losses, generated_rotate = self.pix2pix_model.forward(data=data, mode='generator_rotated')
+            g_losses['GAN_rotate'] = g_rotate_losses['GAN']
+        g_loss = sum(g_losses.values()).mean()
+        g_loss.backward()
+        self.optimizer_G.step()
+        self.g_losses = g_losses
+        self.generated = generated
+        self.generated_rotate = generated_rotate
+
+    def run_discriminator_one_step(self, data):
+        self.optimizer_D.zero_grad()
+        d_losses = self.pix2pix_model.forward(data=data, mode='discriminator')
+        if self.opt.train_rotate:
+            d_rotated_losses = self.pix2pix_model.forward(data=data, mode='discriminator_rotated')
+            d_losses['D_rotate_Fake'] = d_rotated_losses['D_Fake']
+            d_losses['D_rotate_real'] = d_rotated_losses['D_real']
+        d_loss = sum(d_losses.values()).mean()
+        d_loss.backward()
+        self.optimizer_D.step()
+        self.d_losses = d_losses
+
+    def get_latest_generated(self):
+        return self.generated
+
+    def get_latest_generated_rotate(self):
+        return self.generated_rotate
+
+    def get_latest_losses(self):
+        return {**self.g_losses, **self.d_losses}
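+
+    # the visuals returned below are consumed by the Visualizer for
+    # tensorboard / HTML logging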
+    def get_current_visuals(self, data):
+        return OrderedDict([('input_mesh', data['mesh']),
+                            ('input_rotated_mesh', data['rotated_mesh']),
+                            ('synthesized_image', self.get_latest_generated()),
+                            ('synthesized_rotated_image', self.get_latest_generated_rotate()),
+                            ('real_image', data['image'])])
+
+    def save(self, epoch):
+        self.pix2pix_model_on_one_gpu.save(epoch)
+
+    ##################################################################
+    # Helper functions
+    ##################################################################
+
+    def update_learning_rate(self, epoch):
+        if epoch > self.opt.niter:
+            lrd = self.opt.lr / self.opt.niter_decay
+            new_lr = self.old_lr - lrd
+        else:
+            new_lr = self.old_lr
+
+        if new_lr != self.old_lr:
+            if self.opt.no_TTUR:
+                new_lr_G = new_lr
+                new_lr_D = new_lr
+            else:
+                # TTUR: run the discriminator at twice the base rate and the
+                # generator at half of it
+                new_lr_G = new_lr / 2
+                new_lr_D = new_lr * 2
+
+            for param_group in self.optimizer_D.param_groups:
+                param_group['lr'] = new_lr_D
+            for param_group in self.optimizer_G.param_groups:
+                param_group['lr'] = new_lr_G
+            print('update learning rate: %f -> %f' % (self.old_lr, new_lr))
+            self.old_lr = new_lr
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotatespade_trainer.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotatespade_trainer.py
new file mode 100644
index 0000000000..f89850673c
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotatespade_trainer.py
@@ -0,0 +1,46 @@
+from trainers.rotate_trainer import RotateTrainer
+from collections import OrderedDict
+import torch
+
+
+class RotateSPADETrainer(RotateTrainer):
+    """
+    Trainer creates the model and optimizers, and uses them to
+    update the weights of the network while reporting losses
+    and the latest visuals to visualize the progress in training.
+ """ + + def __init__(self, opt): + super(RotateSPADETrainer, self).__init__(opt) + + def run_generator_one_step(self, data): + self.optimizer_G.zero_grad() + g_losses, generated, self.input_mesh = self.pix2pix_model.forward(data=data, mode='generator') + if not self.opt.train_rotate: + with torch.no_grad(): + g_rotate_losses, generated_rotate, self.input_rotated_mesh =\ + self.pix2pix_model.forward(data=data, mode='generator_rotated') + + else: + g_rotate_losses, generated_rotate, self.input_rotated_mesh = self.pix2pix_model.forward(data=data, + mode='generator_rotated') + g_losses['GAN_rotate'] = g_rotate_losses['GAN'] + g_loss = sum(g_losses.values()).mean() + g_loss.backward() + # g_rotate_loss = sum(g_rotate_losses.values()).mean() + # g_rotate_loss.backward() + self.optimizer_G.step() + self.g_losses = g_losses + # self.g_rotate_losses = g_rotate_losses + self.generated = generated + self.generated_rotate = generated_rotate + + def get_current_visuals(self, data): + return OrderedDict([('input_mesh', self.input_mesh), + ('input_rotated_mesh', self.input_rotated_mesh), + ('synthesized_image', self.get_latest_generated()), + ('synthesized_rotated_image', self.get_latest_generated_rotate()), + ('input_images_erode', data['rendered_images_erode']), + ('rendered_images_rotate_artifacts', data['rendered_images_rotate_artifacts']), + ('Rd_a', data['Rd_a']), + ('real_image', data['image'])]) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/__init__.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/html.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/html.py new file mode 100644 index 0000000000..d7372e6617 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/html.py @@ -0,0 +1,71 @@ +import datetime +import dominate +from dominate.tags import p, br, img, a, td, tr, table, h3, meta, h1 +import os + + +class HTML: + def __init__(self, web_dir, title, refresh=0): + if web_dir.endswith('.html'): + web_dir, html_name = os.path.split(web_dir) + else: + web_dir, html_name = web_dir, 'index.html' + self.title = title + self.web_dir = web_dir + self.html_name = html_name + self.img_dir = os.path.join(self.web_dir, 'images') + if len(self.web_dir) > 0 and not os.path.exists(self.web_dir): + os.makedirs(self.web_dir) + if len(self.web_dir) > 0 and not os.path.exists(self.img_dir): + os.makedirs(self.img_dir) + + self.doc = dominate.document(title=title) + with self.doc: + h1(datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")) + if refresh > 0: + with self.doc.head: + meta(http_equiv="refresh", content=str(refresh)) + + def get_image_dir(self): + return self.img_dir + + def add_header(self, str): + with self.doc: + h3(str) + + def add_table(self, border=1): + self.t = table(border=border, style="table-layout: fixed;") + self.doc.add(self.t) + + def add_images(self, ims, txts, links, width=512): + self.add_table() + with self.t: + with tr(): + for im, txt, link in zip(ims, txts, links): + with td(style="word-wrap: break-word;", halign="center", valign="top"): + with p(): + with a(href=os.path.join('images', link)): + img(style="width:%dpx" % (width), 
src=os.path.join('images', im)) + br() + p(txt.encode('utf-8')) + + def save(self): + html_file = os.path.join(self.web_dir, self.html_name) + f = open(html_file, 'wt') + f.write(self.doc.render()) + f.close() + + +if __name__ == '__main__': + html = HTML('web/', 'test_html') + html.add_header('hello world') + + ims = [] + txts = [] + links = [] + for n in range(4): + ims.append('image_%d.jpg' % n) + txts.append('text_%d' % n) + links.append('image_%d.jpg' % n) + html.add_images(ims, txts, links) + html.save() diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/iter_counter.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/iter_counter.py new file mode 100644 index 0000000000..4cdae6377f --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/iter_counter.py @@ -0,0 +1,69 @@ +import os +import time +import numpy as np + + +# Helper class that keeps track of training iterations +class IterationCounter(): + def __init__(self, opt, dataset_size): + self.opt = opt + self.dataset_size = dataset_size + + self.first_epoch = 1 + self.total_epochs = opt.niter + opt.niter_decay if opt.isTrain else 1 + self.epoch_iter = 0 # iter number within each epoch + self.iter_record_path = os.path.join(self.opt.checkpoints_dir, self.opt.name, 'iter.txt') + if opt.isTrain and opt.continue_train: + try: + self.first_epoch, self.epoch_iter = np.loadtxt( + self.iter_record_path, delimiter=',', dtype=int) + print('Resuming from epoch %d at iteration %d' % (self.first_epoch, self.epoch_iter)) + except: + print('Could not load iteration record at %s. Starting from beginning.' % + self.iter_record_path) + + self.total_steps_so_far = (self.first_epoch - 1) * dataset_size + self.epoch_iter + + # return the iterator of epochs for the training + def training_epochs(self): + return range(self.first_epoch, self.total_epochs + 1) + + def record_epoch_start(self, epoch): + self.epoch_start_time = time.time() + self.epoch_iter = 0 + self.last_iter_time = time.time() + self.current_epoch = epoch + + def record_one_iteration(self): + current_time = time.time() + + # the last remaining batch is dropped (see data/__init__.py), + # so we can assume batch size is always opt.batchSize + self.time_per_iter = (current_time - self.last_iter_time) / self.opt.batchSize + self.last_iter_time = current_time + self.total_steps_so_far += self.opt.batchSize + self.epoch_iter += self.opt.batchSize + + def record_epoch_end(self): + current_time = time.time() + self.time_per_epoch = current_time - self.epoch_start_time + print('End of epoch %d / %d \t Time Taken: %d sec' % + (self.current_epoch, self.total_epochs, self.time_per_epoch)) + if self.current_epoch % self.opt.save_epoch_freq == 0: + np.savetxt(self.iter_record_path, (self.current_epoch + 1, 0), + delimiter=',', fmt='%d') + print('Saved current iteration count at %s.' % self.iter_record_path) + + def record_current_iter(self): + np.savetxt(self.iter_record_path, (self.current_epoch, self.epoch_iter), + delimiter=',', fmt='%d') + print('Saved current iteration count at %s.' 
% self.iter_record_path) + + def needs_saving(self): + return (self.total_steps_so_far % self.opt.save_latest_freq) < self.opt.batchSize + + def needs_printing(self): + return (self.total_steps_so_far % self.opt.print_freq) < self.opt.batchSize + + def needs_displaying(self): + return (self.total_steps_so_far % self.opt.display_freq) < self.opt.batchSize diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/util.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/util.py new file mode 100644 index 0000000000..a66ba460b6 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/util.py @@ -0,0 +1,206 @@ +import re +import importlib +import torch +import numpy as np +from PIL import Image +import os +import argparse +import dill as pickle + + +def save_obj(obj, name): + with open(name, 'wb') as f: + pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL) + + +def load_obj(name): + with open(name, 'rb') as f: + return pickle.load(f) + + +# returns a configuration for creating a generator +# |default_opt| should be the opt of the current experiment +# |**kwargs|: if any configuration should be overriden, it can be specified here + + +def copyconf(default_opt, **kwargs): + conf = argparse.Namespace(**vars(default_opt)) + for key in kwargs: + print(key, kwargs[key]) + setattr(conf, key, kwargs[key]) + return conf + + +def tile_images(imgs, picturesPerRow=4): + """ Code borrowed from + https://stackoverflow.com/questions/26521365/cleanly-tile-numpy-array-of-images-stored-in-a-flattened-1d-format/26521997 + """ + + # Padding + if imgs.shape[0] % picturesPerRow == 0: + rowPadding = 0 + else: + rowPadding = picturesPerRow - imgs.shape[0] % picturesPerRow + if rowPadding > 0: + imgs = np.concatenate([imgs, np.zeros((rowPadding, *imgs.shape[1:]), dtype=imgs.dtype)], axis=0) + + # Tiling Loop (The conditionals are not necessary anymore) + tiled = [] + for i in range(0, imgs.shape[0], picturesPerRow): + tiled.append(np.concatenate([imgs[j] for j in range(i, i + picturesPerRow)], axis=1)) + + tiled = np.concatenate(tiled, axis=0) + return tiled + + +# Converts a Tensor into a Numpy array +# |imtype|: the desired type of the converted numpy array +def tensor2im(image_tensor, imtype=np.uint8, normalize=True, tile=False): + if isinstance(image_tensor, list): + image_numpy = [] + for i in range(len(image_tensor)): + image_numpy.append(tensor2im(image_tensor[i], imtype, normalize)) + return image_numpy + + if image_tensor.dim() == 4: + # transform each image in the batch + images_np = [] + for b in range(image_tensor.size(0)): + one_image = image_tensor[b] + one_image_np = tensor2im(one_image) + images_np.append(one_image_np.reshape(1, *one_image_np.shape)) + images_np = np.concatenate(images_np, axis=0) + if tile: + images_tiled = tile_images(images_np) + return images_tiled + else: + return images_np + + if image_tensor.dim() == 2: + image_tensor = image_tensor.unsqueeze(0) + image_numpy = image_tensor.detach().cpu().float().numpy() + if normalize: + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 + else: + image_numpy = np.transpose(image_numpy, (1, 2, 0)) * 255.0 + image_numpy = np.clip(image_numpy, 0, 255) + if image_numpy.shape[2] == 1: + image_numpy = image_numpy[:, :, 0] + return image_numpy.astype(imtype) + + +def save_image(image_numpy, image_path, create_dir=False): + if create_dir: + 
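+        # create the parent directory on demand so nested save paths work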
os.makedirs(os.path.dirname(image_path), exist_ok=True) + if len(image_numpy.shape) == 2: + image_numpy = np.expand_dims(image_numpy, axis=2) + if image_numpy.shape[2] == 1: + image_numpy = np.repeat(image_numpy, 3, 2) + image_pil = Image.fromarray(image_numpy) + + # save to png + image_pil.save(image_path) + # image_pil.save(image_path.replace('.jpg', '.png')) + + +def mkdirs(paths): + if isinstance(paths, list) and not isinstance(paths, str): + for path in paths: + mkdir(path) + else: + mkdir(paths) + + +def mkdir(path): + if not os.path.exists(path): + os.makedirs(path) + + +def atoi(text): + return int(text) if text.isdigit() else text + + +def natural_keys(text): + ''' + alist.sort(key=natural_keys) sorts in human order + http://nedbatchelder.com/blog/200712/human_sorting.html + (See Toothy's implementation in the comments) + ''' + return [atoi(c) for c in re.split('(\d+)', text)] + + +def natural_sort(items): + items.sort(key=natural_keys) + + +def str2bool(v): + if v.lower() in ('yes', 'true', 't', 'y', '1'): + return True + elif v.lower() in ('no', 'false', 'f', 'n', '0'): + return False + else: + raise argparse.ArgumentTypeError('Boolean value expected.') + + +def find_class_in_module(target_cls_name, module): + target_cls_name = target_cls_name.replace('_', '').lower() + clslib = importlib.import_module(module) + cls = None + for name, clsobj in clslib.__dict__.items(): + if name.lower() == target_cls_name: + cls = clsobj + + if cls is None: + print("In %s, there should be a class whose name matches %s in lowercase without underscore(_)" % (module, + target_cls_name)) + exit(0) + + return cls + + +def save_network(net, label, epoch, opt): + save_filename = '%s_net_%s.pth' % (epoch, label) + save_path = os.path.join(opt.checkpoints_dir, opt.name, save_filename) + torch.save(net.cpu().state_dict(), save_path) + if len(opt.gpu_ids) and torch.cuda.is_available(): + net.cuda() + + +def load_network(net, label, epoch, opt): + save_filename = '%s_net_%s.pth' % (epoch, label) + save_dir = os.path.join(opt.checkpoints_dir, opt.name) + save_path = os.path.join(save_dir, save_filename) + weights = torch.load(save_path) + net.load_state_dict(weights) + return net + + +def copy_state_dict(state_dict, model, strip=None): + tgt_state = model.state_dict() + copied_names = set() + for name, param in state_dict.items(): + if strip is not None and name.startswith(strip): + name = name[len(strip):] + if name not in tgt_state: + continue + if isinstance(param, torch.nn.Parameter): + param = param.data + if param.size() != tgt_state[name].size(): + print('mismatch:', name, param.size(), tgt_state[name].size()) + continue + tgt_state[name].copy_(param) + copied_names.add(name) + + missing = set(tgt_state.keys()) - copied_names + if len(missing) > 0: + print("missing keys in state_dict:", missing) + + +############################################################################### +# Code from +# https://github.com/ycszen/pytorch-seg/blob/master/transform.py +# Modified so it complies with the Citscape label map colors +############################################################################### +def uint82bin(n, count=8): + """returns the binary of integer n, count refers to amount of bits""" + return ''.join([str((n >> y) & 1) for y in range(count - 1, -1, -1)]) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/visualizer.py 
b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/visualizer.py new file mode 100644 index 0000000000..305a8ecd62 --- /dev/null +++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/visualizer.py @@ -0,0 +1,187 @@ +import os +import ntpath +import time +from . import util +from . import html +import scipy.misc +import torchvision.utils as vutils +from torch.utils.tensorboard import SummaryWriter + +try: + from StringIO import StringIO # Python 2.7 +except ImportError: + from io import BytesIO # Python 3.x + + +class Visualizer(): + def __init__(self, opt): + self.opt = opt + self.tf_log = opt.isTrain and opt.tf_log + self.tensorboard = opt.isTrain and opt.tensorboard + self.use_html = opt.isTrain and not opt.no_html + self.win_size = opt.display_winsize + self.name = opt.name + if self.tf_log: + import tensorflow as tf + self.tf = tf + self.log_dir = os.path.join(opt.checkpoints_dir, opt.name, 'logs') + self.writer = tf.summary.FileWriter(self.log_dir) + + if self.tensorboard: + self.log_dir = os.path.join(opt.checkpoints_dir, opt.name, 'logs') + self.writer = SummaryWriter(self.log_dir, comment=opt.name) + + if self.use_html: + self.web_dir = os.path.join(opt.checkpoints_dir, opt.name, 'web') + self.img_dir = os.path.join(self.web_dir, 'images') + print('create web directory %s...' % self.web_dir) + util.mkdirs([self.web_dir, self.img_dir]) + if opt.isTrain: + self.log_name = os.path.join(opt.checkpoints_dir, opt.name, 'loss_log.txt') + with open(self.log_name, "a") as log_file: + now = time.strftime("%c") + log_file.write('================ Training Loss (%s) ================\n' % now) + + # |visuals|: dictionary of images to display or save + def display_current_results(self, visuals, epoch, step): + + # convert tensors to numpy arrays + + if self.tf_log: # show images in tensorboard output + img_summaries = [] + visuals = self.convert_visuals_to_numpy(visuals) + for label, image_numpy in visuals.items(): + # Write the image to a string + try: + s = StringIO() + except: + s = BytesIO() + if len(image_numpy.shape) >= 4: + image_numpy = image_numpy[0] + scipy.misc.toimage(image_numpy).save(s, format="jpeg") + # Create an Image object + img_sum = self.tf.Summary.Image(encoded_image_string=s.getvalue(), height=image_numpy.shape[0], + width=image_numpy.shape[1]) + # Create a Summary value + img_summaries.append(self.tf.Summary.Value(tag=label, image=img_sum)) + + # Create and write Summary + summary = self.tf.Summary(value=img_summaries) + self.writer.add_summary(summary, step) + + if self.tensorboard: # show images in tensorboard output + img_summaries = [] + for label, image_numpy in visuals.items(): + # Write the image to a string + try: + s = StringIO() + except: + s = BytesIO() + # if len(image_numpy.shape) >= 4: + # image_numpy = image_numpy[0] + # scipy.misc.toimage(image_numpy).save(s, format="jpeg") + # Create an Image object + # self.writer.add_image(tag=label, img_tensor=image_numpy, global_step=step, dataformats='HWC') + # Create a Summary value + batch_size = image_numpy.size(0) + x = vutils.make_grid(image_numpy[:min(batch_size, 16)], normalize=True, scale_each=True) + self.writer.add_image(label, x, step) + + if self.use_html: # save images to a html file + for label, image_numpy in visuals.items(): + if isinstance(image_numpy, list): + for i in range(len(image_numpy)): + img_path = os.path.join(self.img_dir, 'epoch%.3d_iter%.3d_%s_%d.png' % (epoch, step, label, i)) + 
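+                        # list-valued visuals are written as individually indexed images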
util.save_image(image_numpy[i], img_path) + else: + img_path = os.path.join(self.img_dir, 'epoch%.3d_iter%.3d_%s.png' % (epoch, step, label)) + if len(image_numpy.shape) >= 4: + image_numpy = image_numpy[0] + util.save_image(image_numpy, img_path) + + # update website + webpage = html.HTML(self.web_dir, 'Experiment name = %s' % self.name, refresh=5) + for n in range(epoch, 0, -1): + webpage.add_header('epoch [%d]' % n) + ims = [] + txts = [] + links = [] + + for label, image_numpy in visuals.items(): + if isinstance(image_numpy, list): + for i in range(len(image_numpy)): + img_path = 'epoch%.3d_iter%.3d_%s_%d.png' % (n, step, label, i) + ims.append(img_path) + txts.append(label + str(i)) + links.append(img_path) + else: + img_path = 'epoch%.3d_iter%.3d_%s.png' % (n, step, label) + ims.append(img_path) + txts.append(label) + links.append(img_path) + if len(ims) < 10: + webpage.add_images(ims, txts, links, width=self.win_size) + else: + num = int(round(len(ims) / 2.0)) + webpage.add_images(ims[:num], txts[:num], links[:num], width=self.win_size) + webpage.add_images(ims[num:], txts[num:], links[num:], width=self.win_size) + webpage.save() + + # errors: dictionary of error labels and values + def plot_current_errors(self, errors, step): + if self.tf_log: + for tag, value in errors.items(): + value = value.mean().float() + summary = self.tf.Summary(value=[self.tf.Summary.Value(tag=tag, simple_value=value)]) + self.writer.add_summary(summary, step) + + if self.tensorboard: + for tag, value in errors.items(): + value = value.mean().float() + self.writer.add_scalar(tag=tag, scalar_value=value, global_step=step) + + # errors: same format as |errors| of plotCurrentErrors + def print_current_errors(self, epoch, i, errors, t): + message = '(epoch: %d, iters: %d, time: %.3f) ' % (epoch, i, t) + for k, v in errors.items(): + # print(v) + # if v != 0: + v = v.mean().float() + message += '%s: %.3f ' % (k, v) + + print(message) + with open(self.log_name, "a") as log_file: + log_file.write('%s\n' % message) + + def convert_visuals_to_numpy(self, visuals): + for key, t in visuals.items(): + tile = self.opt.batchSize > 8 + if 'input_label' == key: + t = util.tensor2label(t, self.opt.label_nc + 2, tile=tile) + else: + t = util.tensor2im(t, tile=tile) + visuals[key] = t + return visuals + + # save image to the disk + def save_images(self, webpage, visuals, image_path): + visuals = self.convert_visuals_to_numpy(visuals) + + image_dir = webpage.get_image_dir() + short_path = ntpath.basename(image_path[0]) + name = os.path.splitext(short_path)[0] + + webpage.add_header(name) + ims = [] + txts = [] + links = [] + + for label, image_numpy in visuals.items(): + image_name = os.path.join(label, '%s.png' % (name)) + save_path = os.path.join(image_dir, image_name) + util.save_image(image_numpy, save_path, create_dir=True) + + ims.append(image_name) + txts.append(label) + links.append(image_name) + webpage.add_images(ims, txts, links, width=self.win_size) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/demos/imgs_input/person01145+0-15.jpg b/projects/data_generation/synthetic_multi_view_facial_image_generation/demos/imgs_input/person01145+0-15.jpg new file mode 100644 index 0000000000000000000000000000000000000000..5250dc7dc78c081a035b906a7d3c0d493feb5792 GIT binary patch literal 9282 zcmb7pWmsEX({6y^P~0szg@ocxaVu`YDH1ez2u_OxcPJDuZUtIW+*;fz?i6<^6e(3c zp67kPbI#wh_Mf@dtZVk#Gkey3&#e2U`%M7pGgS>$00ssIK>gtX-2VlHfs}k4Y<(SI zAiF>i!p{Zf;OXY#=Rg2bb#Ze5dDyx;NP!f6kRG0b0wSXPUS94G8lQ(*kO$J;&cR0t 
[... remainder of the base85-encoded JPEG payload (9282 bytes) omitted for readability ...]
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/requirements.txt b/projects/data_generation/synthetic_multi_view_facial_image_generation/requirements.txt
new file mode 100644
index 0000000000..3f74a7cee4
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/requirements.txt
@@ -0,0 +1,11 @@
+torch>=1.0.0
+torchvision
+dominate>=2.3.1
+dill
+scikit-image
+numpy>=1.15.4
+scipy>=1.1.0
+matplotlib>=2.2.2
+opencv-python>=3.4.3.18
+tensorboard>=1.14.0
+face-alignment==1.0.0
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/tool_synthetic_facial_generation.py b/projects/data_generation/synthetic_multi_view_facial_image_generation/tool_synthetic_facial_generation.py
new file mode 100755
index 0000000000..ccf35746d1
--- /dev/null
+++ b/projects/data_generation/synthetic_multi_view_facial_image_generation/tool_synthetic_facial_generation.py
@@ -0,0 +1,79 @@
+# Copyright 2020-2022 OpenDR European Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
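+
+# Example invocation (illustrative paths; adjust to your checkout):
+#   python3 tool_synthetic_facial_generation.py -device cuda -path_in ./demos/imgs_input \
+#       -save_path ./results -val_yaw 10 20 -val_pitch 30 40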
+import torch +import matplotlib +import os +import argparse +from SyntheticDataGeneration import MultiviewDataGeneration +from algorithm.DDFA.utils.ddfa import str2bool + +matplotlib.use('Agg') +__all__ = ['torch'] + +if __name__ == '__main__': + print("\n\n**********************************\nTEST Multiview Data Generation Learner\n" + "**********************************") + parser = argparse.ArgumentParser() + parser.add_argument("-device", default="cuda", type=str, help="choose between cuda or cpu ") + parser.add_argument("-path_in", default=os.path.join("opendr_internal", "projects", + "data_generation", + "", + "demos", "imgs_input"), + type=str, help='Give the path of image folder') + parser.add_argument('-path_3ddfa', default=os.path.join("opendr_internal", "projects", + "data_generation", + "", + "algorithm", "DDFA"), + type=str, help='Give the path of DDFA folder') + parser.add_argument('-save_path', default=os.path.join("opendr_internal", "projects", + "data_generation", + "", + "results"), + type=str, help='Give the path of results folder') + parser.add_argument('-val_yaw', default="10 20", nargs='+', type=str, help='yaw poses list between [-90,90] ') + parser.add_argument('-val_pitch', default="30 40", nargs='+', type=str, + help='pitch poses list between [-90,90] ') + parser.add_argument('-f', '--files', nargs='+', + help='image files paths fed into network, single or multiple images') + parser.add_argument('--show_flg', default='false', type=str2bool, help='whether show the visualization result') + parser.add_argument('--dump_res', default='true', type=str2bool, + help='whether write out the visualization image') + parser.add_argument('--dump_vertex', default='false', type=str2bool, + help='whether write out the dense face vertices to mat') + parser.add_argument('--dump_ply', default='true', type=str2bool) + parser.add_argument('--dump_pts', default='true', type=str2bool) + parser.add_argument('--dump_roi_box', default='false', type=str2bool) + parser.add_argument('--dump_pose', default='true', type=str2bool) + parser.add_argument('--dump_depth', default='true', type=str2bool) + parser.add_argument('--dump_pncc', default='true', type=str2bool) + parser.add_argument('--dump_paf', default='true', type=str2bool) + parser.add_argument('--paf_size', default=3, type=int, help='PAF feature kernel size') + parser.add_argument('--dump_obj', default='true', type=str2bool) + parser.add_argument('--dlib_bbox', default='true', type=str2bool, help='whether use dlib to predict bbox') + parser.add_argument('--dlib_landmark', default='true', type=str2bool, + help='whether use dlib landmark to crop image') + parser.add_argument('-m', '--mode', default='gpu', type=str, help='gpu or cpu mode') + parser.add_argument('--bbox_init', default='two', type=str, help='one|two: one-step bbox initialization or two-step') + parser.add_argument('--dump_2d_img', default='true', type=str2bool, help='whether to save 3d rendered image') + parser.add_argument('--dump_param', default='true', type=str2bool, help='whether to save param') + parser.add_argument('--dump_lmk', default='true', type=str2bool, help='whether to save landmarks') + parser.add_argument('--save_dir', default='./algorithm/DDFA/results', type=str, help='dir to save result') + parser.add_argument('--save_lmk_dir', default='./example', type=str, help='dir to save landmark result') + parser.add_argument('--img_list', default='./txt_name_batch.txt', type=str, help='test image list file') + parser.add_argument('--rank', default=0, type=int, 
help='used when parallel run')
+    parser.add_argument('--world_size', default=1, type=int, help='used when parallel run')
+    parser.add_argument('--resume_idx', default=0, type=int)
+    args = parser.parse_args()
+    synthetic = MultiviewDataGeneration(args)
+    synthetic.eval()
diff --git a/projects/opendr_ws/src/data_generation/CMakeLists.txt b/projects/opendr_ws/src/data_generation/CMakeLists.txt
new file mode 100644
index 0000000000..2a43cfdb27
--- /dev/null
+++ b/projects/opendr_ws/src/data_generation/CMakeLists.txt
@@ -0,0 +1,32 @@
+cmake_minimum_required(VERSION 3.0.2)
+project(data_generation)
+
+find_package(catkin REQUIRED COMPONENTS
+  roscpp
+  rospy
+  sensor_msgs
+  std_msgs
+)
+
+###################################
+## catkin specific configuration ##
+###################################
+
+catkin_package()
+
+###########
+## Build ##
+###########
+
+include_directories(
+  ${catkin_INCLUDE_DIRS}
+)
+
+#############
+## Install ##
+#############
+
+catkin_install_python(PROGRAMS
+  scripts/synthetic_facial_generation.py
+  DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+)
diff --git a/projects/opendr_ws/src/data_generation/README.md b/projects/opendr_ws/src/data_generation/README.md
new file mode 100644
index 0000000000..523347f6a0
--- /dev/null
+++ b/projects/opendr_ws/src/data_generation/README.md
@@ -0,0 +1,28 @@
+# Data Generation Package
+
+This package contains ROS nodes related to the data generation package of OpenDR.
+
+## Synthetic Facial Image Generation ROS Node
+Assuming that you have already [built your workspace](../../README.md) and started roscore (i.e., just run `roscore`), then you can:
+
+1. Add OpenDR to `PYTHONPATH` (please make sure you do not overwrite `PYTHONPATH`), e.g.,
+```shell
+export PYTHONPATH="/home/user/opendr/src:$PYTHONPATH"
+```
+
+2. Start the node responsible for publishing images. If you have a USB camera, you can use the corresponding node (assuming you have installed the corresponding package):
+```shell
+rosrun usb_cam usb_cam_node
+```
+
+3. Start the synthetic data generation node:
+```shell
+rosrun data_generation synthetic_facial_generation.py
+```
+
+4. You can examine the published multiview facial image stream using `rosrun rqt_image_view rqt_image_view` (select the topic `/opendr/synthetic_facial_images`) or `rostopic echo /opendr/synthetic_facial_images`.
diff --git a/projects/opendr_ws/src/data_generation/package.xml b/projects/opendr_ws/src/data_generation/package.xml
new file mode 100644
index 0000000000..cd332807fb
--- /dev/null
+++ b/projects/opendr_ws/src/data_generation/package.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<package format="2">
+  <name>data_generation</name>
+  <version>1.0.0</version>
+  <description>OpenDR's ROS nodes for the data generation package</description>
+  <maintainer>OpenDR Project Coordinator</maintainer>
+  <license>Apache License v2.0</license>
+  <url>opendr.eu</url>
+  <buildtool_depend>catkin</buildtool_depend>
+  <build_depend>roscpp</build_depend>
+  <build_depend>rospy</build_depend>
+  <build_depend>std_msgs</build_depend>
+  <build_depend>sensor_msgs</build_depend>
+  <build_export_depend>roscpp</build_export_depend>
+  <build_export_depend>rospy</build_export_depend>
+  <build_export_depend>std_msgs</build_export_depend>
+  <build_export_depend>sensor_msgs</build_export_depend>
+  <exec_depend>roscpp</exec_depend>
+  <exec_depend>rospy</exec_depend>
+  <exec_depend>std_msgs</exec_depend>
+  <exec_depend>sensor_msgs</exec_depend>
+</package>
diff --git a/projects/opendr_ws/src/data_generation/scripts/synthetic_facial_generation.py b/projects/opendr_ws/src/data_generation/scripts/synthetic_facial_generation.py
new file mode 100644
index 0000000000..4f25fffd65
--- /dev/null
+++ b/projects/opendr_ws/src/data_generation/scripts/synthetic_facial_generation.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3.6
+# Copyright 2020-2022 OpenDR European Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
diff --git a/projects/opendr_ws/src/data_generation/package.xml b/projects/opendr_ws/src/data_generation/package.xml
new file mode 100644
index 0000000000..cd332807fb
--- /dev/null
+++ b/projects/opendr_ws/src/data_generation/package.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<package format="2">
+  <name>data_generation</name>
+  <version>1.0.0</version>
+  <description>OpenDR's ROS nodes for data generation package</description>
+  <maintainer>OpenDR Project Coordinator</maintainer>
+  <license>Apache License v2.0</license>
+  <url>opendr.eu</url>
+  <buildtool_depend>catkin</buildtool_depend>
+  <build_depend>roscpp</build_depend>
+  <build_depend>rospy</build_depend>
+  <build_depend>std_msgs</build_depend>
+  <build_depend>sensor_msgs</build_depend>
+  <build_export_depend>roscpp</build_export_depend>
+  <build_export_depend>rospy</build_export_depend>
+  <build_export_depend>std_msgs</build_export_depend>
+  <build_export_depend>sensor_msgs</build_export_depend>
+  <exec_depend>roscpp</exec_depend>
+  <exec_depend>rospy</exec_depend>
+  <exec_depend>std_msgs</exec_depend>
+  <exec_depend>sensor_msgs</exec_depend>
+  <export>
+  </export>
+</package>
diff --git a/projects/opendr_ws/src/data_generation/scripts/synthetic_facial_generation.py b/projects/opendr_ws/src/data_generation/scripts/synthetic_facial_generation.py
new file mode 100644
index 0000000000..4f25fffd65
--- /dev/null
+++ b/projects/opendr_ws/src/data_generation/scripts/synthetic_facial_generation.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3.6
+# Copyright 2020-2022 OpenDR European Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import rospy
+import torch
+import numpy as np
+from sensor_msgs.msg import Image as ROS_Image
+from opendr_bridge import ROSBridge
+from SyntheticDataGeneration import MultiviewDataGenerationLearner
+import os
+import cv2
+import argparse
+from opendr.engine.data import Image
+
+
+class Synthetic_Data_Generation:
+
+    def __init__(self, input_image_topic="/usb_cam/image_raw", output_image_topic="/opendr/synthetic_facial_images",
+                 device="cuda"):
+        """
+        Creates a ROS Node for SyntheticDataGeneration
+        :param input_image_topic: Topic from which we are reading the input image
+        :type input_image_topic: str
+        :param output_image_topic: Topic to which we are publishing the synthetic facial image (if None, we are not
+        publishing any image)
+        :type output_image_topic: str
+        :param device: device on which we are running eval ('cpu' or 'cuda')
+        :type device: str
+        """
+        if output_image_topic is not None:
+            self.image_publisher = rospy.Publisher(output_image_topic, ROS_Image, queue_size=10)
+        else:
+            self.image_publisher = None
+        rospy.Subscriber(input_image_topic, ROS_Image, self.callback)
+
+        self.bridge = ROSBridge()
+        self.ID = 0
+
+        # Initialize the SyntheticDataGeneration
+        self.parser = argparse.ArgumentParser()
+        self.parser.add_argument('-path_in', default='/home/ekakalet/Pictures/TEST', type=str,
+                                 help='Give the path of the image folder')
+        self.parser.add_argument('-path_3ddfa', default='./', type=str, help='Give the path of the DDFA folder')
+        self.parser.add_argument('-save_path', default='./results/', type=str,
+                                 help='Give the path of the results folder')
+        self.parser.add_argument('-val_yaw', default="15,-15", nargs='+', type=str,
+                                 help='yaw poses list between [-90, 90]')
+        self.parser.add_argument('-val_pitch', default="15,-15", nargs='+', type=str,
+                                 help='pitch poses list between [-90, 90]')
+        self.args = self.parser.parse_args()
+        self.synthetic = MultiviewDataGenerationLearner(path_in=self.args.path_in, path_3ddfa=self.args.path_3ddfa,
+                                                        save_path=self.args.save_path,
+                                                        val_yaw=self.args.val_yaw, val_pitch=self.args.val_pitch)
+
+    def listen(self):
+        """
+        Start the node and begin processing input data
+        """
+        rospy.init_node('opendr_SyntheticDataGeneration', anonymous=True)
+        rospy.loginfo("SyntheticDataGeneration node started!")
+        rospy.spin()
+
+    def callback(self, data):
+        """
+        Callback that processes the input data and publishes to the corresponding topics
+        :param data: input message
+        :type data: sensor_msgs.msg.Image
+        """
+        # Convert sensor_msgs.msg.Image into OpenDR Image
+        image = self.bridge.from_ros_image(data)
+        self.ID = self.ID + 1
+        # Get an OpenCV image back and buffer it on disk as input for the generator
+        image = np.float32(image.numpy())
+        name = f"{self.ID:02d}_single.jpg"
+        cv2.imwrite(os.path.join(self.args.path_in, name), image)
+
+        if self.ID == 5:
+            # Run SyntheticDataGeneration on the buffered batch of five images
+            self.synthetic.eval()
+            self.ID = 0
+            # Annotate image and publish results
+            current_directory_path = os.path.join(self.args.save_path, "Documents_orig")
+            for file in os.listdir(current_directory_path):
+                name, ext = os.path.splitext(file)
+                if ext == ".jpg":
+                    image_file_savepath = os.path.join(current_directory_path, file)
+                    cv_image = cv2.imread(image_file_savepath)
+                    cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB)
+                    if self.image_publisher is not None:
+                        image = Image(np.array(cv_image, dtype=np.uint8))
+                        message = self.bridge.to_ros_image(image, encoding="bgr8")
+                        self.image_publisher.publish(message)
+            # Clear the input buffer for the next batch
+            for f in os.listdir(self.args.path_in):
+                os.remove(os.path.join(self.args.path_in, f))
+
+
+if __name__ == '__main__':
+    # Select the device for running the generation
+    try:
+        if torch.cuda.is_available():
+            print("GPU found.")
+            device = 'cuda'
+        else:
+            print("GPU not found. Using CPU instead.")
+            device = 'cpu'
+    except Exception:
+        device = 'cpu'
+
+    syntheticdatageneration_node = Synthetic_Data_Generation(device=device)
+    syntheticdatageneration_node.listen()
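One caveat with the node above: its argparse defaults (e.g. `-path_in /home/ekakalet/Pictures/TEST`) are machine-specific. Since `rosrun` forwards any extra command-line arguments to the script, these can be overridden at start-up; a sketch with illustrative scratch directories (they must exist and be writable, and the DDFA assets must still be reachable via `-path_3ddfa`):

```shell
# Point the node's input buffer and results folder at writable scratch space
rosrun data_generation synthetic_facial_generation.py \
    -path_in /tmp/synthetic_input -save_path /tmp/synthetic_results
```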
diff --git a/tests/test_license.py b/tests/test_license.py
index c13a325101..71204177ba 100755
--- a/tests/test_license.py
+++ b/tests/test_license.py
@@ -98,6 +98,7 @@ def setUp(self):
             'src/opendr/simulation/human_model_generation/utilities/PIFu',
             'src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/architectures',
             'src/opendr/perception/skeleton_based_action_recognition/algorithm',
+            'projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm',
             'src/opendr/perception/semantic_segmentation/bisenet/algorithm',
             'src/opendr/perception/object_detection_2d/retinaface/algorithm',
             'src/opendr/perception/object_detection_2d/gem/algorithm',