diff --git a/.devcontainer/Dockerfile.dev b/.devcontainer/Dockerfile.dev index c5583ebb..5b53392e 100644 --- a/.devcontainer/Dockerfile.dev +++ b/.devcontainer/Dockerfile.dev @@ -14,13 +14,12 @@ RUN apt-get update && \ apt-transport-https \ ca-certificates \ git \ - cmake - -# Install locale packages -RUN apt-get update && \ - apt-get install -y locales && \ + cmake \ + locales && \ locale-gen en_US.UTF-8 && \ - update-locale LANG=en_US.UTF-8 + update-locale LANG=en_US.UTF-8 && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* # Set environment variables for locale ENV LANG=en_US.UTF-8 @@ -31,37 +30,43 @@ ENV LC_ALL=en_US.UTF-8 RUN add-apt-repository ppa:deadsnakes/ppa && \ apt-get update && \ apt-get install -y python3.12 python3.12-venv python3.12-dev python3-pip python3-full && \ - apt-get clean && rm -rf /var/lib/apt/lists/* - -# Set Python 3.12 as the default for both python and python3 -RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.12 1 && \ - update-alternatives --install /usr/bin/python python /usr/bin/python3.12 1 + update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.12 1 && \ + update-alternatives --install /usr/bin/python python /usr/bin/python3.12 1 && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* # Add R repository and install R RUN wget -qO- https://cloud.r-project.org/bin/linux/ubuntu/marutter_pubkey.asc | tee /etc/apt/trusted.gpg.d/cran_ubuntu_key.asc && \ add-apt-repository 'deb https://cloud.r-project.org/bin/linux/ubuntu noble-cran40/' && \ apt-get update && \ - apt-get install -y r-base r-base-dev zlib1g-dev libicu-dev pandoc make libcurl4-openssl-dev libssl-dev + apt-get install -y r-base r-base-dev zlib1g-dev libicu-dev pandoc make libcurl4-openssl-dev libssl-dev && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* -# Install Python packages -COPY requirements.txt /tmp/requirements.txt -RUN python -m venv /opt/venv && \ - /opt/venv/bin/python -m pip install --upgrade pip && \ - /opt/venv/bin/pip install -r /tmp/requirements.txt - -# Set the virtual environment as the default Python environment -ENV PATH="/opt/venv/bin:$PATH" +# Reuse existing 'ubuntu' user (UID 1000) +ARG USERNAME=ubuntu -# Check out the repo containing the Python package DoubleML (dev) -RUN git clone https://github.com/DoubleML/doubleml-for-py.git /doubleml-for-py -WORKDIR /doubleml-for-py -RUN /opt/venv/bin/pip uninstall -y DoubleML && \ - /opt/venv/bin/pip install -e .[rdd] +RUN mkdir -p /workspace && \ + chown -R $USERNAME:$USERNAME /workspace -# Create a directory for R user libraries and set the default R user library path -RUN mkdir -p /usr/local/lib/R/site-library +# Create a directory for R user libraries +RUN mkdir -p /usr/local/lib/R/site-library && \ + chown -R $USERNAME:$USERNAME /usr/local/lib/R/site-library ENV R_LIBS_USER=/usr/local/lib/R/site-library +# Switch to non-root user for remaining operations +USER $USERNAME + +# Install Python packages in the virtual environment +COPY --chown=$USERNAME:$USERNAME requirements.txt /tmp/requirements.txt +RUN python -m venv /home/$USERNAME/.venv && \ + /home/$USERNAME/.venv/bin/python -m pip install --upgrade pip && \ + /home/$USERNAME/.venv/bin/pip install --no-cache-dir -r /tmp/requirements.txt && \ + /home/$USERNAME/.venv/bin/pip install --no-cache-dir git+https://github.com/DoubleML/doubleml-for-py.git@main#egg=DoubleML[rdd] + +# Set the virtual environment as the default Python environment +ENV PATH="/home/$USERNAME/.venv/bin:$PATH" + # Install R packages and Jupyter kernel RUN 
Rscript -e "install.packages('remotes')" && \ Rscript -e "remotes::install_github('DoubleML/doubleml-for-r', dependencies = TRUE)" && \ @@ -70,4 +75,3 @@ RUN Rscript -e "install.packages('remotes')" && \ # Set the working directory WORKDIR /workspace - diff --git a/.devcontainer/build_image_guide.md b/.devcontainer/build_image_guide.md new file mode 100644 index 00000000..fcaf6e2c --- /dev/null +++ b/.devcontainer/build_image_guide.md @@ -0,0 +1,134 @@ +# Building and Publishing the Docker Image + +This guide shows how to build the DoubleML documentation development container locally and publish it to Docker Hub. + +## Prerequisites + +- [Docker Desktop](https://www.docker.com/products/docker-desktop/) installed and running +- Access to the `svenklaassen` [Docker Hub](https://www.docker.com/products/docker-hub/) account +- [doubleml-docs](https://github.com/DoubleML/doubleml-docs) repository cloned to your local machine + +## Step 1: Login to Docker Hub + +Open a terminal and login to Docker Hub: + +```bash +docker login +``` + +Enter the Docker Hub username (`svenklaassen`) and password (or token) when prompted. + +## Step 2: Build the Docker Image + +Navigate to your project root directory and build the image (using the `latest`-tag): + +```bash +docker build -t svenklaassen/doubleml-docs:latest -f .devcontainer/Dockerfile.dev . +``` + +To force a complete rebuild without using cache: + +```bash +docker build --no-cache -t svenklaassen/doubleml-docs:latest -f .devcontainer/Dockerfile.dev . +``` + +## Step 3 (Optional): Verify the image + +### Open the repository in VS Code + +1. Ensure your `.devcontainer/devcontainer.json` is configured to use your local image: + + ```json + "image": "svenklaassen/doubleml-docs:latest" + ``` + Note: The `.devcontainer/devcontainer.json` file is configured to use the pre-built image. If you want to build the container from scratch, uncomment the `dockerFile` and `context` lines and comment out the `image` line. + +2. Open the `doubleml-docs` repository in VS Code: + + ```bash + code /path/to/doubleml-docs + ``` + +3. Open the Command Palette (`Ctrl+Shift+P`) and select `Dev Containers: Reopen in Container`. + VS Code will use your locally built image. + +### Build the documentation + +Once inside the container, verify that you can successfully build the documentation: + +1. Open a terminal in VS Code (`Terminal > New Terminal`) + +2. Build the documentation: + + ```bash + cd doc + make html + ``` + +3. Check the output for any errors or warnings + +4. View the built documentation by opening the output files: + + ```bash + # On Windows + explorer.exe _build/html + + # On Linux + xdg-open _build/html + + # On macOS + open _build/html + ``` + +If the documentation builds successfully and looks correct, your Docker image is working properly and ready to be pushed to Docker Hub. 
+ +## Step 4: Push to Docker Hub + +Push your built image to Docker Hub: + +```bash +docker push svenklaassen/doubleml-docs:latest +``` + +## Step 5: Using the Published Image + +After publishing, there are two ways to use the image: + +### Option 1: Manual Container Management +Pull and run the container manually: + +```bash +docker pull svenklaassen/doubleml-docs:latest +# Then run commands to create a container from this image +``` + +### Option 2: VS Code Integration (Recommended) +Simply reference the image in your `devcontainer.json` file: + +```json +"image": "svenklaassen/doubleml-docs:latest" +``` + +VS Code will automatically pull the image when opening the project in a container - no separate `docker pull` command needed. + +## Troubleshooting + +### Clear Docker Cache + +If you're experiencing issues with cached layers: + +```bash +# Remove build cache +docker builder prune + +# For a more thorough cleanup +docker system prune -a +``` + +### Check Image Size + +To verify the image size before pushing: + +```bash +docker images svenklaassen/doubleml-docs +``` \ No newline at end of file diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index b2900011..c480f797 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,7 +1,8 @@ { "name": "DoubleML Documentation Development", - "dockerFile": "Dockerfile.dev", // Path to your Dockerfile - "context": "..", // Context for the build (root of your project) + "image": "svenklaassen/doubleml-docs:latest", + // "dockerFile": "Dockerfile.dev", + // "context": "..", "workspaceFolder": "/workspace", // Folder inside the container for your project // Customizations for VS Code "customizations": { @@ -12,14 +13,14 @@ "njpwerner.autodocstring", // Optional: Auto-generate docstrings "ms-python.black-formatter", // Optional: Black formatter "streetsidesoftware.code-spell-checker", // Optional: Spell checker - "github.copilot" // Add GitHub Copilot extension + "github.copilot", // Add GitHub Copilot extension "GitHub.github-vscode-theme", // GitHub theme - "github.vscode-github-actions" // GitHub Actions extension + "github.vscode-github-actions", // GitHub Actions extension "ms-toolsai.jupyter", // Jupyter extension "charliermarsh.ruff" // Ruff extension ], "settings": { - "python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python", // Poetry virtual environment path + "python.defaultInterpreterPath": "/home/ubuntu/.venv/bin/python", "editor.formatOnSave": true, // Auto-format code when saving "editor.codeActionsOnSave": { "source.organizeImports": true // Auto-organize imports on save @@ -27,8 +28,12 @@ "python.linting.enabled": true, // Enable linting "python.linting.flake8Enabled": false, // Disable Flake8 for linting "python.linting.ruffEnabled": true, // Enable Ruff for linting - "python.formatting.provider": "black", // Use Black for formatting - "python.testing.pytestEnabled": false, // Enable Pytest for testing + "python.formatting.provider": "none", + "[python]": { + "editor.defaultFormatter": "ms-python.black-formatter", + "editor.formatOnSave": true + }, + "python.testing.pytestEnabled": true, // Enable Pytest for testing "python.testing.pytestArgs": [], "python.testing.unittestEnabled": false, "files.exclude": { @@ -40,7 +45,8 @@ } }, "mounts": [ - "source=${localWorkspaceFolder},target=/workspace,type=bind" // Mount your local workspace into the container + "source=${localWorkspaceFolder},target=/workspace,type=bind,consistency=cached" // Mount your local 
workspace into the container ], - "remoteUser": "root" // Set the user inside the container + "remoteUser": "ubuntu", + "postCreateCommand": "id && ls -la /workspace && echo 'Container is ready!'" } \ No newline at end of file diff --git a/.devcontainer/docker_guide.md b/.devcontainer/docker_guide.md new file mode 100644 index 00000000..18e318ef --- /dev/null +++ b/.devcontainer/docker_guide.md @@ -0,0 +1,83 @@ +# Build Documentation with Development Container + +This guide shows how to use WSL2 (Windows Subsystem for Linux), Docker Desktop, Visual Studio Code (VS Code), and how to work with Development Containers in VS Code on a Windows machine. + +Requirements: + - [VS Code](https://code.visualstudio.com/) + - [WSL2](https://learn.microsoft.com/en-us/windows/wsl/install) + - [Docker Desktop](https://docs.docker.com/desktop/setup/install/windows-install/) + +## Step 1: Verify installations & Setup + +You can verify the installations in a terminal: + + ```bash + code --version + wsl --version + docker --version + ``` + +### Configure Docker to Use WSL2 + + See [Docker Desktop Documentation](https://docs.docker.com/desktop/features/wsl/#turn-on-docker-desktop-wsl-2). + 1. Open Docker Desktop. + 2. Go to **Settings > General** and make sure **Use the WSL 2 based engine** is checked. + 3. Under **Settings > Resources > WSL Integration**, ensure that your desired Linux distribution(s) are selected for integration with Docker. + +### Install Extensions + + 1. Open Visual Studio Code. + 2. Press `Ctrl+Shift+X` to open the Extensions view. + 3. Search and install (includes WSL and Dev Containers Extensions): + - [Remote Development Extension Pack](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.vscode-remote-extensionpack) + + Helpful VS Code Documentations: + - [Developing in WSL](https://code.visualstudio.com/docs/remote/wsl) + - [Developing inside a Container](https://code.visualstudio.com/docs/devcontainers/containers) + + +## Step 2: Open the Development Container (Using Pre-built Image) + +For faster setup, we'll use a pre-built Docker image: + +1. Open the `doubleml-docs` repository in VS Code: + + ```bash + code /path/to/doubleml-docs + ``` + +2. Open the Command Palette (`Ctrl+Shift+P`). +3. Type `Dev Containers: Reopen in Container`. + +VS Code will pull the `svenklaassen/doubleml-docs:latest` image (if needed) based on `devcontainer.json` and open the project in the container.
+This approach is much faster than building the container from scratch. VS Code automatically downloads the image from Docker Hub if it's not already on your system. + + +## Step 3: Build the documentation + +1. Open a terminal in VS Code (`Terminal > New Terminal`) + +2. Build the documentation: + + ```bash + cd doc + make html + ``` + + To build without notebook examples: + ```bash + make html NBSPHINX_EXECUTE=never + ``` + +3. View the built documentation by opening the output files: + + ```bash + # On Windows + explorer.exe _build/html + + # On Linux + xdg-open _build/html + + # On macOS + open _build/html + ``` diff --git a/.devcontainer/guide.md b/.devcontainer/guide.md deleted file mode 100644 index d508dfd2..00000000 --- a/.devcontainer/guide.md +++ /dev/null @@ -1,64 +0,0 @@ -# Build Documentation with Development Container - -This guide shows how to use WSL2 (Windows Subsystem for Linux), Docker Desktop, Visual Studio Code (VS Code), and how to work with Development Containers in VS Code on a Windows machine. - -Requirements: - - [VS Code](https://code.visualstudio.com/) - - [WSL2](https://learn.microsoft.com/en-us/windows/wsl/install) - - [Docker Desktop](https://docs.docker.com/desktop/setup/install/windows-install/) - -## Verify installations & Setup - -You can verify the installations in a terminal: - - ```bash - code --version - wsl --version - docker --version - ``` - -### Configure Docker to Use WSL2 - - See [Docker Desktop Documentation](https://docs.docker.com/desktop/features/wsl/#turn-on-docker-desktop-wsl-2). - - Open Docker Desktop. - - Go to **Settings > General** and make sure **Use the WSL 2 based engine** is checked. - - Under **Settings > Resources > WSL Integration**, ensure that your desired Linux distribution(s) are selected for integration with Docker. - -### Install Extensions - - - Open Visual Studio Code. - - Press `Ctrl+Shift+X` to open the Extensions view. - - Search and install (includes WSL and Dev Containers Extensions): - - [Remote Development Extension Pack](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.vscode-remote-extensionpack) - - VS Code Documentations: - - [Developing in WSL](https://code.visualstudio.com/docs/remote/wsl) - - [Developing inside a Container](https://code.visualstudio.com/docs/devcontainers/containers) - - -## Build & Open the Development Container - - - Open the project `doubleml-docs` in VS code: - - ```bash - code . - ``` - - - Open the Command Palette (`Ctrl+Shift+P`). - - Type `Remote-Containers: Reopen Folder in Container`. - - VS Code will build the new container(this may take some time) and open the project in it. - - -## Build the documentation - -You can build the documentation via - - ```bash - cd doc - make html - ``` - - Open the directory in WSL with - ```bash - explorer.exe . 
- ``` \ No newline at end of file diff --git a/doc/_static/basic_iv_example_nb.png b/doc/_static/basic_iv_example_nb.png index d0bbf23b..17155b78 100644 Binary files a/doc/_static/basic_iv_example_nb.png and b/doc/_static/basic_iv_example_nb.png differ diff --git a/doc/_static/robust_iv_example_nb.png b/doc/_static/robust_iv_example_nb.png new file mode 100644 index 00000000..67eb5c2d Binary files /dev/null and b/doc/_static/robust_iv_example_nb.png differ diff --git a/doc/api/api.rst b/doc/api/api.rst index 3a428cca..474c938e 100644 --- a/doc/api/api.rst +++ b/doc/api/api.rst @@ -11,7 +11,6 @@ API Reference DoubleML Data Class DoubleML Models - Other Models Datasets Utility Classes and Functions Score Mixin Classes for DoubleML Models diff --git a/doc/api/data_class.rst b/doc/api/data_class.rst index 312bc2f4..366bdf7f 100644 --- a/doc/api/data_class.rst +++ b/doc/api/data_class.rst @@ -3,7 +3,7 @@ DoubleML Data Class ---------------------------------- -.. currentmodule:: doubleml +.. currentmodule:: doubleml.data .. autosummary:: :toctree: generated/ @@ -11,3 +11,4 @@ DoubleML Data Class DoubleMLData DoubleMLClusterData + DoubleMLPanelData diff --git a/doc/api/datasets.rst b/doc/api/datasets.rst index 5e1497d9..38116c42 100644 --- a/doc/api/datasets.rst +++ b/doc/api/datasets.rst @@ -28,10 +28,12 @@ Dataset Generators datasets.make_iivm_data datasets.make_plr_turrell2018 datasets.make_pliv_multiway_cluster_CKMS2021 - datasets.make_did_SZ2020 + datasets.make_ssm_data datasets.make_confounded_plr_data datasets.make_confounded_irm_data datasets.make_heterogeneous_data datasets.make_irm_data_discrete_treatments + did.datasets.make_did_SZ2020 + did.datasets.make_did_CS2021 rdd.datasets.make_simple_rdd_data \ No newline at end of file diff --git a/doc/api/dml_models.rst b/doc/api/dml_models.rst index 84f5b8cb..11481640 100644 --- a/doc/api/dml_models.rst +++ b/doc/api/dml_models.rst @@ -3,7 +3,13 @@ DoubleML Models ------------------------------ -.. currentmodule:: doubleml + +.. _api_plm_models: + +doubleml.plm +~~~~~~~~~~~~~~~ + +.. currentmodule:: doubleml.plm .. autosummary:: :toctree: generated/ @@ -11,14 +17,57 @@ DoubleML Models DoubleMLPLR DoubleMLPLIV + + +.. _api_irm_models: + +doubleml.irm +~~~~~~~~~~~~~~~ + +.. currentmodule:: doubleml.irm + +.. autosummary:: + :toctree: generated/ + :template: class.rst + DoubleMLIRM DoubleMLAPO DoubleMLAPOS DoubleMLIIVM - DoubleMLDID - DoubleMLDIDCS - DoubleMLSSM DoubleMLPQ DoubleMLLPQ DoubleMLCVAR - DoubleMLQTE \ No newline at end of file + DoubleMLQTE + DoubleMLSSM + + +.. _api_did_models: + +doubleml.did +~~~~~~~~~~~~~~~ + +.. currentmodule:: doubleml.did + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + DoubleMLDIDMulti + DoubleMLDIDAggregation + DoubleMLDIDBinary + DoubleMLDID + DoubleMLDIDCS + + +.. _api_rdd_models: + +doubleml.rdd +~~~~~~~~~~~~~ + +.. currentmodule:: doubleml.rdd + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + RDFlex \ No newline at end of file diff --git a/doc/api/other_models.rst b/doc/api/other_models.rst deleted file mode 100644 index 2595dd30..00000000 --- a/doc/api/other_models.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. _api_other_models: - -Other models ------------------------------- -.. currentmodule:: doubleml - -.. 
autosummary:: - :toctree: generated/ - :template: class.rst - - rdd.RDFlex diff --git a/doc/conf.py b/doc/conf.py index be565248..051c7795 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -37,6 +37,7 @@ 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.coverage', + 'sphinx.ext.doctest', 'sphinx.ext.graphviz', 'sphinx.ext.intersphinx', 'sphinx.ext.mathjax', @@ -109,7 +110,13 @@ { "name": "PyPI", "url": "https://pypi.org/project/DoubleML/", - "icon": "fa-solid fa-cube", + "icon": "fa-brands fa-python", + "type": "fontawesome", + }, + { + "name": "Discussions", + "url": "https://github.com/DoubleML/doubleml-for-py/discussions", + "icon": "fa-solid fa-comments", "type": "fontawesome", }, ], @@ -118,7 +125,7 @@ "version_match": version, }, "show_version_warning_banner": True, - "check_switcher": False, + "check_switcher": True, "announcement": "Interested to learn more? We offer DoubleML Trainings!", } @@ -159,8 +166,11 @@ 'examples/py_double_ml_gate_sensitivity': '_static/sensitivity_example_nb.png', 'examples/py_double_ml_firststage': '_static/firststage_example_nb.png', 'examples/py_double_ml_basic_iv': '_static/basic_iv_example_nb.png', + 'examples/py_double_ml_robust_iv': '_static/basic_iv_example_nb.png', 'examples/R_double_ml_basic_iv': '_static/basic_iv_example_nb.png', + 'examples/py_double_ml_robust_iv': '_static/robust_iv_example_nb.png', 'examples/py_double_ml_ssm': '_static/ssm_example_nb.svg', + 'examples/R_double_ml_ssm': '_static/ssm_example_nb.svg', 'examples/py_double_ml_sensitivity_booking': '_static/dag_usecase_revised.png', } diff --git a/doc/examples/R_double_ml_basic_iv.ipynb b/doc/examples/R_double_ml_basic_iv.ipynb index 979df915..1d0df3ae 100644 --- a/doc/examples/R_double_ml_basic_iv.ipynb +++ b/doc/examples/R_double_ml_basic_iv.ipynb @@ -49,16 +49,11 @@ ] }, { - "attachments": { - "basic_iv_example_nb.png": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAaMAAACbCAYAAADPy/SAAAAAAXNSR0IArs4c6QAAIABJREFUeF7tnQl0FUUWhi9CWAwEoyAgYAyi7OuAMg4CCsMSUTQuoCyOrIqAgiDg4OACAoqjw6LRIKNsIsIYEWWJIAKCOIgaQWBkF1FkH1EWWeb85XR4Sfq91+/1e73+dY5Hj+nuqvrqvv67qm7dW+jcuXNnROQCYSEBEiABEiABewjMLkQxsoc8ayUBEiABEsglQDGiMZAACZAACdhOgGJk+xCwASRAAiRAAhQj2gAJkMB5Alu2bJEff/xRzp07J0lJSdKwYUP1xx07dsiuXbt0UdWrV0+Sk5PV35YvX657TZkyZaR27drqb6jjhx9+0L2ucePGkpiYKIcOHZKcnBzdaxISEqRcuXJSoUIFdS2LJwhQjDwxjOwECZggMHjwYJk+fbr89NNPeZ5yzTXXyNq1a9X/GzFihIwePVq3luzsbGnVqpX8/PPPSsD0SocOHSQrK0v9qWfPnvLaa6/pXvf1118r0Vq4cKGkpaWF7dXVV18tbdq0ke7du0v9+vXDXs8LHEuAYuTYoWHDSCCOBBYvXqxe4igrV66UrVu3SpUqVdQ/lStXjmPNsXn0zp07Zc+ePbJs2TJ555135PHHH5f09PTYPJxPsYMAxcgO6qyTBOwisGLFCrn//vvlyJEjsm3bNilRooRdTYlLvZ988omMGjVKzfSwNMjiGgIUI9cMFRtKAiYJDBw4UF588UV54IEHZOzYsUGX1ExWY+vtu3fvVjOk77//Xt58801p0aKFre1h5YYJUIwMo+KFJOBiAr1791azhXfffVdat27t4p4YazoENyMjQ9566y256667jN3Eq+wkQDGykz7rJgErCMBBAQ4D2CeCU4JfymOPPSZjxoyRuXPnyu233+6Xbru1nxQjt44c200CRglgyWr//v2+9Dbr0qWLzJw5U3kKli1b1igyXmc9AYqR9cxZIwlYQwBOChdddJE1lTm4llWrVknTpk0d3EI2TUQoRjQDEvAiAZz5SU1NlVmzZvlij8jIGB49elRKly5t5FJeYz0BipH1zFkjCcSfAA6pYgN/+/btnvSai5QglipxmHbp0qW5USUifQavjysBilFc8fLhJGADAeyPpKSkqIgJgwYNsqEFzqyyZs2aat8Ms0UWxxGgGDluSNggEjBJYPz48TJs2DAVnsdrh1rNoMnMzJS+ffuqOHuVKlUy8yjeG3sCFKPYM+UTScBeAm3btpUzZ84IYsaxnCdw8uRJueyyy+SZZ56RPn36EI2zCFCMnDUebA0JmCNw+vRpKVmypDz77LMyYMAAcw/z4N2dOnWSiy++WF566SUP9s7VXaIYuXr42HgSyEfg+PHjKgwOZkeYBbDkJYCAqkhzgeCwLI4iQDFy1HCwMSRAAnElsG7dOiVGiErB4igCFCNHDQcbQwIkQAL+JEAx8ue4s9deJTBnzhzp2LGjytTKQgIuIkAxctFgsakkEJYAxSg0IkTxvvvuu+Xs2bNhWfICSwlQjCzFzcpIIM4EKEYUozibWLweTzGKF1k+lwTsIOAUMfrll19U7iTkUEL21YSEBGnZsqVK7Fe1alXZuHGj8vizunBmZDVxw/VRjAyj4oUk4AICThCj9evXqzNOl19+uQwfPlxq1KghRYoUkcOHD8s777wjTz31lApV1LlzZ8uJUowsR260QoqRUVK8jgTcQMBuMfr222+la9euUrduXXnhhRckMTGxALaPP/5YPv30Uxk6dKjlSClGliM3WiHFyCgpXkcCbiCAMzSYeSxbtszy5iIE0ZNPPilTpkyRrKysoFllEZYH0bPT0tIsb+Pbb7+t0pDT29By9OEqpBiFI8S/kwAJGCOwa9cuueeeeyQ5OVntFeHfTiucGTltRHLbQzFy7NCwYSTgMgJYemvdurUSJCzRMWK4ywbQ3uZSjOzlz9pJILYE4KU2d+5cGTlyZGwfbOBp8JpDem9ExKYYGQDGSwIJUIxoDyTgJQJ27ols3bpVEBUbSewmT54spUqVchza/fv3yzfffCPNmzd3XNt83iCKkc8NgN33GAE7velwtqhfv36ydu1agSjWqlXLcXS5Z+S4IdEaRDFy7NCwYSQQBQE7xQjNxVIdzg/ddNNNKqeSnms3RGvJkiXSrl07KV68eBS9jP4WilH07OJ8J8UozoD5eBKwlIDdYgSXaRxsHTRokNSpU0eGDBkiTZo0kaJFi8qpU6fU+SJkoMWh2LJly1rKBpVRjCxHbrRCipFRUryOBNxAwG4x0hjt3r1bpk2bJv/617/kiy++UNlVEQ4IQUpxvqhYsWK24KQY2YLdSKUUIyOUeA0JuIUAPOnuvPNOHuoMMmCzZs1Sy4g89Oo4i6YYOW5I2CASIIG4EZgwYYKKi7dv37641cEHR0WAYhQVNt5EAiTgWgKbN2+W6tWru7b9Hm04xcijA8tu+ZgAPNXgMNCiRQsfU2DXXUaAYuSyAWNzSSAsgf79+8vq1avl888/D3utny7Ys2ePbNmyRTlSsDiOAMXIcUPCBpGASQLwXmvYsKF8+eWXUq9ePZNP887tEOl58+bJzp071cyRxVEEKEaOGg42hgRiRKB+/fpSpUoV5VrNInLgwAGpWLGijB07VgYOHEgkziNAMXLemLBFJGCewOLFi1Va7w8//JDLUiLSpUsXef/992Xv3r2MJm7evOLxBIpRPKjymSTgBAJ33HGHOnAK77GEhAQnNMmWNmjBY19//XW59957bWkDKw1LgGIUFhEvIAGXEvjxxx8lPT1dhecpV66cS3thrtlwWqhdu7aaJc6ePdvcw3h3PAlQjOJJl88mARKwl8ChQ4dUWgs4LjgxpYW9dBxVO8XIUcPBxpBAnAgsXLhQkG8IHmUsJOBAAhQjBw4Km0QCMScAh4a77rpLOnToIFOmTPG0azNctxGQdeLEidKoUaOYs+QD40KAYhQXrHwoCTiQwLZt21SeIZRx48YpYfJaycrKkq5du0rdunUFQVFTUlK81kWv9odi5NWRZb9IQI/AiRMnVKDQUaNGyXXXXSdvvPGGVK1a1dWwdu3apYQH6SG++uorldQPeZRYXEWAYuSq4WJjSSBGBDBL6t27t2RkZMhVV10lBw8elPnz50tqamqBGpAcT8vIumLFCjl79myBa5CvCLMRlE2bNgWNiq0969ixY7Ju3Trd3lSuXFmuvPJK9becnByBE0JggaDin1tvvVX979tuu01Fm+jRo4f07NlTypcvHyNKfIyFBChGFsJmVSTgOALr169X4oGXP5LeHT58uEAbd+zYIVdccYX6/4UKFdLtQ5s2bWTRokXqbzhgOnPmTN3rtm/frgRPC1mkdxGyxD7//PPqT3DJxn5X/tK8eXNZvny5+t+DBw+Wp59+modZHWddETWIYhQRLl5MAh4igBTgeNkjjt2yZctc2TN4CKL9mJVBDEuWLOnKfrDRQjGiEZCAHwkgqjdmM/A2++CDD1w9q8AS3Y033qj2vhD+KCkpyY9D6vY+U4zcPoJsPw
lESgDLW1iSa9q0qbz33ntSrFixSB/huOs3btyo8jdVqlRJzfKSk5Md10Y2KCQBihENhAT8RCA7O1tuvvlmadWqlQoT5KWYdchVhL2kMmXKqP0k/JvFNQQoRq4ZKjaUBEwSQNRqeJ61b99e5syZI0WKFDH5ROfdDgeJZs2aqb2jjz/+2Lcx+Zw3MmFbRDEKi4gXkIAHCGAWhAgMiOQ9Y8YMKVy4sAd6pd8FnDvCDAl9hCs68hixOJ4AxcjxQ8QGkoBJAohWDXfrzp07C9IoBHPPNlmNo27//vvv1QzpzJkzaobESAyOGh69xlCMHD9EbCAJmCAwbdo0+ctf/qIOg77yyiu+ECIN1759+9QMCQdsMUNC5lsWxxKgGDl2aNgwEjBJIDMzU/r06SMPPvigChrqx4J04/Cyw78xQ6pWrZofMbihzxQjN4wS20gCkRKYNGmSSheB6ATPPfdcpLd76npElcA5JCTag5ddrVq1PNU/j3SGYuSRgWQ3SCCXwPjx41WgUITIGTFiBMmIyH//+1/lzo6IDTiHVL9+fXJxFgGKkbPGg60hAXME/va3vykRwmwIsyKW8wSwd4SoExs2bFCRGho3bkw8ziFAMXLOWLAlJGCOAGZDCDA6YcIE6devn7mHefTu48ePq+gTiBiOAKxIo8HiCAIUI0cMAxtBAiYJYH9o8uTJynW7W7duJp/m7dtPnjypolCsWrVKxeWDgwOL7QQoRrYPARtAAiYInDt3TnnMTZ06VR1m7dSpk4mn+efW3377TUWjwHId4vP9+c9/9k/nndlTipEzx4WtIoHwBCBEOEOELKcI74OXK4txAqdPn1ZRKRYsWKDi9Gkp2Y0/gVfGkADFKIYw+SgSsIwAIgsgqsK8efP4IjVBXeM4d+5cCroJjjG4lWIUA4h8BAlYSkD7osd+B5eYzKPXZpjITsulTvM8o3wCxShKcLyNBGwhoO114KwMN99jNwTa3tuUKVPoBBI7rJE8iWIUCS1eSwJ2EtC8wNasWUO35DgNhOaViDh+vXr1ilMtfKwOAYoRzYIE3EBAOx+zfv16HtiM84DhvBaiWCCeH89rxRn2+cdTjCxDzYpIIEoCWuSATZs2MZRNlAwjvQ1RLBDNgpEsIiUX9fUUo6jR8UYSsIDAkSNHpGXLloKEcR999JHUqVPHglpZBQhoMf5GjhwpTzzxBKHElwDFKL58+XQSiJ4Aok1DiJAoDtGma9SoEf3DeGdUBLBUN2DAAHn00Udl3LhxUT2DNxkiQDEyhIkXkYDFBALz8KxevZqJ4SzmH1idlqDQz3mhLMBPMbIAMqsggYgIQIiaNm3KDKURUYvvxYEZc1999dX4VubPp1OM/Dnu7LVTCQSmyv7kk08kJSXFqU31Xbtmz54t99xzjy9TuFsw2BQjCyCzChIwRAB7Q82aNROEqEGKbAqRIWyWXgRBQhimzp07q8OxhQoVsrR+D1dGMfLw4LJrLiIAIfrTn/4khQsXlhUrVkjFihVd1Hp/NRVBVRFgFbMkClLMxp5iFDOUfBAJREkAbtvNmzdXQgRnhXLlykX5JN5mFYH3339fbr31VrnjjjtUPDuMHYspAhQjU/h4MwmYJLB9+3a1NFeyZEm1NEchMgnUwtshSEjb0b59e3n77bcpSObYU4zM8ePdJBCaALKJwjNOr0CIkPa6TJky6hwR/s3iLgLZ2dkqDxIECTmlihQpUqADv/76q1x44YXu6pj1raUYWc+cNfqJAL6cGzRooELLBJYtW7aopTkI0MqVKyU5OdlPWDzVVwgS0pi3atVKsrKy8ggSoqzfeOON0qNHD5UIkSUoAYoRjYME4kUArtnarOiZZ56R4cOHq6ogRPj/lSpVUrHmKETxGgHrnouZbVpamhIeODgkJCSoypHOHA4pV1xxhRp3FooRbYAELCeQnp4u2Fc4deqUqhtBN9u1ayctWrSQ1NRUFX07KSnJ8naxwvgQgPMJZkf40EDSw9tvv12WLFkimB2h/POf/+TsKDh6zoziY5Z8qt8JBM6KAllg7wDBTrG0U6pUKb9j8lz/Me5t27ZVe0SHDh0SZOXVSrVq1WTz5s2e63OMOkQxihFIPoYE8hDIPysK/OPf//53GThwIIl5lACW5rD8evbs2QI9xLmke++916M9N9UtipEpfLyZBHQIYLkGB1hDlczMTBVWhsVbBLp27SqzZs3SFSL0lLOjoONNMfLWT4G9cQIB7BUsWLAgd68oWJv4leyE0YpdG+AxN3Xq1LAP5LjrIqIYhbUcXkACERAwMivC44oWLarEaubMmSqsDIu7Cdx///3yyiuvGOoEZ0cUI0OGwotIwAwBhIeBJ5XmQZf/WcWKFZOTJ0/K3XffLciPE245z0xbeK+1BN544w3BfmBOTo5ccMEFQZfq0Cpc261bN2sb6OzaODNy9viwdW4isGbNGhVRQa9oM6G+ffsqEapZs6abusa2RkBg/vz58uKLL6o08dq457+9evXqsmnTpgie6vlLKUaeH2J20DICd955p+BFFDgrwhdyiRIl5KGHHpJ+/fpJhQoVLGsPK7KXAA67Tp48WYUJ0mbEgS3i7CjP+FCM7DVX1u4VAnqzIgjPoEGD1EwIgsTiTwJff/21TJo0SZAhNnCmxNkRxcifvwj2Oq4EAmdFNWrUkEceeUTFI2MhAY3Anj17lCj94x//UFEZkESRs6Nc++DMiD8VEjBLQJsVIQzMww8/rMLAsJBAMALHjh1Ty3dwdihbtqxs2LCBsERiK0Z79+6V/fv3y5EjR+TcuXOOBYww74iWDEO45JJLHNtOvzXMrfaDrJ9w7UWQTBYSiIQA3MEROgiHZZHt98CBA359f5oTo8OHD6ukUm+99ZbKx6IX/iKSgbHj2tKlS6uMjZ06dVIRdpmx0bpRgP1gcxf2g8RybrQfJMWDO3fHjh1pP9aZjidqQuw67f3pVvu/6KKLVIJB2D+CxJp4f0YvRk899ZSMHDlSihcvrl7k+DpMSUmRSy+91BVJwvAVsm/fPvniiy9UHnskQatSpYpkZGSolwpLfAk8+eST8sQTTxSwH2Q6dcNsNZj94EsXP0oWEghFQLN/zIpw5gxLu3h/utH+EY0cAWKvvPJK5aQR5QpB5GL06aefyn333SdYUnn66afVJm1iYqLrLQ9ZNx977DH1lY4T8dhkZObN2A8r9le6d+8uP/zwg+CDxqv2M2HCBFeIauxHmE8MRQD2jyR7+BCG/ffq1csTnpbbtm2Tv/71r7nvz4kTJ8rFF18ciTFEJkZIEIYKcXJ4/Pjxas/FawVi27t3bzl48KCaQgc7xOi1flvRn9GjR8uIESNoP1bAZh2OI4CPd2T8xQfY2LFjPfmxixkS9k+1LZw//vGPRsfBmBjBIQHr4kuXLvWNKyI2FGfMmCFjxoyRYcOGGQXK63QIwH7g+oxkctOnT5cuXbp4mtOJEyfUCwfRm/HSGTp0qKf7y86FJoAXM+wf70+8Uzp37uxpZMePH1dpMvAx/+yzz8qQIUOM9NeYGCEz5TfffCMffPCBNGrUyMiDPXENQOJF8
vzzz6vDiyzREYD9IPQJsp76yX4gREg1jtAwiMDA4k8CiD+4detW9f78wx/+4BsI+JDH1ge2PAYMGBCu3+HFCCqOnO7Y4G/YsGG4B3ru71jXxWbju+++K+3bt/dc/+LdIc1+1q5dqzKc+q3ASQPLM7Qfv4387/31u/1jWRLL84sWLQrnGBZajHA6GM4KyM2SlpbmT2v6v0EhEjOcHOjUYNwM4KWI5Sqw87P9YIl7yZIltB/jpuOJK5HbCA4KtP/ft3i+/fbbUO/P4GKEdf6qVasqt0N4Rvi5IOT/VVddJThhj30AlvAENPvBlyGm6X4uv/76qyAOWbNmzdSeAYv3CcD+U1NTlefcCy+84P0Oh+gh7B9agiMP06ZNC3ZlcDGCRxmW5zAbKFWqlK9hovNYZsHhWOS2v+GGG3zPIxwAfBFmZWXRfv4PCr+l9PR02k84w/HI37EigAjufH/+PqDz5s1TTnDwtgvioawvRj/99JOUL19epdCFsrP8TgDKjigBECSW4AQ0+8EyHROIneekHYaFVyGLdwlo9o9ZgNc9RyMZRdg/UqpgyVqn6IsRvH+w6Yo4SYUKFYqkPk9fi2RZLVu2VAc2cVKaRZ8AliVGjRpF+8mHB+vmiO5B+/H2LwfetziHiXFmOU8gOztb2rVrp2Lw6bw/9cXommuukQYNGhjO6e4X4Aj5jlA1CIM0cOBAv3Q74n42btxYuXC//PLLEd/r5Rs0+4F3Jl29vTvScN9u0qSJiszNcp6AZv+Y6PTv3z8/moJidPr0aUlISFDx2nBwiSUvgbZt2wqCY86dO5dodAho9oMlChwcZslLoHXr1oLgkggQy+I9Apr9++FwazSjh6U6eCTPnj07vBhhww0B75Ay9/rrr4+mPk/fg9TRn332mfqHpSABxKiC5wzOpeGwH0teAn379pX169cLwk6xeI8A3JevvvpqWb16tUQQCsd7IIL0CKGCcnJyFJ98peDMCCLUvHlzta532WWX+QaS0Y5iP23cuHFcDw4CDKHwEXEB6+VwgmHJSwAJ1bCngN8Xi/cIYF8ZUasRCBUZDFjyEsBeGo56fPfdd+HFSHPBc3JyPDsHGPGWkC6DfPRHgfYT2joRFR4pV2g/dv6K41c3lu8Rh47jq8/4zTffVFkRdPgUnBlpMLHZBDc8lrwEND5w8aanYUHroFiH/sVgrwiJyPiy8uabBR8bCBTgxkSRVowI9opwEB76EnaZjmIUeki8LkbYz8CeT1JSUlS2STGiGEVlOA65CQGhK1SoIMnJyVG1iGIUGhvFKCqz0r/JD2IE11SclsZyA/4dyQyZYkQxiuHPzfJHQYxq1aqloq1gOR72D+9io4Vi5CExOnr0qPTs2TOk63SHDh3UGSg7Dp76RYyKFCkicFMtUaKEWlaCMBkJdmqXGG3ZskW186uvvirwa0DGSZx9gncfXi6IE2fXEiuX6Yy+1u25ThOjwoULq6WkokWL5tr/zTffHLZRdokRwuwgdmZgadOmjcycOTM34zD+OzAiRP6/h+1cDC5w1cwIGVYnTZqksq1iuqwVrLEDJjKFIpo4PP7sKH4Ro0C2+EGeOnVKpRHG5iNe6MH42yVGaC/W6TMzM1WmycBzHr/88os6qgAvHriUImMxroHgWl0oRlYTj6w+TYz07B/nw7AfhA+zYPEp7RIjtHfnzp0qfxCcBAYPHqyioBQrViwPgHXr1inbx8F9REOw+jfgKjHavXu3IOJt3bp180CEyzCCbyI/DAzCri9bP4pR4EDAuBHFvHLlymojEsIUmDDMTjFCO7UvRL1Dhz///LNKloi/ZWRk2GJHFKPIxMHqq/XESM/+K1asmGv/mHVrxU4xQhvgUt6nTx+1qjFlypQ8xyu0D3osu9v1DnWVGOkZH85k4DR/jRo1VBrbxMREq200tz5NjLx6gn7Hjh2G02RrwoRxgTDhixHLZHa6vocSIwyitpxXtmxZFc4+cPZthVFpYuRV+7GCYTzr2LNnj+Gszpr945Arlr/wYYYDnXZ70+HDHdFzbrrppjzvS2RbXr58ufqot3pGpI2Zq8UISyyPPvqoOiRo1z5RoPFrYhTPH4Tbnq0t4+HEOfI+4SVvl+tyODE6ceKEPPLII/LSSy8JfrTIMWRl0cTIyjpZV3wJaPaPmJ6wf+Q8s9O1G789LNUhfiaCFkMcEUkcy9RDhgyJ2lMwFhRdK0aAOmHCBAXUzn0iPTHy6jkjuHYHLrsFM0DtqxA/Pm1WVLNmTXHyMp3WFwRqRDpkO+KHcZkuFq+0+D0j3DKdVrNm/1WqVMmdFdWpU0fsXqbT2odlOkT6QLBiLNdt3rxZZRzAKoadxbVi5JR9IorR7wS0HyDCRMGRActy+BoMLBSj0D91ipGdr8LwdYcSI83+4cWr7Zfmjz/nFDFCT7X9I+Rfw4oOgvTaXVwpRgg4iH0ifKXbvU/kZzHCGYvffvtNSpcunetJhNhbwYrTxej48eNq+QJLKUjyhVD/VhaKkZW0I68rvxhp9o9s11juwr4QclIFK04SI7QRqwAI0YV2VatWLXIgMb7DdWLktH0iv4oR1sK1g38422WkOF2MNAcGLK+89tprlq+fU4yMWJF91wSKETb5Nfu/7bbbDDWKYhQak6vECAfNENkYiamC7RMdPnxYYDR2pCjwg2s3vqa06AsQpEiKk8Uo0LV7+vTpYlRgI+l/uGspRuEI2ft3vFeGDRuWa/849B1JoRh5SIzC7RNBiCBUOLBlZKM9EkMycq3XxejQoUPqcGu0xU4xCnboFUtz8LJD+Pp///vfMnHiRBU5O5IwR9HyyH8fxShWJOPznP379wvc/qMtThIj2P3jjz8ur776qlqmQ2JQu85najxdMzPS9onWrl0b0hauu+46teafkpISrc1EfZ/XxShqMP+/0S4xChcOCDaDcC6IOWZnnhmKkVkLc/b9ThEjI+GB7CAZkRgtWLBA/WihqsWLF7ejvY6uU4vvZNc5GkfDERHNfshHf6SwPIhwLNgXZfEegffee09uueUW287ZOZ0otl4efPBBOXbsWP6mFsxn9Pnnn0ujRo1k69atKv04S14CyPKKw7dIz85SkABiXyE8CvikpqYSUT4CY8aMUY4T+H2xeI/AZ599Jtdee62KE2fHyo3TiY4ePVr5AvznP/8JL0ZIF41zJAgbYVcwUicD7devnwp5s3LlSic307a27d27VxC3C4FJr7/+etva4dSK+/btKxs3blTRH1i8RwDhhBC3cdWqVbY4WDmdKFYFsKSO9Oz5SsGZES7Ay2TAgAGGY5Q5HUAs2weniRYtWqjTzSz6BGA/Dz/8sAo9wpKXQIMGDaRVq1by3HPPEY1HCcD+Bw0apMJOseQlUK9ePeVIgRUmQ2KEr384EcDziOU8ge+++04uv/xyfvWHMQqsCWO5Lpwjit9sS7MffjV7e+QfeOAB+fLLL2XNmjXe7miEvcPSNMKHIY1L/sgVIqI/M/rwww/VKeNdu3aply/L7wQQCQL/wP3TbhdJJ49Jdna2Cj1C+8k7SmPHjlXu5bQfJ1uv+bYtXrxY
ff1jy6N8+fLmH+iRJyCPGM6QBrF/fTHCeQ1Mp+AOi816FlHehTi1j68eBNlkCU5Asx9knkSgRpbz9oNZIxJEsniXAOwfAUmxHIszkSwiOHCOVBv9+/dXCQB1ir4Y4UJsMCHKK5ZaApNH+RUs0ljArRteYvmzJ/qVSah+037y0kHmTYT1p/3449eyaNEiSUtLU8t1+ROF+oNA3l5iDw1nsELYf3AxwqMAE6HH4e6dnJzsR4aqz/B8QnDQqVOnqqRVLMYIIEoGPGfwg0xKSjJ2kwevwrIllm1ef/11FfyXxR8E8M5AHjbsvdP+26o8Z4h2HqSEFiOseWKDBoFxAAADF0lEQVS5DtMrvJALFy7sDysK6CWiQuDcFZKwzZ8/n3tFEViAZj+1a9cWvJD9aj/woLvhhhsEByJZ/EMADivwvvWz/SO7LFbW8DGG6DUhSmgxwo3wisJ5EaTVzczM9I8licjBgwcVSHzVwDMm0qCJvoIVpLOa/XTr1s13+48HDhxQ9oP0G7Qff/4aMCtCzq8+ffpIRkaGryDAUQF9v+SSS9S5qzARfcKLEehhrQ+BJTt27CiILeSHghlRmzZt5OTJk0qQK1So4Idux6WPmv0gHwxiCvqhYHkSX4PIBQX7oVeVH0Zdv494Z8L2kZAS+85+KNjewTJ9BPZvTIwAD+6K6enpgtS6iD9WpkwZzzLFkiTiS8GtHf1GRAoWcwRoP+b48W53E8A7Ex/zeH8uXLjQ03vwge/PpUuXGg1MbFyMYAo5OTnKqQGzheHDh6uAd17yLNu9e7dy254xY4aaFSEC9YUXXujuX4GDWh9oP3DvRGgcr9kPQvbDfjArov04yPgc0BTYP87fwfUb9o9jIl6zfxxbwOwvCvuPTIwwnsh3gzAX8AzCjKFHjx7qHzcHBUSOeHh6IIAf0gvjYCtiKLHEngD24eDmDPtB2JTu3bu73n7w9QfbQURu2A8Otvbu3Tv28PhE1xPIb/+wk/vuu0/Fs3NrQZAE2D8+wmD/CJXWq1evSLsTuRhpNWBDCmdvtJAXCO/QsGFDNQ2F952TIxScOHFCzfLwDwKeYkaUmJgoSC2MF0m5cuUiBcnrIyQA7kOHDi1gPziTgZAhbrEfBISF15RmP/gh2pkvKcJh4OU2EYD946MMUb5R8P6E5x3enz61/+jFSBvDo0ePqgjfEKUNGzaodOA7duywaYiNVQuvjmrVqknNmjWV6zrctnViJRl7GK8yRcCt9lO9enV1yp72Y2r4fX8z7B8HxBGvDdHc4Qrthvcn7D/w/dmkSROzY2lejMy2gPeTAAmQAAn4ngDFyPcmQAAkQAIkYD8BipH9Y8AWkAAJkIDvCVCMfG8CBEACJEAC9hNQYjTH/nawBSRAAiRAAj4mcOZ/DjaH9m+1TRoAAAAASUVORK5CYII=" - } - }, "cell_type": "markdown", "id": "a3250ef4", "metadata": {}, "source": [ - "![basic_iv_example_nb.png](attachment:basic_iv_example_nb.png)" + "![basic_iv_example_nb.png](../_static/basic_iv_example_nb.png)" ] }, { diff --git a/doc/examples/R_double_ml_ssm.ipynb b/doc/examples/R_double_ml_ssm.ipynb new file mode 100644 index 00000000..0e426b47 --- /dev/null +++ b/doc/examples/R_double_ml_ssm.ipynb @@ -0,0 +1,635 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "f03c754a", + "metadata": {}, + "source": [ + "# R: Sample Selection Models\n", + "\n", + "In this example, we illustrate how the [DoubleML](https://docs.doubleml.org/stable/index.html) package can be used to estimate the average treatment effect (ATE) under sample selection or outcome attrition. The estimation is based on a simulated DGP from Appendix E of [Bia, Huber and Lafférs (2023)](https://doi.org/10.1080/07350015.2023.2271071). \n", + "\n", + "Consider the following DGP:\n", + "$$\n", + "\\begin{align*}\n", + "Y_i &= \\theta_0 D_i + X_i'\\beta_0 + \\varepsilon_i,\\\\\n", + "S_i &= \\mathbb{1}\\{D_i + \\gamma_0 Z_i + X_i'\\beta_0 + \\upsilon_i > 0\\}, \\\\\n", + "D_i &= \\mathbb{1}\\{X_i'\\beta_0 + \\xi_i > 0\\}\n", + "\\end{align*}\n", + "$$\n", + "where $Y_i$ is observed if $S_i=1$\n", + "with\n", + "$$X_i \\sim N(0, \\sigma^2_X), \\quad Z_i \\sim N(0, 1), \\quad (\\varepsilon,_i \\nu_i) \\sim N(0, \\sigma^2_{\\varepsilon, \\nu}), \\quad \\xi_i \\sim N(0, 1).$$\n", + "\n", + "Let $D_i\\in\\{0,1\\}$ denote the treatment status of unit $i$ and let $Y_{i}$ be the outcome of interest of unit $i$.\n", + "Using the potential outcome notation, we can write $Y_{i}(d)$ for the potential outcome of unit $i$ and treatment status $d$. Further, let $X_i$ denote a vector of pre-treatment covariates. \n", + "\n", + "## Outcome missing at random (MAR) \n", + "Now consider the first setting, in which the outcomes are missing at random (MAR), according to assumptions in [Bia, Huber and Lafférs (2023)](https://doi.org/10.1080/07350015.2023.2271071). 
\n", + "Let the covariance matrix $\\sigma^2_X$ be such that $a_{ij} = 0.5^{|i - j|}$, $\\gamma_0 = 0$, $\\sigma^2_{\\varepsilon, \\upsilon} = \\begin{pmatrix} 1 & 0 \\\\ 0 & 1 \\end{pmatrix}$ and finally, let the vector of coefficients $\\beta_0$ resemble a quadratic decay of coefficients importance; $\\beta_{0,j} = 0.4/j^2$ for $j = 1, \\ldots, p$. \n" + ] + }, + { + "cell_type": "markdown", + "id": "25181797", + "metadata": {}, + "source": [ + "### Data\n", + "\n", + "We will use the implemented data generating process `make_ssm_data` to generate data according to the simulation in Appendix E of [Bia, Huber and Lafférs (2023)](https://doi.org/10.1080/07350015.2023.2271071). The true ATE in this DGP is equal to $\\theta_0=1$ (it can be changed by setting the parameter `theta`). \n", + "\n", + "The data generating process `make_ssm_data` by default settings already returns a `DoubleMLData` object (however, it can return a pandas DataFrame or a NumPy array if `return_type` is specified accordingly). In this first setting, we are estimating the ATE under missingness at random, so we set `mar=True`.\n", + "The selection indicator `S` can be set via `s_col`." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "a1f2f984", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [], + "source": [ + "library(DoubleML)\n", + "library(mlr3)\n", + "library(ggplot2)\n", + "\n", + "# suppress messages during fitting\n", + "lgr::get_logger(\"mlr3\")$set_threshold(\"warn\")" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "d35090ed", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "================= DoubleMLData Object ==================\n", + "\n", + "\n", + "------------------ Data summary ------------------\n", + "Outcome variable: y\n", + "Treatment variable(s): d\n", + "Covariates: X1, X2, X3, X4, X5, X6, X7, X8, X9, X10, X11, X12, X13, X14, X15, X16, X17, X18, X19, X20, X21, X22, X23, X24, X25, X26, X27, X28, X29, X30, X31, X32, X33, X34, X35, X36, X37, X38, X39, X40, X41, X42, X43, X44, X45, X46, X47, X48, X49, X50, X51, X52, X53, X54, X55, X56, X57, X58, X59, X60, X61, X62, X63, X64, X65, X66, X67, X68, X69, X70, X71, X72, X73, X74, X75, X76, X77, X78, X79, X80, X81, X82, X83, X84, X85, X86, X87, X88, X89, X90, X91, X92, X93, X94, X95, X96, X97, X98, X99, X100\n", + "Instrument(s): \n", + "Selection variable: s\n", + "No. Observations: 2000" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "set.seed(3141)\n", + "n_obs = 2000\n", + "df = make_ssm_data(n_obs=n_obs, mar=TRUE, return_type=\"data.table\")\n", + "\n", + "dml_data = DoubleMLData$new(df, y_col=\"y\", d_cols=\"d\", s_col=\"s\")\n", + "dml_data\n" + ] + }, + { + "cell_type": "markdown", + "id": "79ee9309", + "metadata": {}, + "source": [ + "### Estimation\n", + "\n", + "To estimate the ATE under sample selection, we will use the `DoubleMLSSM` class. \n", + "\n", + "As for all `DoubleML` classes, we have to specify learners, which have to be initialized first.\n", + "Given the simulated quadratic decay of coefficients importance, Lasso regression should be a suitable option (as for propensity scores, this will be a $\\mathcal{l}_1$-penalized Logistic Regression). 
\n", + "\n", + "The learner `ml_g` is used to fit conditional expectations of the outcome $\\mathbb{E}[Y_i|D_i, S_i, X_i]$, whereas the learners `ml_m` and `ml_pi` will be used to estimate the treatment and selection propensity scores $P(D_i=1|X_i)$ and $P(S_i=1|D_i, X_i)$." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "4a905df9", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [], + "source": [ + "ml_g = lrn(\"regr.cv_glmnet\", nfolds = 5, s = \"lambda.min\")\n", + "ml_m = lrn(\"classif.cv_glmnet\", nfolds = 5, s = \"lambda.min\")\n", + "ml_pi = lrn(\"classif.cv_glmnet\", nfolds = 5, s = \"lambda.min\")" + ] + }, + { + "cell_type": "markdown", + "id": "d90a87ea", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "source": [ + "The `DoubleMLSSM` class can be used as any other `DoubleML` class. \n", + "\n", + "The score is set to `score='missing-at-random'`, since the parameters of the DGP were set to satisfy the assumptions of outcomes missing at random. Further, since the simulation in [Bia, Huber and Lafférs (2023)](https://doi.org/10.1080/07350015.2023.2271071) uses normalization of inverse probability weights, we will apply the same setting by `normalize_ipw=True`.\n", + "\n", + "After initialization, we have to call the `fit()` method to estimate the nuisance elements." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "761ab3d5", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "================= DoubleMLSSM Object ==================\n", + "\n", + "\n", + "------------------ Data summary ------------------\n", + "Outcome variable: y\n", + "Treatment variable(s): d\n", + "Covariates: X1, X2, X3, X4, X5, X6, X7, X8, X9, X10, X11, X12, X13, X14, X15, X16, X17, X18, X19, X20, X21, X22, X23, X24, X25, X26, X27, X28, X29, X30, X31, X32, X33, X34, X35, X36, X37, X38, X39, X40, X41, X42, X43, X44, X45, X46, X47, X48, X49, X50, X51, X52, X53, X54, X55, X56, X57, X58, X59, X60, X61, X62, X63, X64, X65, X66, X67, X68, X69, X70, X71, X72, X73, X74, X75, X76, X77, X78, X79, X80, X81, X82, X83, X84, X85, X86, X87, X88, X89, X90, X91, X92, X93, X94, X95, X96, X97, X98, X99, X100\n", + "Instrument(s): \n", + "Selection variable: s\n", + "No. Observations: 2000\n", + "\n", + "------------------ Score & algorithm ------------------\n", + "Score function: missing-at-random\n", + "DML algorithm: dml2\n", + "\n", + "------------------ Machine learner ------------------\n", + "ml_g: regr.cv_glmnet\n", + "ml_pi: classif.cv_glmnet\n", + "ml_m: classif.cv_glmnet\n", + "\n", + "------------------ Resampling ------------------\n", + "No. folds: 5\n", + "No. repeated sample splits: 1\n", + "Apply cross-fitting: TRUE\n", + "\n", + "------------------ Fit summary ------------------\n", + " Estimates and significance testing of the effect of target variables\n", + " Estimate. Std. Error t value Pr(>|t|) \n", + "d 0.94473 0.03045 31.03 <2e-16 ***\n", + "---\n", + "Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 
0.1 ' ' 1\n", + "\n", + "\n" + ] + } + ], + "source": [ + "dml_ssm = DoubleMLSSM$new(dml_data, ml_g, ml_m, ml_pi, score=\"missing-at-random\",\n", + " normalize_ipw = TRUE)\n", + "dml_ssm$fit()\n", + "\n", + "print(dml_ssm)" + ] + }, + { + "cell_type": "markdown", + "id": "781e5376", + "metadata": {}, + "source": [ + "Confidence intervals at different levels can be obtained via" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "1297e329", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 5 % 95 %\n", + "d 0.8946549 0.9948104\n" + ] + } + ], + "source": [ + "print(dml_ssm$confint(level = 0.9))" + ] + }, + { + "cell_type": "markdown", + "id": "bfe05e01", + "metadata": {}, + "source": [ + "### ATE estimates distribution\n", + "\n", + "Here, we add a small simulation where we generate multiple datasets, estimate the ATE and collect the results (this may take some time). " + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "80a10b47", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [], + "source": [ + "n_rep = 100\n", + "ATE_estimates = rep(NA, n_rep)\n", + "ATE_estimates[1] = dml_ssm$coef" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "8956cb51", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[1] \"Iteration: 20/200\"\n", + "[1] \"Iteration: 40/200\"\n", + "[1] \"Iteration: 60/200\"\n", + "[1] \"Iteration: 80/200\"\n", + "[1] \"Iteration: 100/200\"\n", + "[1] \"Iteration: 120/200\"\n", + "[1] \"Iteration: 140/200\"\n", + "[1] \"Iteration: 160/200\"\n", + "[1] \"Iteration: 180/200\"\n", + "[1] \"Iteration: 200/200\"\n" + ] + } + ], + "source": [ + "n_rep = 200\n", + "ATE = 1.0\n", + "\n", + "ATE_estimates = rep(NA, n_rep)\n", + "\n", + "set.seed(42)\n", + "for (i_rep in seq_len(n_rep)) {\n", + " if (i_rep %% (n_rep %/% 10) == 0) {\n", + " print(paste0(\"Iteration: \", i_rep, \"/\", n_rep))\n", + " }\n", + " dml_data = make_ssm_data(n_obs=n_obs, mar=TRUE)\n", + " dml_ssm = DoubleMLSSM$new(dml_data, ml_g, ml_m, ml_pi, score='missing-at-random', normalize_ipw=TRUE)\n", + " dml_ssm$fit()\n", + " ATE_estimates[i_rep] = dml_ssm$coef\n", + "}\n" + ] + }, + { + "cell_type": "markdown", + "id": "2e8a6094", + "metadata": {}, + "source": [ + "The distribution of the estimates takes the following form" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "e7cd8060", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA0gAAANICAMAAADKOT/pAAAAPFBMVEUAAABNTU1oaGh1dfV8fHx/f/+MjIyampqnp6eysrK9vb3Hx8fQ0NDZ2dnh4eHp6enr6+vw8PD/AAD///+Zzx6PAAAACXBIWXMAABJ0AAASdAHeZh94AAAgAElEQVR4nO2di3baOhAA5TQPkuZxU/7/Xy9vZGODLe9KWu3MOW2gBAYjTWwEJWELAKsJpe8AQAsQEoAAhAQgACEBCEBIAAIQEoAAhAQgACEBCEBIAAIQEoAAK0MKR7q379lX2P31+3b31sLzx+VbY6autr8khKf4H7rQDW5zz+3Z0e8ZGm/uyNgdePgt0DQyIe3YzL3CdnpmXm/teey7pif06+kqZz535z+Ht5kQ0vHsjJBmfAs0zeqQDl9+PkJ4X3ytyX//6sLHopBC6O8RX8MmvI5ec/Q2Jm94dh+E5B2ZkLbb7xB+l19r6t+/9vuXRSH1zv7ujvOe4vtDSKCMVEjbzXGX9PW8O8j6PF206cLT4fT32/551NfpCodDqN/zk5rf67Ob/nw/nvt+605PwQZHXoML4g153+3QPuJd5LKQLvf2dLvnw9H34+Z87L58HLf5abexX/Fdu25+tM3gALGQvg/HUn+vT5h2M+pw+nO/gwnnk5eQduUdJ9zf64wfCekzvmqcy/CCeEO63d7o97LccHPDd7Ziz/Xe9kN6P5z9ejt8+Thohnct2vxom8EBYiEdTn4ffkB/Px8nV/e1/X3dH6Q9hb/b/Sx7ihcbfk57oufwc3Nrn+dDu90R4/suik3Yf1NvfxRfMCjh89D0WzSHF4U0vLfn+9x97ve73fHL036/t/8J8B4tjMSbH90KOEA2pM3xicnvfiYffxj/Xg/Srlc4nn89fcPTza19dued1+a0Gvi2/xrP994FgxKON/wZLTfEIY2szw0X9gbXO/3raXPOx63nZ2HRdfqbf/+Rg7aQDekpmpCni/ZfXndPFv7+XK9w/vl9PBh8j27izOZyg8er/Vz3D0d6F/Rn7fmgrrsuNywKaXhvz/f5N7ql05fvz/fnaIvizY9uBRwgGFIXz8jelPs5PJs4PEOPf+a/7hetoyO789W718tP/cvtD3Zs/Qv6Ib1f7sTEk697W7G9vbc9RfzloxvsxeLNj24FHCAW0tfweKY38z7fThM7npT7XVJ8ZDeY0KkhdZfp3N1896xVu8G9nQrpI4Tnzd+fXkijtwIOEAtps39y3UUvjPZn3mE5uBvM+93e6O/4KvX5XMKh3fW50etluWFhSP17OxXSU/gaXNoNXhc+3Qo4QCqk78OMeQtvxzPPtyFFe5XLokLYxEd2IyFtjjc4ttgQXdC75DWcX7v5uiS1PKR4HzgR0mlfG10ab/69m4b2kAlp/xah/bOB78Mywfdl0W17/tm9XwreXA7+wrmep663PHwb0u7AaXNc5f6OrrYdXBBfMT5WvLy7YVFIvXt7WVwf2yN9HBYYr1sUb350K+CA1SHF62yXl0mjvcT+y+nFye78WtDTec3ss/8c4jak+HXX69W2gwviK75HLx99nm99dNUu3lXG/xjd26frssnoc6QjX5e7Fm1+dCvgAJmQnjbnJwc/m/MbaOKZ93V4u8zlp/v30+mpw2+8kxkNKXon0PVq28EF8RW7+EnJ+cyikKJ7ezTeWbXr3r4Oz8nOd+26+dGtgAPKHsJ/8MI/tEHRkHbPJnhTJzRBwZCuz6wArFMwpCde94dm4GUOAAEICUAAQgIQgJAABCAkAAEICUAAQgIQgJAABCAkAAHqC+m/xjz/MnlybU9zAyTjISQ8lYpseQgJT6UiWx5CwlOpyJaHkPBUKrLlISRtWGxw4SEkbQjJhYeQtCEkFx5C0oaQXHgICU+lIlseQsJTqciWh5DwVCqy5SEkPJWKbHkISRsWG1x4CEkbQnLhISRtCMmFh5C0ISQXHkLCU6nIloeQ8FQqsuUhJDyVimx5CAlPpSJbHkLShsUGFx5C0oaQXHgISRtCcuEhJG0IyYWHkPBUKrLlISQ8lYpseQgJT6UiWx5CwlOpyJZnVUj/wWP+lb4DoIdQSCrY+kH0GFbtXHgISRtCcuEhJG0IyYWHkCQJ4fbxtLw9RUW2PIQkRwgvLy9hGJPd7SkssuUhJCHCIaMjvZSMbk95kS0PIUkQV3RMScdzD0Iq6iEkAQYV9UtiscGFh5BWM9wbDQ7vCMmFh5DWMppRtFMiJBceQlrJZEeE5MpDSKsYP6zrlWRqe2oS2fIQ0hruZXQuydL2VCWy5SGkFTzo6FiSoe2pS2TLQ0jpPOyIkPx4CCmZxx0dSmKxwYWHkFKZ0xEhufEQUhp3l+t6JRGSCw8hJTEzo31JhOTCQ0gpzO/oJVjYnipFtjyElMCCjnpvBFeFkIp6CGk5c58fEZIjDyEtZlFGLy9/Mj3EhFTUQ0hLWdjRyz9C8uAhpKUsDinTwR0hFfUQ0kKWdkRIPjyEtIzFHe1CylMSIRX1ENIilnf08oeQPHgIaQkJHRGSDw8hLSExpCwlEVJRDyEtIKUjQvLhIaT5JHW0X2wgpPY9hDSbtI4IyYeHkGazIqQcJRFSUQ8hzSWxI0Ly4SGkuaSG9IeQPHgIaSapHR1DylASIRX1ENI8kjsiJB8eQprH2pD0SyKkoh5CmkV6R8fFBkJq3UNIsyCk/CJbHkKaw4qOziGpl0RIRT2ENAdCKiCy5SGkGazp6LzYQEhtewhpBoRUQmTLQ0iPWdXRJSTtkgipqIeQHkNIRUS2PIT0kHUdXRYbCKlpDyE9hJDKiGx5COkRKzu6hqRcEiEV9RDSIwipkMiWh5AesLaj62KDckmEVNRDSA8gpFIiWx5Cus/qjgjJh4eQ7kNIxUS2PIR0n/Uh/YtOaz7ahFTUQ0h3Wd8RIfnwENJdCKmcyJaHkO4h0BEh+fAQ0j0kQvoTn1F8uAmpqIeQ7iDRESH58BDSHeRDUiyJkIp6COkOhFRSZMtDSNOIdNRbbCCkZj2ENA0hFRXZ8hDSNIRUVGTLQ0jTaISkVxIhFfUQ0iQyHQ0WGwipUQ8hTUJIZUW2PIQ0CSGVFdnyENIUQh0Rkg8PIU0hFRKLDS48hDSBVEfDkNRKIqSiHkKagJBKi2x5CGkCQiotsuUhpAnEQhosNhBSmx5CGkeso5uQtEoipKIeQhqHkIqLbHkIaRxCKi6y5SGkceRCGi42aJVESEU9hDSKXEeE5MNDSKMQUnmRLQ8hjUJI5UW2PIQ0imBIN4sNhNSih5DGEOxoJCSdkgipqIeQxiCkCkS2PIQ0gmRHhOTDQ0gjiIZ0u9hASA16CGkEQqpBZMtDSCMQUg0iWx5CukW0o7GQVEoipKIeQrpFNqSRxQZCas/zeEi7A/EZEfEk5R8/QqpCZMszc0i7wVdFyj9+hFSFyJZn3pB2Nyf0KP74yXY0GpJGSYRU1LMspAwdlX/8hEMaW2wgpOY8s0b0ukPqP0X6r0XCH1n+jf1jKL2VIEJySL1zOpT+QSS8Q2KP5MOzLKSRc+KUfvyyhKRQEiEV9c
wZ0O7uWWlKP37SIY0uNhBSa55lITk4tJPuiJB8eJaHpL1yR0ii2yOPrQmey7MgpENF6m9sKP34EVItIlse3ms3QDyk8cUG+ZIIqaiHkAYQUi0iWx5CGkBItYhseQipj3hHhOTDQ0h95EMaX2yQL4mQinoIqQ8hVSOy5SGkPoRUjciWh5B6yHdESD48hNRDIaSJxQbxkgipqIeQehBSPSJbHkKKUeiIkHx4CCmGkCoS2fIQUoxGSFOLDYTUlIeQYrKGJFwSIRX1EFKERkeE5MNDSBGEVJPIloeQIlRCmlxsIKSWPIQUQUg1iWx5CCmCkGoS2fIQUgQh1SSy5SGkKyod3VlskC2JkIp6COkKIVUlsuUhpCuEVJXIloeQrhBSVSJbHkK6ohPS9GIDITXkIaQLOh3dC0m0JEIq6iGkC4RUl8iWh5AuEFJdIlseQjqj1NG9xQZCasdDSGdKhCRZEiEV9RDSGUKqTGTLQ0hnCKkykS0PIZ3Q6ujuYgMhNeMhpBOEVJvIloeQTpQJSbAkQirqIaQThFSbyJaHkE6ohXR3sYGQWvEQ0hG1jgjJh4eQjhBSdSJbHkI6QkjViWx5COmIXkgsNrjwENIBvY4ehCRXEiEV9RDSAUKqT2TLQ0gHCKk+kS0PIR1QDOn+YgMhNeIhpD2KHRGSDw8h7SkYklhJhFTUQ0h7CKlCkS0PIe3RDOnBYgMhteEhpD2EVKHIloeQ9pQMSaokQirqIaQ9hFShyJaHkLa6HT1cbCCkJjyEtCWkOkW2PIS0JaQ6RbY8hLQtHZJQSYRU1ENIW+WQHi42EFILHkJS7oiQfHgIiZAqFdnyEBIhVSqy5SEk7ZBYbHDhISTljgjJh4eQCKlSkS0PIZUPSaYkQirqISTtkB4vNhBSAx5CIqRKRbY8hKTcESH58BASIVUqsuUhJO2QZiw2iJRESEU9hERIlYpseQiJkCoV2fIQEiFVKrLlcR+SdkdzFhtESiKkoh5CIqRKRbY8hERIlYpseQiJkCoV2fIQknZIcxYbCMm8x3tI6h3NC0mgJEIq6iEkbQjJhYeQtCEkFx5C0mbWYgMhWfc4D0m/I0Ly4SEkbeaFtL4kQirqISRtCMmFh5C0mbfYQEjGPYSkDSG58PgOKUNHc0NaXRIhFfUQkjaE5MJDSNrMXGwgJNseQtKGkFx4CEkbQnLhWTV8/xkn/MnAv7nfGEo/HLAYoZBUyPiDKMcOafZiw9pdEnukoh5C0oaQXHgISRtCcuHxHFKWjgjJh4eQtJm7akdIpj2EpA0hufAQkjazQ1pZEiEV9RCSNoTkwuM4pDwdzV9sICTLHkLShpBceAhJG0Jy4SEkbeaHtK4kQirqISRt5i82EJJhDyFpQ0guPISkDSG58PgNKVNHS0JaVRIhFfUQkjYLFhsIya6HkLQhJBceQtKGkFx4CEkbQnLhcRtSWDLB17BksWFNSYRU1ENI2hCSCw8haUNILjxeQwqLnrusgZBceAhJm0UeQrLqISRtCMmFh5C0WeZJHw9CKuohJG0IyYXHaUhh6QRPZ9FiAyFZ9RCSNoTkwkNI2hCSCw8habMspPSSCKmoh5C0WeghJJseQtKGkFx4CEkbQnLh8RlSWD7Bk1nqSR0RQirqISRtFi42EJJNDyFpQ0guPISkDSG58BCSNoTkwuMypMPnntS62JBaEiEV9RCSNoTkwkNI2hCSCw8haUNILjweQwppEzyRpYsNhGTSQ0jaLA4psSRCKuohJG0IyYWHkLQhJBcehyGdPj2/2sWGxJIIqaiHkLQhJBceQtKGkFx4CEkbQnLhISRtli82EJJBDyFpkxBSUkmEVNRDSNoQkguPv5DOvzuWkBKxNcFzeQhJmxQPIZnzEJI2hOTCQ0jaEJILDyFpQ0guPO5COndU9WJDSkmEVNRDSNoQkgsPIWlDSC483kK6dERIqdia4Lk8hKRNmmf5uBBSUQ8haUNILjyEpA0hufAQkjaJnsUDQ0hFPc5CunZU92IDIVnzEJI2hOTCQ0jaEJILDyFpQ0guPISkTapn6cgQUlGPr5CijggpFVsTPJeHkLQhJBceQtKGkFx4XIUUd1T7YgMh2fIQkjaE5MJDSNoQkgsPIWmTGtLSkgipqMdTSL2Oql9sICRTHkLShpBceAhJG0Jy4SEkbdI9y8aGkIp6CEmb5MUGQrLkISRtCMmFx1FI/Y4MhLSsJEIq6iEkbQjJhYeQtFnhISQ7HkLShpBceAhJG0Jy4fET0qAjCyEtKomQinoISZsViw2EZMczY6i6PdFpEe80hHSFkMx45oQ0OKlcktLjN+yIkFKxNcFzeQhJmzUhLSmJkIp6Ho9UNzxNSItY5SEkK54ZIcVPkS5/He+CIcIfi4TSDxvcZVFIUTx290g3OyQbe6QFuyT2SEU9MweKkJIhJBceQtJm1WIDIVnxcGinDSG58MwLaWKxQQVCiiEkI56Z72w4fI1OK0JIPWaXREhFPU7ea3fbkZHFBkIy4iEkbQjJhYeQtCEkFx5C0oaQXHgISRsWG1x4fIQ00pGVkGaXREhFPYSkDSG58LgIaawjQkrF1gTP5SEkbdZ6CMmEh5C0We2ZOUSEVNRDSNoQkguPh5BGOyKkVGxN8FweQtJm7WLD3JIIqaiHkLQhJBeewSA9vX+J3OwKCGkIIRnwDAYphNC9fYrcciqENISQDHgGg/T793XXUnj++yNy6ymIP37jHdlZbCAkC56RQfrcdLuWnkrtlwjphlklEVJRz9gY/WzCYbckIliM9OM30REhpWJrgufy3I7R9+thd/T1HF5FDEshpBsIqX7PcIw+ny9HdaHM0nhrIa1fbCAkA57h8ncIr9/ni7Q/L2gcQrqBkOr3DJe/N9/j35cP4cdvqiNCSsXWBM/lGS5/i9zoKgjpljklEVJRz80Lssev6h8DOU1rIUl4CKl6TzxEXYgQufUUCOkWQqreEw/RR9TRh8itp0BItxBS9Z6JQ7uCENIIM4aFkIp6yoczRPbxm+zI1GIDIVXviUdotztq7jkSIYlja4Ln8hCSNoTkwsOhnTYiIc0oiZCKehoPabojW4sNhFS7ZzhAH912+xW6d5EbT4KQxiCkyj2DAfrYPTn62b8wW64kycfvTkeElIqtCZ7Lc/Pu76/dn4/vQu/83kNIYxBS5Z7bF2Q/w1PRF2ZbC0lmseFxSYRU1DMYny78vIXv/bMkkVtPgZBGIaS6PYPxed9/Htd+h7QRufUUBB+/ex0RUiq2Jnguz3B8NqH73O2YynVESOMQUt2epl9HqiIkIQ8h1e0hJG0IyYXn5tCua+i9di2F9LAkQirqGQzPpqU3rd7tiJBSsTXBc3lulr/L/dfYE62FJLTYQEh1e1r+H7KEpIKtCZ7LMxie11D8A7kIaYIHJRFSUc9gdH6653K/0OWI2ON3vyNCSsXWBM/luf1FY80sNlQSkpiHkGr2tBvSg47shfSgJEIq6im/uDCEkKYgpIo9hKQNIbnw3AzOx+vusO654O+kaC0kscUGQqrZM/xtFE+H50chfIncegpCj9+jjggpFVsTP
JdnMDhvYbN/UfZvqV8guyWkO9wtiZCKekbe2XD+UwhCmoSQ6vW0GtLDjgwuNhBSxZ7xQ7tNeBO59RQIaRJCqtczXGw4/XekrtwbhUS263FHFkO6WxIhFfXcDM37UwhPm4JvXSWkaQipWk+jL8hWFJLgYgMh1eshJG0kQ7pXEiEV9fRH5vf9efcE6bXo/5KV2K4ZHRFSKrYmeC5Pb2Q+z5980pV7YwMh3YOQavXEI/MTwtv+TXZfr6Hgf5RtLSRRDyHV6olH5vrq0ZvxjywmJEVsTfBcnnhkunB+9ehn/xspCiGwXXM6shnSnZIIqahn8MuYR07mhpDuQUiVeghJG9HFBkKq1UNI2hCSC0+LIc3qiJBSsTXBc3n6IYUmPkWIkFSxNcFzeRoMaV5HLDakYmuC5/I0+F67pkOaLomQinoISRtCcuFpL6SZHRFSKrYmeC4PIWkju9hASJV6CEkb4ZAmSyKkoh5C0oaQXHiaC2luR4SUiq0JnstDSNqIeyaGjJCKeghJG0Jy4WktpNkdEVIqtiZ4Lg8haUNILjyNhTS/I7OLDYRUpYeQtBEPaaIkQirqISRtCMmFp62QFnRESKnYmuC5PISkjbyHkCr0EJI2hOTCsyqk/2oj/PFAKP0wwxmhkFRY8QNiyQ6JPVIqtvYUuTyEpI38YsN4SYRU1ENI2hCSC09LIS3qiJBSsTXBc3kISRtCcuFpKKRlHRlebBgviZCKeghJG0Jy4SEkbQjJhaedkBZ2ZDqksZIIqaiHkLRRWGwgpPo8hKQNIbnwNBPS0o4IKRVbEzyXh5C0ISQXnlZCWtyR7cWGkZIIqaiHkLQhJBceQtKGkFx4GglpeUeElIqtCZ7LQ0jaqCw2EFJtHkLSRiek25IIqainjZASOiKkVGxN8FweQtKGkFx4CEkbLc9w5AipqKeJkFI6IqRUbE3wXB5C0oaQXHhaCCmpI0JKxdYEz+UhJG2UFhsIqS5PAyGldWQ+pGFJhFTUQ0jaEJILDyFpQ0guPISkjZqHkGry2A8psSNCSsXWBM/lISRtCMmFx3xIqR0RUiq2JnguDyFpo7bYMCiJkIp6rIeU3BEhpWJrgufyEJI2hOTCYzyk9I4IKRVbEzyXh5C0UfT0Bo+QinoISRtCcuGxHdKKjggpFVsTPJeHkLQhJBce0yGt6aiBxYZ+SYRU1ENI2hCSC4/lkFZ1REip2JrguTyEpA0hufAQkjaaHkKqxmM4pHUdNRFSXBIhFfUQkjaE5MJjN6SVHRFSKrYmeC4PIWmjudhASNV4zIa0tiNCSsXWBM/lISRtCMmFx2pIqztqI6SoJEIq6iEkbXQ9hFSJh5C0ISQXHqMhre+IkFKxNcFzeQhJG0Jy4bEZkkBHjSw2XEsipKIeQtKGkFx4TIYk0REhpWJrgufyEJI2yiFdSiKkoh6LIYl01MhiAyFV4iEkbQjJhYeQtCEkFx6DIcl0REip2JrguTyEpI32YsO5JEIq6rEXklBHhJSKrQmey2MuJKmOCCkVWxM8l4eQtCEkFx5rIYl11MxiAyFV4SEkbQjJhcdYSHIdEVIqtiZ4Lg8haaPvCY8fN0lsTfBcHlshCXbUzmIDIdXgISRtCMmFx1RIkh0RUiq2JnguDyFpox/SsSRCKuohJG0yeAipvMdSSKIdEZL8AHn2EJI2OTzh7uOWbYA8ewyFJNsRIYkPkGsPIWmTYbGBkMp77IQk3BEhSQ+Qb4+ZkKQ7IiThAXLuISRtcoS0L4mQinoISZssHkIq7bESknhHhCQ7QN49hKQNIbnwGAlJviNCEh0g9x5C0ibLYsOuJEIq6rERkkJHhCQ5QHgISRtCcuExEZJGR42F9BIIqahnRkjdjvh0d++b19NaSJk8hFTW8zik7vJX9FWR2+1S6YiQ5AYIDyHpQ0guPDOfI3W9L6rcbJdOR4QkNkB4totD6j9F+i8L4Y9p/mXyhDyjATFLQ+od2WVebFDaIbW2aveSbf3V1p4il2dZSLdn5CGkNAipqGfWw9/dOScOIaVBSEU9cx7+rn8qb0haHbW22EBIZT1zXpDtn9ReuSOkNAipqGfG60jnpbpu23+XgxL97VLrqLmQ/uQqydYEz+Wp/b12hDTbQ0glPZWHpNdRc4sNhFTUQ0jaEJILDyFpky2kXMsNtiZ4Lk/dISl2REgSA4TnDCFpk89DSAU9hKRNRk+ewbQ1wXN5qg5JsyNCEhggPBcISRtCcuGpOSTVjhpcbCCkgh5C0oaQXHgqDkm3oxZDylOSrQmey0NI2hCSC0+9ISl31OJiAyGV8xCSNoTkwkNI2hCSC0+1IWl31GRIWUqyNcFzeQhJm5yLDYRUzFNrSOodEdK6AcLTh5C0ISQXnkpD0u+ozZBylGRrgufyEFJbHkIq5CGktjyEVMhDSI159AfU1gTP5akzpAwdEdKaAcqBLQ8haZN3sYGQCnmqDClHR4S0YoCyYMtDSNpkDkm/JFsTPJenxpCydERI6QOUB1seQmrNQ0hFPITUmoeQingqDClPR82GpF6SrQmey0NIzXkIqYSHkLTJvdigXpKtCZ7LU19ImToipFRsTfBcHkLShpBceAhJG0Jy4akupNDsIkA+j+6g2prguTyE1KCHkPJ7agsptDzBc3kIKb+HkBr0EFJ+DyFpk3+xgZAKeCoLKbS3pygQkm5JtiZ4Lg8haUNILjyEpA0hufAQUoseQsruqSukUGjiNefRHFZbEzyXh5Ca9BBSbk9VIYViE685j+K42prguTyEpE2JxQZCyu6pKaQwMiFyTTw9CMmFh5C0ISQXHkLSpkxIiiXZmuC5PBWFFEYnRK6J15qHkPJ6CKlRDyHl9RBSox5CyuupJ6QwPiFyTbzWPISU10NI2hRabNArydYEz+WpJqQwNSFyTTwtCMmFh5C0ISQXHkLSplRIaiXZmuC5PLWEFKYnRK6J15qHkHJ6CKldj9LY2prguTyE1K6HkDJ6CKldDyFl9FQSUrg3IXJNPB2KLTYQUk4PIWlTLiSlkmxN8FyeOkIK9ydEromnAiG58BCSNoTkwlNFSOHBhMg18ZrzqIyurQmey0NILXsIKZuHkJr2aAyvrQmey1NDSOHxhFDAhYeQcnkISZuCiw0vKiXZmuC5PISkDSG58BCSNoTkwlNBSGHOhJCHkBKxNcFzeQipcY/8ANua4Lk85UMadFR64rXmIaQ8HkJq3SM+wrYmeC5P8ZCGHRWfeK15CCmLh5C0KbvY8CJfkq0JnstDSNoQkgtP6ZBuOiKkRAipqIeQtCkeknRJtiZ4Lg8hte8hpAyewiHddlTBxGvNQ0gZPITkwCM7yLYmeC5P2ZBGOqph4rXmISR9DyFpU36xQbgkWxM8l2fVQ/zfSsIfB/wrfQd2hLUjBeMIhbSWsR0Se6RE7m6P5DDb2lPk8hCSNoTkwkNIPjyC42xrgufylAxptKNKJl5rHkJS9hCSE4/cQNua4Lk8BUMa76iWideah5B0PYSkTRWLDS+CJdma4Lk85UKa6IiQEnm4PVJDbWuC5/IQkjaE5MJTLKSpjggpkcfbIzTW
tiZ4Lg8h+fEQkqKHkBx5ZAbb1gTP5SEkTx6R0bY1wXN5SoU02VFVE681DyGpeQhJm3oWG15kSrI1wXN5CoU03REhJTJvewTG29YEz+UhJG3qCkmgJFsTPJenTEh3OiKkROZuz+oRtzXBc3kIyZuHkFQ8RUK611F1E685z9ohtzXBc3kIyZ9n5ZjbmuC5PCVCuttRhROvOU9Y99lRUvOgKQ8haVPbYsOBNcNua4Ln8hQI6X5HhJTIsu1ZMe62JnguDyFpU2dIK0qyNcFzefKH9KAjQkpk6fYkP1EKF0TnxQ2E9MAoPCFSwbOwhHM+f6Ib0OyJkB4YxSdEGnhmp3TsZVKklBIh3RdqTIgU8Ox5sEPpJzQpUtktEdJ9oc6EWA6eE6NHZ2EsoXsi+ZYI6a5Pb0IsxPtiQ48wJEXk+ldsZg7pcUeElEiu7TFH4iAAAAdsSURBVMn2y2oJ6Z5uzTjJQkjyItHjO0K6Y1s3TqIQkopILiVCumNbPU5y4FES+fxE16whzemouYnXmifbZyMT0rRMZJyEwKMmkjm8I6RJl9A4yYBHUeTvY78ISRtniw0n3H3sV8aQ5nVESInUFZLA4R0hTZhEx2k9hKQs8vUhK4SkjduQ1u6UCGlcJD5OK8GjL3L0ISuEhEdR5OdDVnKFNLuj5iZea55sH1dESGMapXFaAZ4sIi8fspInpAUdNTfx/C42HEmeYYQ0YtEbp2QIKZModYoR0q1Ec5xSIaRcosRlcEK6ceiOUyKElE+UNMsIaahQH6ck8GQUpeyUCGmo0B+nFPBkFS2faIQ0MGQZp+XgyStaPNMIaWDIM06LwZNZtPTwjpD6glzjVKuHxYYLyyYbIfVuP+M41ekhpCuLZhshxWQdpyo9hBSx5PCOkGLyjlONHkLqMX/CEVJM7nHCU7to9owjpJjs44SndtHczzUmpJj844SnftGslAgppsg44aleNGPeEVJMoXGqyMNiwyiPD/AIKabUONXjIaQpHqRESDEFx6kSDyFNc3e3REgxRcepCg8h3WW6JUKKKT1OeOoXTbRESDEVjBOe+kVjKRFSzPLHtLWJ15pHSXT4Xeq9qUNIMcsf0dYmXmseTVEvJkKKWf5gtjbxWGxYxiUlQopZ/kASUhqthHRZfCCkmOUPIyGl0U5IL8djPEKKWf4YElIaTYW0Z+67xNdCSOvAU7noj8zvdH4IIa0DT+WivSfHXmk0pJvF+EcQEp5KRSePekuDkA4vaB0+s2fRrRASnkpFV49uS1FI54SOLLoVQtKGxQYJj+I8vYQUBp8dt+hWCEkbQhLx6O2VjiGFYUaEVJmHkKQ8SnP1v9GKXgipMg8hiXl09krjFb0QEp42RKMe8ZZCmNyeZbcje7duEHr8FMBTuWjKIzhlDzsjQloHnspFkx6pvdLpkI6Q1oGnctE9j0BKl2dGhLQOFhsqF933rEspXmAgpHUQUuWiR57klAbLdIS0DkKqXPTYk/Rk6Waxm5DWQUiVi2Z5FrY09poRIa0DT+WiuZ65/+MhTLzySkjrwFO5aInnUUxTEd31LJrohISnUtFST7j9aLxt/N+LFnsWTfQZIXU7xk7PYuGDcW+7hMFTuSjNEwas8AiH1F3+6p8mpHmw2GDVQ0hVeQjJqoeQqvIQklVPvpD+g8f8K30HQA+hkFSw9QGb/jzNbVCuz7UjJDxFRLY8hISnUpEtDyHhqVRky0NI2vzL5CGkop4F72zootOK2Hr8HkNILjx5fnPGEmw9fo8hJBceQtKGkFx4CAlPpSJbHkLCU6nIloeQ8FQqsuUhJDyVimx5CEkbFhtceAhJG0Jy4SEkbQjJhYeQtCEkFx5CwlOpyJaHkPBUKrLlISQ8lYpseQgJT6UiWx5C0obFBhceQtKGkFx4CEkbQnLhISRtCMmFh5DwVCqy5SEkPJWKbHkICU+lIlseQsJTqciWh5C0YbHBhYeQtCEkFx5C0oaQXHgISRtCcuGpLyQAgxASgACEBCAAIQEIQEgAAhASgACEBCAAIQEIQEgAAhASgACEBCBAPSHFvzA9wy9Pz8B1E9rYnm03cdoqkgNUTUjd5a/+abtEPxcuf5kmnmst/GAQHSBC0qLbNhZSF/8Et785wgNESHo0FlLLh3a355ZCSHoQUuW4CKmBg3BCqpxu8sxyqgxpv4jSwEgRUuW0H9LwtE0IqXIkt6fKkBqZeIRUOd3oyTQISQ9Cqpxu5FQq1YR0eXG5i07bprv83cb2RBvUUkjdcXxWjlA9IQEYhpAABCAkAAEICUAAQgIQgJAABCAkAAEIqQpCCOevJ/qnby688NEdLpm+6Y8WXvGpH0Kqgc9dHJ/7E8tDOpy5F9K9y0AMHuUaeAuv4e185jzzbwoYTeJhJ4SUBR7lGgjh9zrf54X03oWnj+Ne6njJ7s9reN3+PIXX393lX68hdJvzN2x/30J4+42uCLIQUgV87nZHb8dju+3MkDaHQ7yPXki7dMLfp91fb8eDxR2bc0jd/utTdEWQhZAqYB/R5+XY7hrS4BlR/A8h/Gy/Qhc9R9r383efzt/9uafwd7v9Pke2fd/9+66hj+iKIAohVUB/wWBWSF14++xfd1/I/hjxfAM/n+/Pl5CeDv+0O/K7XhFEIaTynA7Dzsd2sw7tPnfHak8/215I8bnt83Xxb3ttMLoiiEJI5Xk7zfLTsd3MVbvvp9B9TYb0Fp4+Pn9GQrpcEUQhpPJ0++Ox7e/5icvs5e+PXj/9kI5LdYNDu94VQRQe0eJ8nXZFb+G4n5gVUrf75u/BYsO2d+5r+3t9jrTZLzb8Dc/RFUEUQirO5hTQ536yb0cWG8be2XBcxX7f/2M3FtImeoPE7ht+D8vf4Tu6IohCSMW5fFjA6cSskLabLnT7HD7GQ9o/8Xr+2p86fMP253B+G10RRCEkAAEICUAAQgIQgJAABCAkAAEICUAAQgIQgJAABCAkAAEICUAAQgIQgJAABCAkAAH+Bz9z3UtlmI8JAAAAAElFTkSuQmCC", + "text/plain": [ + "plot without title" + ] + }, + "metadata": { + "image/png": { + "height": 420, + "width": 420 + } + }, + "output_type": "display_data" + } + ], + "source": [ + "# Plot density plot of ATE_estimates with ggplot\n", + "ggplot(data.frame(ATE_estimates), aes(x = ATE_estimates)) +\n", + " geom_density(fill = \"blue\", alpha = 0.5) +\n", + " geom_vline(aes(xintercept = ATE), color = \"red\", linetype = \"dashed\") +\n", + " labs(title = \"Density Plot of ATE Estimates\",\n", + " x = \"ATE Estimates\",\n", + " y = 
\"Density\") +\n", + " theme_minimal()" + ] + }, + { + "cell_type": "markdown", + "id": "645df4f8", + "metadata": {}, + "source": [ + "## Outcome missing under nonignorable nonresponse\n", + "Now consider a different setting, in which the outcomes are missing under nonignorable nonresponse assumptions in [Bia, Huber and Lafférs (2023)](https://doi.org/10.1080/07350015.2023.2271071). \n", + "Let the covariance matrix $\\sigma^2_X$ again be such that $a_{ij} = 0.5^{|i - j|}$, but now $\\gamma_0 = 1$ and $\\sigma^2_{\\varepsilon, \\upsilon} = \\begin{pmatrix} 1 & 0.8 \\\\ 0.8 & 1 \\end{pmatrix}$ to show a strong correlation between $\\varepsilon$ and $\\upsilon$. Let the vector of coefficients $\\beta$ again resemble a quadratic decay of coefficients importance; $\\beta_{0,j} = 0.4/j^2$ for $j = 1, \\ldots, p$.\n", + "\n", + "The directed acyclic graph (DAG) shows the the structure of the causal model." + ] + }, + { + "cell_type": "markdown", + "id": "bcfc2d49", + "metadata": {}, + "source": [ + "
*(Figure: DAG of the causal model; original embedded image with alt text \"Graph\")*\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "d7121a72", + "metadata": {}, + "source": [ + "### Data\n", + "\n", + "We will again use the implemented data generating process `make_ssm_data` to generate data according to the simulation in Appendix E of [Bia, Huber and Lafférs (2023)](https://doi.org/10.1080/07350015.2023.2271071). We will again leave the default ATE equal to $\\theta_0=1$.\n", + "\n", + "In this setting, we are estimating the ATE under nonignorable nonresponse, so we set `mar=False`. Again, the selection indicator `S` can be set via `s_col`. Further, we need to specify an intrument via `z_col`." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "b6f4f61f", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "================= DoubleMLData Object ==================\n", + "\n", + "\n", + "------------------ Data summary ------------------\n", + "Outcome variable: y\n", + "Treatment variable(s): d\n", + "Covariates: X1, X2, X3, X4, X5, X6, X7, X8, X9, X10, X11, X12, X13, X14, X15, X16, X17, X18, X19, X20, X21, X22, X23, X24, X25, X26, X27, X28, X29, X30, X31, X32, X33, X34, X35, X36, X37, X38, X39, X40, X41, X42, X43, X44, X45, X46, X47, X48, X49, X50, X51, X52, X53, X54, X55, X56, X57, X58, X59, X60, X61, X62, X63, X64, X65, X66, X67, X68, X69, X70, X71, X72, X73, X74, X75, X76, X77, X78, X79, X80, X81, X82, X83, X84, X85, X86, X87, X88, X89, X90, X91, X92, X93, X94, X95, X96, X97, X98, X99, X100\n", + "Instrument(s): z\n", + "Selection variable: s\n", + "No. Observations: 8000\n" + ] + } + ], + "source": [ + "set.seed(3141)\n", + "n_obs = 8000\n", + "df = make_ssm_data(n_obs=n_obs, mar=FALSE, return_type=\"data.table\")\n", + "dml_data = DoubleMLData$new(df, y_col=\"y\", d_cols=\"d\", z_cols = \"z\", s_col=\"s\")\n", + "print(dml_data)" + ] + }, + { + "cell_type": "markdown", + "id": "d7af8539", + "metadata": {}, + "source": [ + "### Estimation\n", + "\n", + "We will again use the `DoubleMLSSM` class. \n", + "\n", + "Further, will leave he learners for all nuisance functions to be the same as in the first setting, as the simulated quadratic decay of coefficients importance still holds.\n", + "\n", + "Now the learner `ml_g` is used to fit conditional expectations of the outcome $\\mathbb{E}[Y_i|D_i, S_i, X_i, \\Pi_i]$, whereas the learners `ml_m` and `ml_pi` will be used to estimate the treatment and selection propensity scores $P(D_i=1|X_i, \\Pi_i)$ and $P(S_i=1|D_i, X_i, Z_i)$." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "d0ccb8f7", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [], + "source": [ + "ml_g = lrn(\"regr.cv_glmnet\", nfolds = 5, s = \"lambda.min\")\n", + "ml_m = lrn(\"classif.cv_glmnet\", nfolds = 5, s = \"lambda.min\")\n", + "ml_pi = lrn(\"classif.cv_glmnet\", nfolds = 5, s = \"lambda.min\")" + ] + }, + { + "cell_type": "markdown", + "id": "5d9bea41", + "metadata": {}, + "source": [ + "The score is now set to `'nonignorable'`, since the parameters of the DGP were set to satisfy the assumptions of outcomes missing under nonignorable nonresponse." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "42a55e3f", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "================= DoubleMLSSM Object ==================\n", + "\n", + "\n", + "------------------ Data summary ------------------\n", + "Outcome variable: y\n", + "Treatment variable(s): d\n", + "Covariates: X1, X2, X3, X4, X5, X6, X7, X8, X9, X10, X11, X12, X13, X14, X15, X16, X17, X18, X19, X20, X21, X22, X23, X24, X25, X26, X27, X28, X29, X30, X31, X32, X33, X34, X35, X36, X37, X38, X39, X40, X41, X42, X43, X44, X45, X46, X47, X48, X49, X50, X51, X52, X53, X54, X55, X56, X57, X58, X59, X60, X61, X62, X63, X64, X65, X66, X67, X68, X69, X70, X71, X72, X73, X74, X75, X76, X77, X78, X79, X80, X81, X82, X83, X84, X85, X86, X87, X88, X89, X90, X91, X92, X93, X94, X95, X96, X97, X98, X99, X100\n", + "Instrument(s): z\n", + "Selection variable: s\n", + "No. Observations: 8000\n", + "\n", + "------------------ Score & algorithm ------------------\n", + "Score function: nonignorable\n", + "DML algorithm: dml2\n", + "\n", + "------------------ Machine learner ------------------\n", + "ml_g: regr.cv_glmnet\n", + "ml_pi: classif.cv_glmnet\n", + "ml_m: classif.cv_glmnet\n", + "\n", + "------------------ Resampling ------------------\n", + "No. folds: 5\n", + "No. repeated sample splits: 1\n", + "Apply cross-fitting: TRUE\n", + "\n", + "------------------ Fit summary ------------------\n", + " Estimates and significance testing of the effect of target variables\n", + " Estimate. Std. Error t value Pr(>|t|) \n", + "d 0.95305 0.03438 27.72 <2e-16 ***\n", + "---\n", + "Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1\n", + "\n", + "\n" + ] + } + ], + "source": [ + "dml_ssm = DoubleMLSSM$new(dml_data, ml_g, ml_m, ml_pi, score=\"nonignorable\")\n", + "dml_ssm$fit()\n", + "\n", + "print(dml_ssm)" + ] + }, + { + "cell_type": "markdown", + "id": "fb9ad184", + "metadata": {}, + "source": [ + "### ATE estimates distribution\n", + "\n", + "Here we again add a small simulation where we generate multiple datasets, estimate the ATE and collect the results (this may take some time). 
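Once the loop below has filled `ATE_estimates`, a few summary statistics complement the density plot further down. The sketch uses Python/numpy for brevity (the notebook itself is R) and a clearly labeled placeholder array in place of the collected estimates.

```python
# Sketch: summarize simulated ATE estimates around the true ATE = 1.0.
# The array below is a stand-in for the estimates collected by the loop.
import numpy as np

ATE = 1.0
rng = np.random.default_rng(42)
ATE_estimates = rng.normal(loc=ATE, scale=0.05, size=100)  # placeholder values

print("mean estimate:", ATE_estimates.mean().round(3))
print("bias:", (ATE_estimates.mean() - ATE).round(3))
print("std. dev.:", ATE_estimates.std(ddof=1).round(3))
```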
" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "f7a9eae4", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[1] \"Iteration: 10/100\"\n", + "[1] \"Iteration: 20/100\"\n", + "[1] \"Iteration: 30/100\"\n", + "[1] \"Iteration: 40/100\"\n", + "[1] \"Iteration: 50/100\"\n", + "[1] \"Iteration: 60/100\"\n", + "[1] \"Iteration: 70/100\"\n", + "[1] \"Iteration: 80/100\"\n", + "[1] \"Iteration: 90/100\"\n", + "[1] \"Iteration: 100/100\"\n" + ] + } + ], + "source": [ + "n_rep = 100\n", + "ATE = 1.0\n", + "ATE_estimates = rep(NA, n_rep)\n", + "\n", + "set.seed(42)\n", + "for (i_rep in seq_len(n_rep)) {\n", + " if (i_rep %% (n_rep %/% 10) == 0) {\n", + " print(paste0(\"Iteration: \", i_rep, \"/\", n_rep))\n", + " }\n", + " dml_data = make_ssm_data(n_obs=n_obs, mar=FALSE)\n", + " dml_ssm = DoubleMLSSM$new(dml_data, ml_g, ml_m, ml_pi, score='nonignorable')\n", + " dml_ssm$fit()\n", + " ATE_estimates[i_rep] = dml_ssm$coef\n", + "}" + ] + }, + { + "cell_type": "markdown", + "id": "797f5d2e", + "metadata": {}, + "source": [ + "And plot the estimates distribution" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "bcaebd62", + "metadata": { + "vscode": { + "languageId": "r" + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA0gAAANICAMAAADKOT/pAAAAPFBMVEUAAABNTU1oaGh1dfV8fHx/f/+MjIyampqnp6eysrK9vb3Hx8fQ0NDZ2dnh4eHp6enr6+vw8PD/AAD///+Zzx6PAAAACXBIWXMAABJ0AAASdAHeZh94AAAgAElEQVR4nO2di3baOhAA5TQPkuZxU/7/Xy9vZGODLe9KWu3MOW2gBAYjTWwEJWELAKsJpe8AQAsQEoAAhAQgACEBCEBIAAIQEoAAhAQgACEBCEBIAAIQEoAAK0MKR7q379lX2P31+3b31sLzx+VbY6autr8khKf4H7rQDW5zz+3Z0e8ZGm/uyNgdePgt0DQyIe3YzL3CdnpmXm/teey7pif06+kqZz535z+Ht5kQ0vHsjJBmfAs0zeqQDl9+PkJ4X3ytyX//6sLHopBC6O8RX8MmvI5ec/Q2Jm94dh+E5B2ZkLbb7xB+l19r6t+/9vuXRSH1zv7ujvOe4vtDSKCMVEjbzXGX9PW8O8j6PF206cLT4fT32/551NfpCodDqN/zk5rf67Ob/nw/nvt+605PwQZHXoML4g153+3QPuJd5LKQLvf2dLvnw9H34+Z87L58HLf5abexX/Fdu25+tM3gALGQvg/HUn+vT5h2M+pw+nO/gwnnk5eQduUdJ9zf64wfCekzvmqcy/CCeEO63d7o97LccHPDd7Ziz/Xe9kN6P5z9ejt8+Thohnct2vxom8EBYiEdTn4ffkB/Px8nV/e1/X3dH6Q9hb/b/Sx7ihcbfk57oufwc3Nrn+dDu90R4/suik3Yf1NvfxRfMCjh89D0WzSHF4U0vLfn+9x97ve73fHL036/t/8J8B4tjMSbH90KOEA2pM3xicnvfiYffxj/Xg/Srlc4nn89fcPTza19dued1+a0Gvi2/xrP994FgxKON/wZLTfEIY2szw0X9gbXO/3raXPOx63nZ2HRdfqbf/+Rg7aQDekpmpCni/ZfXndPFv7+XK9w/vl9PBh8j27izOZyg8er/Vz3D0d6F/Rn7fmgrrsuNywKaXhvz/f5N7ql05fvz/fnaIvizY9uBRwgGFIXz8jelPs5PJs4PEOPf+a/7hetoyO789W718tP/cvtD3Zs/Qv6Ib1f7sTEk697W7G9vbc9RfzloxvsxeLNj24FHCAW0tfweKY38z7fThM7npT7XVJ8ZDeY0KkhdZfp3N1896xVu8G9nQrpI4Tnzd+fXkijtwIOEAtps39y3UUvjPZn3mE5uBvM+93e6O/4KvX5XMKh3fW50etluWFhSP17OxXSU/gaXNoNXhc+3Qo4QCqk78OMeQtvxzPPtyFFe5XLokLYxEd2IyFtjjc4ttgQXdC75DWcX7v5uiS1PKR4HzgR0mlfG10ab/69m4b2kAlp/xah/bOB78Mywfdl0W17/tm9XwreXA7+wrmep663PHwb0u7AaXNc5f6OrrYdXBBfMT5WvLy7YVFIvXt7WVwf2yN9HBYYr1sUb350K+CA1SHF62yXl0mjvcT+y+nFye78WtDTec3ss/8c4jak+HXX69W2gwviK75HLx99nm99dNUu3lXG/xjd26frssnoc6QjX5e7Fm1+dCvgAJmQnjbnJwc/m/MbaOKZ93V4u8zlp/v30+mpw2+8kxkNKXon0PVq28EF8RW7+EnJ+cyikKJ7ezTeWbXr3r4Oz8nOd+26+dGtgAPKHsJ/8MI/tEHRkHbPJnhTJzRBwZCuz6wArFMwpCde94dm4GUOAAEICUAAQgIQgJAABCAkAAEICUAAQgIQgJAABCAkAAHqC+m/xjz/MnlybU9zAyTjISQ8lYpseQgJT6UiWx5CwlOpyJaHkPBUKrLlISRtWGxw4SEkbQjJhYeQtCEkFx5C0oaQXHgICU+lIlseQsJTqciWh5DwVCqy5SEkPJWKbHkISRsWG1x4CEkbQnLhISRtCMmFh5C0ISQXHkLCU6nIloeQ8FQqsuUhJDyVimx5CAlPpSJbHkLShsUGFx5C0oaQXHgISRtCcuEhJG0IyYWHkPBUKrLlISQ8lYpseQgJT6UiWx5CwlOpyJZnVUj/wWP+lb4DoIdQSCrY+kH0GFbtXHgISRtCcuEhJG0IyYWHkCQJ4fbxtLw9RUW2PIQkRwgvLy9hGJPd7SkssuUhJCHCIaMjvZSMbk95kS0PIUkQV3
RMScdzD0Iq6iEkAQYV9UtiscGFh5BWM9wbDQ7vCMmFh5DWMppRtFMiJBceQlrJZEeE5MpDSKsYP6zrlWRqe2oS2fIQ0hruZXQuydL2VCWy5SGkFTzo6FiSoe2pS2TLQ0jpPOyIkPx4CCmZxx0dSmKxwYWHkFKZ0xEhufEQUhp3l+t6JRGSCw8hJTEzo31JhOTCQ0gpzO/oJVjYnipFtjyElMCCjnpvBFeFkIp6CGk5c58fEZIjDyEtZlFGLy9/Mj3EhFTUQ0hLWdjRyz9C8uAhpKUsDinTwR0hFfUQ0kKWdkRIPjyEtIzFHe1CylMSIRX1ENIilnf08oeQPHgIaQkJHRGSDw8hLSExpCwlEVJRDyEtIKUjQvLhIaT5JHW0X2wgpPY9hDSbtI4IyYeHkGazIqQcJRFSUQ8hzSWxI0Ly4SGkuaSG9IeQPHgIaSapHR1DylASIRX1ENI8kjsiJB8eQprH2pD0SyKkoh5CmkV6R8fFBkJq3UNIsyCk/CJbHkKaw4qOziGpl0RIRT2ENAdCKiCy5SGkGazp6LzYQEhtewhpBoRUQmTLQ0iPWdXRJSTtkgipqIeQHkNIRUS2PIT0kHUdXRYbCKlpDyE9hJDKiGx5COkRKzu6hqRcEiEV9RDSIwipkMiWh5AesLaj62KDckmEVNRDSA8gpFIiWx5Cus/qjgjJh4eQ7kNIxUS2PIR0n/Uh/YtOaz7ahFTUQ0h3Wd8RIfnwENJdCKmcyJaHkO4h0BEh+fAQ0j0kQvoTn1F8uAmpqIeQ7iDRESH58BDSHeRDUiyJkIp6COkOhFRSZMtDSNOIdNRbbCCkZj2ENA0hFRXZ8hDSNIRUVGTLQ0jTaISkVxIhFfUQ0iQyHQ0WGwipUQ8hTUJIZUW2PIQ0CSGVFdnyENIUQh0Rkg8PIU0hFRKLDS48hDSBVEfDkNRKIqSiHkKagJBKi2x5CGkCQiotsuUhpAnEQhosNhBSmx5CGkeso5uQtEoipKIeQhqHkIqLbHkIaRxCKi6y5SGkceRCGi42aJVESEU9hDSKXEeE5MNDSKMQUnmRLQ8hjUJI5UW2PIQ0imBIN4sNhNSih5DGEOxoJCSdkgipqIeQxiCkCkS2PIQ0gmRHhOTDQ0gjiIZ0u9hASA16CGkEQqpBZMtDSCMQUg0iWx5CukW0o7GQVEoipKIeQrpFNqSRxQZCas/zeEi7A/EZEfEk5R8/QqpCZMszc0i7wVdFyj9+hFSFyJZn3pB2Nyf0KP74yXY0GpJGSYRU1LMspAwdlX/8hEMaW2wgpOY8s0b0ukPqP0X6r0XCH1n+jf1jKL2VIEJySL1zOpT+QSS8Q2KP5MOzLKSRc+KUfvyyhKRQEiEV9cwZ0O7uWWlKP37SIY0uNhBSa55lITk4tJPuiJB8eJaHpL1yR0ii2yOPrQmey7MgpENF6m9sKP34EVItIlse3ms3QDyk8cUG+ZIIqaiHkAYQUi0iWx5CGkBItYhseQipj3hHhOTDQ0h95EMaX2yQL4mQinoIqQ8hVSOy5SGkPoRUjciWh5B6yHdESD48hNRDIaSJxQbxkgipqIeQehBSPSJbHkKKUeiIkHx4CCmGkCoS2fIQUoxGSFOLDYTUlIeQYrKGJFwSIRX1EFKERkeE5MNDSBGEVJPIloeQIlRCmlxsIKSWPIQUQUg1iWx5CCmCkGoS2fIQUgQh1SSy5SGkKyod3VlskC2JkIp6COkKIVUlsuUhpCuEVJXIloeQrhBSVSJbHkK6ohPS9GIDITXkIaQLOh3dC0m0JEIq6iGkC4RUl8iWh5AuEFJdIlseQjqj1NG9xQZCasdDSGdKhCRZEiEV9RDSGUKqTGTLQ0hnCKkykS0PIZ3Q6ujuYgMhNeMhpBOEVJvIloeQTpQJSbAkQirqIaQThFSbyJaHkE6ohXR3sYGQWvEQ0hG1jgjJh4eQjhBSdSJbHkI6QkjViWx5COmIXkgsNrjwENIBvY4ehCRXEiEV9RDSAUKqT2TLQ0gHCKk+kS0PIR1QDOn+YgMhNeIhpD2KHRGSDw8h7SkYklhJhFTUQ0h7CKlCkS0PIe3RDOnBYgMhteEhpD2EVKHIloeQ9pQMSaokQirqIaQ9hFShyJaHkLa6HT1cbCCkJjyEtCWkOkW2PIS0JaQ6RbY8hLQtHZJQSYRU1ENIW+WQHi42EFILHkJS7oiQfHgIiZAqFdnyEBIhVSqy5SEk7ZBYbHDhISTljgjJh4eQCKlSkS0PIZUPSaYkQirqISTtkB4vNhBSAx5CIqRKRbY8hKTcESH58BASIVUqsuUhJO2QZiw2iJRESEU9hERIlYpseQiJkCoV2fIQEiFVKrLlcR+SdkdzFhtESiKkoh5CIqRKRbY8hERIlYpseQiJkCoV2fIQknZIcxYbCMm8x3tI6h3NC0mgJEIq6iEkbQjJhYeQtCEkFx5C0mbWYgMhWfc4D0m/I0Ly4SEkbeaFtL4kQirqISRtCMmFh5C0mbfYQEjGPYSkDSG58PgOKUNHc0NaXRIhFfUQkjaE5MJDSNrMXGwgJNseQtKGkFx4CEkbQnLhWTV8/xkn/MnAv7nfGEo/HLAYoZBUyPiDKMcOafZiw9pdEnukoh5C0oaQXHgISRtCcuHxHFKWjgjJh4eQtJm7akdIpj2EpA0hufAQkjazQ1pZEiEV9RCSNoTkwuM4pDwdzV9sICTLHkLShpBceAhJG0Jy4SEkbeaHtK4kQirqISRt5i82EJJhDyFpQ0guPISkDSG58PgNKVNHS0JaVRIhFfUQkjYLFhsIya6HkLQhJBceQtKGkFx4CEkbQnLhcRtSWDLB17BksWFNSYRU1ENI2hCSCw8haUNILjxeQwqLnrusgZBceAhJm0UeQrLqISRtCMmFh5C0WeZJHw9CKuohJG0IyYXHaUhh6QRPZ9FiAyFZ9RCSNoTkwkNI2hCSCw8habMspPSSCKmoh5C0WeghJJseQtKGkFx4CEkbQnLh8RlSWD7Bk1nqSR0RQirqISRtFi42EJJNDyFpQ0guPISkDSG58BCSNoTkwuMypMPnntS62JBaEiEV9RCSNoTkwkNI2hCSCw8haUNILjweQwppEzyRpYsNhGTSQ0jaLA4psSRCKuohJG0IyYWHkLQhJBcehyGdPj2/2sWGxJIIqaiHkLQhJBceQtKGkFx4CEkbQnLhISRtli82EJJBDyFpkxBSUkmEVNRDSNoQkguPv5DOvzuWkBKxNcFzeQhJmxQPIZnzEJI2hOTCQ0jaEJILDyFpQ0guPO5COndU9WJDSkmEVNRDSNoQkgsPIWlDSC483kK6dERIqdia4Lk8hKRNmmf5uBBSUQ8haUNILjyEpA0hufAQkjaJnsUDQ0hFPc5CunZU92IDIVnzEJI2hOTCQ0jaEJILDyFpQ0guPISkTapn6cgQUlGPr5CijggpFVsTPJeHkLQhJBceQtKGkFx4XIUUd1T7YgMh2fIQkjaE5MJDSNoQkgsPIWmTGtLSkgipqMdTSL2Oql9sICRTHkLShpBceAhJG0Jy4SEkbdI9y8aGkIp6CEmb5MUGQrLkISRtC
MmFx1FI/Y4MhLSsJEIq6iEkbQjJhYeQtFnhISQ7HkLShpBceAhJG0Jy4fET0qAjCyEtKomQinoISZsViw2EZMczY6i6PdFpEe80hHSFkMx45oQ0OKlcktLjN+yIkFKxNcFzeQhJmzUhLSmJkIp6Ho9UNzxNSItY5SEkK54ZIcVPkS5/He+CIcIfi4TSDxvcZVFIUTx290g3OyQbe6QFuyT2SEU9MweKkJIhJBceQtJm1WIDIVnxcGinDSG58MwLaWKxQQVCiiEkI56Z72w4fI1OK0JIPWaXREhFPU7ea3fbkZHFBkIy4iEkbQjJhYeQtCEkFx5C0oaQXHgISRsWG1x4fIQ00pGVkGaXREhFPYSkDSG58LgIaawjQkrF1gTP5SEkbdZ6CMmEh5C0We2ZOUSEVNRDSNoQkguPh5BGOyKkVGxN8FweQtJm7WLD3JIIqaiHkLQhJBeewSA9vX+J3OwKCGkIIRnwDAYphNC9fYrcciqENISQDHgGg/T793XXUnj++yNy6ymIP37jHdlZbCAkC56RQfrcdLuWnkrtlwjphlklEVJRz9gY/WzCYbckIliM9OM30REhpWJrgufy3I7R9+thd/T1HF5FDEshpBsIqX7PcIw+ny9HdaHM0nhrIa1fbCAkA57h8ncIr9/ni7Q/L2gcQrqBkOr3DJe/N9/j35cP4cdvqiNCSsXWBM/lGS5/i9zoKgjpljklEVJRz80Lssev6h8DOU1rIUl4CKl6TzxEXYgQufUUCOkWQqreEw/RR9TRh8itp0BItxBS9Z6JQ7uCENIIM4aFkIp6yoczRPbxm+zI1GIDIVXviUdotztq7jkSIYlja4Ln8hCSNoTkwsOhnTYiIc0oiZCKehoPabojW4sNhFS7ZzhAH912+xW6d5EbT4KQxiCkyj2DAfrYPTn62b8wW64kycfvTkeElIqtCZ7Lc/Pu76/dn4/vQu/83kNIYxBS5Z7bF2Q/w1PRF2ZbC0lmseFxSYRU1DMYny78vIXv/bMkkVtPgZBGIaS6PYPxed9/Htd+h7QRufUUBB+/ex0RUiq2Jnguz3B8NqH73O2YynVESOMQUt2epl9HqiIkIQ8h1e0hJG0IyYXn5tCua+i9di2F9LAkQirqGQzPpqU3rd7tiJBSsTXBc3lulr/L/dfYE62FJLTYQEh1e1r+H7KEpIKtCZ7LMxie11D8A7kIaYIHJRFSUc9gdH6653K/0OWI2ON3vyNCSsXWBM/luf1FY80sNlQSkpiHkGr2tBvSg47shfSgJEIq6im/uDCEkKYgpIo9hKQNIbnw3AzOx+vusO654O+kaC0kscUGQqrZM/xtFE+H50chfIncegpCj9+jjggpFVsTPJdnMDhvYbN/UfZvqV8guyWkO9wtiZCKekbe2XD+UwhCmoSQ6vW0GtLDjgwuNhBSxZ7xQ7tNeBO59RQIaRJCqtczXGw4/XekrtwbhUS263FHFkO6WxIhFfXcDM37UwhPm4JvXSWkaQipWk+jL8hWFJLgYgMh1eshJG0kQ7pXEiEV9fRH5vf9efcE6bXo/5KV2K4ZHRFSKrYmeC5Pb2Q+z5980pV7YwMh3YOQavXEI/MTwtv+TXZfr6Hgf5RtLSRRDyHV6olH5vrq0ZvxjywmJEVsTfBcnnhkunB+9ehn/xspCiGwXXM6shnSnZIIqahn8MuYR07mhpDuQUiVeghJG9HFBkKq1UNI2hCSC0+LIc3qiJBSsTXBc3n6IYUmPkWIkFSxNcFzeRoMaV5HLDakYmuC5/I0+F67pkOaLomQinoISRtCcuFpL6SZHRFSKrYmeC4PIWkju9hASJV6CEkb4ZAmSyKkoh5C0oaQXHiaC2luR4SUiq0JnstDSNqIeyaGjJCKeghJG0Jy4WktpNkdEVIqtiZ4Lg8haUNILjyNhTS/I7OLDYRUpYeQtBEPaaIkQirqISRtCMmFp62QFnRESKnYmuC5PISkjbyHkCr0EJI2hOTCsyqk/2oj/PFAKP0wwxmhkFRY8QNiyQ6JPVIqtvYUuTyEpI38YsN4SYRU1ENI2hCSC09LIS3qiJBSsTXBc3kISRtCcuFpKKRlHRlebBgviZCKeghJG0Jy4SEkbQjJhaedkBZ2ZDqksZIIqaiHkLRRWGwgpPo8hKQNIbnwNBPS0o4IKRVbEzyXh5C0ISQXnlZCWtyR7cWGkZIIqaiHkLQhJBceQtKGkFx4GglpeUeElIqtCZ7LQ0jaqCw2EFJtHkLSRiek25IIqainjZASOiKkVGxN8FweQtKGkFx4CEkbLc9w5AipqKeJkFI6IqRUbE3wXB5C0oaQXHhaCCmpI0JKxdYEz+UhJG2UFhsIqS5PAyGldWQ+pGFJhFTUQ0jaEJILDyFpQ0guPISkjZqHkGry2A8psSNCSsXWBM/lISRtCMmFx3xIqR0RUiq2JnguDyFpo7bYMCiJkIp6rIeU3BEhpWJrgufyEJI2hOTCYzyk9I4IKRVbEzyXh5C0UfT0Bo+QinoISRtCcuGxHdKKjggpFVsTPJeHkLQhJBce0yGt6aiBxYZ+SYRU1ENI2hCSC4/lkFZ1REip2JrguTyEpA0hufAQkjaaHkKqxmM4pHUdNRFSXBIhFfUQkjaE5MJjN6SVHRFSKrYmeC4PIWmjudhASNV4zIa0tiNCSsXWBM/lISRtCMmFx2pIqztqI6SoJEIq6iEkbXQ9hFSJh5C0ISQXHqMhre+IkFKxNcFzeQhJG0Jy4bEZkkBHjSw2XEsipKIeQtKGkFx4TIYk0REhpWJrgufyEJI2yiFdSiKkoh6LIYl01MhiAyFV4iEkbQjJhYeQtCEkFx6DIcl0REip2JrguTyEpI32YsO5JEIq6rEXklBHhJSKrQmey2MuJKmOCCkVWxM8l4eQtCEkFx5rIYl11MxiAyFV4SEkbQjJhcdYSHIdEVIqtiZ4Lg8haaPvCY8fN0lsTfBcHlshCXbUzmIDIdXgISRtCMmFx1RIkh0RUiq2JnguDyFpox/SsSRCKuohJG0yeAipvMdSSKIdEZL8AHn2EJI2OTzh7uOWbYA8ewyFJNsRIYkPkGsPIWmTYbGBkMp77IQk3BEhSQ+Qb4+ZkKQ7IiThAXLuISRtcoS0L4mQinoISZssHkIq7bESknhHhCQ7QN49hKQNIbnwGAlJviNCEh0g9x5C0ibLYsOuJEIq6rERkkJHhCQ5QHgISRtCcuExEZJGR42F9BIIqahnRkjdjvh0d++b19NaSJk8hFTW8zik7vJX9FWR2+1S6YiQ5AYIDyHpQ0guPDOfI3W9L6rcbJdOR4QkNkB4totD6j9F+i8L4Y9p/mXyhDyjATFLQ+od2WVebFDaIbW2aveSbf3V1p4il2dZSLdn5CGkNAipqGfWw9/dOScOIaVBSEU9cx7+rn8qb0haHbW22EBIZT1zXpDtn9ReuSOkNAipqGfG60jnpbpu23+XgxL97VLrqLmQ/uQqydYEz+Wp/b12hDTbQ0glPZWHpNdRc4sNhFTUQ0jaEJILDyFpky2kXMsNtiZ4Lk/dISl2REgSA4TnDCFpk89DSAU9
hKRNRk+ewbQ1wXN5qg5JsyNCEhggPBcISRtCcuGpOSTVjhpcbCCkgh5C0oaQXHgqDkm3oxZDylOSrQmey0NI2hCSC0+9ISl31OJiAyGV8xCSNoTkwkNI2hCSC0+1IWl31GRIWUqyNcFzeQhJm5yLDYRUzFNrSOodEdK6AcLTh5C0ISQXnkpD0u+ozZBylGRrgufyEFJbHkIq5CGktjyEVMhDSI159AfU1gTP5akzpAwdEdKaAcqBLQ8haZN3sYGQCnmqDClHR4S0YoCyYMtDSNpkDkm/JFsTPJenxpCydERI6QOUB1seQmrNQ0hFPITUmoeQingqDClPR82GpF6SrQmey0NIzXkIqYSHkLTJvdigXpKtCZ7LU19ImToipFRsTfBcHkLShpBceAhJG0Jy4akupNDsIkA+j+6g2prguTyE1KCHkPJ7agsptDzBc3kIKb+HkBr0EFJ+DyFpk3+xgZAKeCoLKbS3pygQkm5JtiZ4Lg8haUNILjyEpA0hufAQUoseQsruqSukUGjiNefRHFZbEzyXh5Ca9BBSbk9VIYViE685j+K42prguTyEpE2JxQZCyu6pKaQwMiFyTTw9CMmFh5C0ISQXHkLSpkxIiiXZmuC5PBWFFEYnRK6J15qHkPJ6CKlRDyHl9RBSox5CyuupJ6QwPiFyTbzWPISU10NI2hRabNArydYEz+WpJqQwNSFyTTwtCMmFh5C0ISQXHkLSplRIaiXZmuC5PLWEFKYnRK6J15qHkHJ6CKldj9LY2prguTyE1K6HkDJ6CKldDyFl9FQSUrg3IXJNPB2KLTYQUk4PIWlTLiSlkmxN8FyeOkIK9ydEromnAiG58BCSNoTkwlNFSOHBhMg18ZrzqIyurQmey0NILXsIKZuHkJr2aAyvrQmey1NDSOHxhFDAhYeQcnkISZuCiw0vKiXZmuC5PISkDSG58BCSNoTkwlNBSGHOhJCHkBKxNcFzeQipcY/8ANua4Lk85UMadFR64rXmIaQ8HkJq3SM+wrYmeC5P8ZCGHRWfeK15CCmLh5C0KbvY8CJfkq0JnstDSNoQkgtP6ZBuOiKkRAipqIeQtCkeknRJtiZ4Lg8hte8hpAyewiHddlTBxGvNQ0gZPITkwCM7yLYmeC5P2ZBGOqph4rXmISR9DyFpU36xQbgkWxM8l2fVQ/zfSsIfB/wrfQd2hLUjBeMIhbSWsR0Se6RE7m6P5DDb2lPk8hCSNoTkwkNIPjyC42xrgufylAxptKNKJl5rHkJS9hCSE4/cQNua4Lk8BUMa76iWideah5B0PYSkTRWLDS+CJdma4Lk85UKa6IiQEnm4PVJDbWuC5/IQkjaE5MJTLKSpjggpkcfbIzTWtiZ4Lg8h+fEQkqKHkBx5ZAbb1gTP5SEkTx6R0bY1wXN5SoU02VFVE681DyGpeQhJm3oWG15kSrI1wXN5CoU03REhJTJvewTG29YEz+UhJG3qCkmgJFsTPJenTEh3OiKkROZuz+oRtzXBc3kIyZuHkFQ8RUK611F1E685z9ohtzXBc3kIyZ9n5ZjbmuC5PCVCuttRhROvOU9Y99lRUvOgKQ8haVPbYsOBNcNua4Ln8hQI6X5HhJTIsu1ZMe62JnguDyFpU2dIK0qyNcFzefKH9KAjQkpk6fYkP1EKF0TnxQ2E9MAoPCFSwbOwhHM+f6Ib0OyJkB4YxSdEGnhmp3TsZVKklBIh3RdqTIgU8Ox5sEPpJzQpUtktEdJ9oc6EWA6eE6NHZ2EsoXsi+ZYI6a5Pb0IsxPtiQ48wJEXk+ldsZg7pcUeElEiu7TFH4iAAAAdsSURBVMn2y2oJ6Z5uzTjJQkjyItHjO0K6Y1s3TqIQkopILiVCumNbPU5y4FES+fxE16whzemouYnXmifbZyMT0rRMZJyEwKMmkjm8I6RJl9A4yYBHUeTvY78ISRtniw0n3H3sV8aQ5nVESInUFZLA4R0hTZhEx2k9hKQs8vUhK4SkjduQ1u6UCGlcJD5OK8GjL3L0ISuEhEdR5OdDVnKFNLuj5iZea55sH1dESGMapXFaAZ4sIi8fspInpAUdNTfx/C42HEmeYYQ0YtEbp2QIKZModYoR0q1Ec5xSIaRcosRlcEK6ceiOUyKElE+UNMsIaahQH6ck8GQUpeyUCGmo0B+nFPBkFS2faIQ0MGQZp+XgyStaPNMIaWDIM06LwZNZtPTwjpD6glzjVKuHxYYLyyYbIfVuP+M41ekhpCuLZhshxWQdpyo9hBSx5PCOkGLyjlONHkLqMX/CEVJM7nHCU7to9owjpJjs44SndtHczzUmpJj844SnftGslAgppsg44aleNGPeEVJMoXGqyMNiwyiPD/AIKabUONXjIaQpHqRESDEFx6kSDyFNc3e3REgxRcepCg8h3WW6JUKKKT1OeOoXTbRESDEVjBOe+kVjKRFSzPLHtLWJ15pHSXT4Xeq9qUNIMcsf0dYmXmseTVEvJkKKWf5gtjbxWGxYxiUlQopZ/kASUhqthHRZfCCkmOUPIyGl0U5IL8djPEKKWf4YElIaTYW0Z+67xNdCSOvAU7noj8zvdH4IIa0DT+WivSfHXmk0pJvF+EcQEp5KRSePekuDkA4vaB0+s2fRrRASnkpFV49uS1FI54SOLLoVQtKGxQYJj+I8vYQUBp8dt+hWCEkbQhLx6O2VjiGFYUaEVJmHkKQ8SnP1v9GKXgipMg8hiXl09krjFb0QEp42RKMe8ZZCmNyeZbcje7duEHr8FMBTuWjKIzhlDzsjQloHnspFkx6pvdLpkI6Q1oGnctE9j0BKl2dGhLQOFhsqF933rEspXmAgpHUQUuWiR57klAbLdIS0DkKqXPTYk/Rk6Waxm5DWQUiVi2Z5FrY09poRIa0DT+WiuZ65/+MhTLzySkjrwFO5aInnUUxTEd31LJrohISnUtFST7j9aLxt/N+LFnsWTfQZIXU7xk7PYuGDcW+7hMFTuSjNEwas8AiH1F3+6p8mpHmw2GDVQ0hVeQjJqoeQqvIQklVPvpD+g8f8K30HQA+hkFSw9QGb/jzNbVCuz7UjJDxFRLY8hISnUpEtDyHhqVRky0NI2vzL5CGkop4F72zootOK2Hr8HkNILjx5fnPGEmw9fo8hJBceQtKGkFx4CAlPpSJbHkLCU6nIloeQ8FQqsuUhJDyVimx5CEkbFhtceAhJG0Jy4SEkbQjJhYeQtCEkFx5CwlOpyJaHkPBUKrLlISQ8lYpseQgJT6UiWx5C0obFBhceQtKGkFx4CEkbQnLhISRtCMmFh5DwVCqy5SEkPJWKbHkICU+lIlseQsJTqciWh5C0YbHBhYeQtCEkFx5C0oaQXHgISRtCcuGpLyQAgxASgACEBCAAIQEIQEgAAhASgACEBCAAIQEIQEgAAhASgACEBCBAPSHFvzA9wy9Pz8B1E9rYnm03cdoqkgNUTUjd5a/+abtEPxcuf5kmnmst/GAQHSBC0qLbNhZSF/8Et785wgNESHo0FlLLh3a355ZCSHoQUuW4CKmBg3BCqpxu8sxyqgxpv4jSwEgRUuW0H9LwtE0IqXI
kt6fKkBqZeIRUOd3oyTQISQ9Cqpxu5FQq1YR0eXG5i07bprv83cb2RBvUUkjdcXxWjlA9IQEYhpAABCAkAAEICUAAQgIQgJAABCAkAAEIqQpCCOevJ/qnby688NEdLpm+6Y8WXvGpH0Kqgc9dHJ/7E8tDOpy5F9K9y0AMHuUaeAuv4e185jzzbwoYTeJhJ4SUBR7lGgjh9zrf54X03oWnj+Ne6njJ7s9reN3+PIXX393lX68hdJvzN2x/30J4+42uCLIQUgV87nZHb8dju+3MkDaHQ7yPXki7dMLfp91fb8eDxR2bc0jd/utTdEWQhZAqYB/R5+XY7hrS4BlR/A8h/Gy/Qhc9R9r383efzt/9uafwd7v9Pke2fd/9+66hj+iKIAohVUB/wWBWSF14++xfd1/I/hjxfAM/n+/Pl5CeDv+0O/K7XhFEIaTynA7Dzsd2sw7tPnfHak8/215I8bnt83Xxb3ttMLoiiEJI5Xk7zfLTsd3MVbvvp9B9TYb0Fp4+Pn9GQrpcEUQhpPJ0++Ox7e/5icvs5e+PXj/9kI5LdYNDu94VQRQe0eJ8nXZFb+G4n5gVUrf75u/BYsO2d+5r+3t9jrTZLzb8Dc/RFUEUQirO5hTQ536yb0cWG8be2XBcxX7f/2M3FtImeoPE7ht+D8vf4Tu6IohCSMW5fFjA6cSskLabLnT7HD7GQ9o/8Xr+2p86fMP253B+G10RRCEkAAEICUAAQgIQgJAABCAkAAEICUAAQgIQgJAABCAkAAEICUAAQgIQgJAABCAkAAH+Bz9z3UtlmI8JAAAAAElFTkSuQmCC", + "text/plain": [ + "plot without title" + ] + }, + "metadata": { + "image/png": { + "height": 420, + "width": 420 + } + }, + "output_type": "display_data" + } + ], + "source": [ + "# Plot density plot of ATE_estimates with ggplot\n", + "ggplot(data.frame(ATE_estimates), aes(x = ATE_estimates)) +\n", + " geom_density(fill = \"blue\", alpha = 0.5) +\n", + " geom_vline(aes(xintercept = ATE), color = \"red\", linetype = \"dashed\") +\n", + " labs(title = \"Density Plot of ATE Estimates\",\n", + " x = \"ATE Estimates\",\n", + " y = \"Density\") +\n", + " theme_minimal()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "R", + "language": "R", + "name": "ir" + }, + "language_info": { + "codemirror_mode": "r", + "file_extension": ".r", + "mimetype": "text/x-r-source", + "name": "R", + "pygments_lexer": "r", + "version": "4.4.3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/doc/examples/did/mpdta.rda b/doc/examples/did/mpdta.rda new file mode 100644 index 00000000..29d1597f Binary files /dev/null and b/doc/examples/did/mpdta.rda differ diff --git a/doc/examples/py_double_ml_did.ipynb b/doc/examples/did/py_did.ipynb similarity index 99% rename from doc/examples/py_double_ml_did.ipynb rename to doc/examples/did/py_did.ipynb index dd6587e1..58b9e8ac 100644 --- a/doc/examples/py_double_ml_did.ipynb +++ b/doc/examples/did/py_did.ipynb @@ -5,8 +5,21 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Python: Difference-in-Differences\n", - "\n", + "# Python: Difference-in-Differences" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Remark:**\n", + "*This notebook is based on the deprecated version of the DiD implementation of DoubleML. Please check out the [DiD Section](https://docs.doubleml.org/dev/guide/models.html#difference-in-differences-models-did) to find out about the current implementation.*" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ "In this example, we illustrate how the [DoubleML](https://docs.doubleml.org/stable/index.html) package can be used to estimate the average treatment effect on the treated (ATT) under the conditional parallel trend assumption. The estimation is based on [Chang (2020)](https://doi.org/10.1093/ectj/utaa001), [Sant'Anna and Zhao (2020)](https://doi.org/10.1016/j.jeconom.2020.06.003) and [Zimmert et al. 
(2018)](https://arxiv.org/abs/1809.01643).\n", "\n", "In this example, we will adopt the notation of [Sant'Anna and Zhao (2020)](https://doi.org/10.1016/j.jeconom.2020.06.003).\n", @@ -24,7 +37,7 @@ "\n", "- **Overlap:** For some $\epsilon > 0$, $P(D_i=1) > \epsilon$ and $P(D_i=1|X_i) \le 1-\epsilon$ a.s.\n", "\n", - "For a detailed explanation of the assumptions see e.g. [Sant'Anna and Zhao (2020)](https://doi.org/10.1016/j.jeconom.2020.06.003) or [Zimmert et al. (2018)](https://arxiv.org/abs/1809.01643).\n" + "For a detailed explanation of the assumptions see e.g. [Sant'Anna and Zhao (2020)](https://doi.org/10.1016/j.jeconom.2020.06.003) or [Zimmert et al. (2018)](https://arxiv.org/abs/1809.01643)." ] }, { diff --git a/doc/examples/py_double_ml_did_pretest.ipynb b/doc/examples/did/py_did_pretest.ipynb similarity index 99% rename from doc/examples/py_double_ml_did_pretest.ipynb rename to doc/examples/did/py_did_pretest.ipynb index 03bed5c6..daa17b78 100644 --- a/doc/examples/py_double_ml_did_pretest.ipynb +++ b/doc/examples/did/py_did_pretest.ipynb @@ -5,8 +5,21 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Python: Difference-in-Differences Pre-Testing\n", - "\n", + "# Python: Difference-in-Differences Pre-Testing" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Remark:**\n", + "*This notebook is based on the deprecated version of the DiD implementation of DoubleML. Please check out the [DiD Section](https://docs.doubleml.org/dev/guide/models.html#difference-in-differences-models-did) to find out about the current implementation.*" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This example illustrates how the Difference-in-Differences implementation `DoubleMLDID` of the [DoubleML](https://docs.doubleml.org/stable/index.html) package can be used to pre-test the parallel trends assumptions.\n", + "The example is based on the great implementation of the [did-package](https://cran.r-project.org/web/packages/did/vignettes/did-basics.html) in `R`. \n", + "You can find further references and a detailed guide on pre-testing with the `did`-package at the [did-package pre-testing documentation](https://cran.r-project.org/web/packages/did/vignettes/pre-testing.html)." diff --git a/doc/examples/did/py_panel.ipynb b/doc/examples/did/py_panel.ipynb new file mode 100644 index 00000000..64d84be8 --- /dev/null +++ b/doc/examples/did/py_panel.ipynb @@ -0,0 +1,948 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Python: Panel Data with Multiple Time Periods\n", + "\n", + "In this example, we provide a detailed guide on Difference-in-Differences with multiple time periods using the [DoubleML-package](https://docs.doubleml.org/stable/index.html). 
The implementation is based on [Callaway and Sant'Anna(2021)](https://doi.org/10.1016/j.jeconom.2020.12.001).\n", + "\n", + "The notebook requires the following packages:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import seaborn as sns\n", + "import matplotlib.pyplot as plt\n", + "import pandas as pd\n", + "import numpy as np\n", + "\n", + "from lightgbm import LGBMRegressor, LGBMClassifier\n", + "from sklearn.linear_model import LinearRegression, LogisticRegression\n", + "\n", + "from doubleml.did import DoubleMLDIDMulti\n", + "from doubleml.data import DoubleMLPanelData\n", + "\n", + "from doubleml.did.datasets import make_did_CS2021" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Data\n", + "\n", + "We will rely on the `make_did_CS2021` DGP, which is inspired by [Callaway and Sant'Anna(2021)](https://doi.org/10.1016/j.jeconom.2020.12.001) (Appendix SC) and [Sant'Anna and Zhao (2020)](https://doi.org/10.1016/j.jeconom.2020.06.003).\n", + "\n", + "We will observe `n_obs` units over `n_periods`. Remark that the dataframe includes observations of the potential outcomes `y0` and `y1`, such that we can use oracle estimates as comparisons. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "n_obs = 5000\n", + "n_periods = 6\n", + "\n", + "df = make_did_CS2021(n_obs, dgp_type=4, n_periods=n_periods, n_pre_treat_periods=3, time_type=\"datetime\")\n", + "df[\"ite\"] = df[\"y1\"] - df[\"y0\"]\n", + "\n", + "print(df.shape)\n", + "df.head()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Data Details\n", + "\n", + "*Here, we slightly abuse the definition of the potential outcomes. $Y_{i,t}(1)$ corresponds to the (potential) outcome if unit $i$ would have received treatment at time period $\\mathrm{g}$ (where the group $\\mathrm{g}$ is drawn with probabilities based on $Z$).*\n", + "\n", + "More specifically\n", + "\n", + "$$\n", + "\\begin{align*}\n", + "Y_{i,t}(0)&:= f_t(Z) + \\delta_t + \\eta_i + \\varepsilon_{i,t,0}\\\\\n", + "Y_{i,t}(1)&:= Y_{i,t}(0) + \\theta_{i,t,\\mathrm{g}} + \\epsilon_{i,t,1} - \\epsilon_{i,t,0}\n", + "\\end{align*}\n", + "$$\n", + "\n", + "where\n", + " - $f_t(Z)$ depends on pre-treatment observable covariates $Z_1,\\dots, Z_4$ and time $t$\n", + " - $\\delta_t$ is a time fixed effect\n", + " - $\\eta_i$ is a unit fixed effect\n", + " - $\\epsilon_{i,t,\\cdot}$ are time varying unobservables (iid. $N(0,1)$)\n", + " - $\\theta_{i,t,\\mathrm{g}}$ corresponds to the exposure effect of unit $i$ based on group $\\mathrm{g}$ at time $t$\n", + "\n", + "For the pre-treatment periods the exposure effect is set to\n", + "$$\n", + "\\theta_{i,t,\\mathrm{g}}:= 0 \\text{ for } t<\\mathrm{g}\n", + "$$\n", + "such that \n", + "\n", + "$$\n", + "\\mathbb{E}[Y_{i,t}(1) - Y_{i,t}(0)] = \\mathbb{E}[\\epsilon_{i,t,1} - \\epsilon_{i,t,0}]=0 \\text{ for } t<\\mathrm{g}\n", + "$$\n", + "\n", + "The [DoubleML Coverage Repository](https://docs.doubleml.org/doubleml-coverage/) includes coverage simulations based on this DGP." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Data Description" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The data is a balanced panel where each unit is observed over `n_periods` starting January 2025."
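Since later aggregations implicitly rely on the panel being balanced, this claim can be checked explicitly. The following small pandas sketch reuses the `df` and `n_periods` defined above and is meant as an optional sanity check, not part of the original notebook.

```python
# Sketch: make the "balanced panel" statement explicit for the simulated df above.
# Every unit should be observed exactly once in each of the n_periods periods.
obs_per_unit = df.groupby("id")["t"].nunique()
assert (obs_per_unit == n_periods).all(), "panel is not balanced"
print(obs_per_unit.value_counts())  # all units appear in n_periods periods
```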
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df.groupby(\"t\").size()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The treatment column `d` indicates the first treatment period of the corresponding unit, whereas `NaT` units are never treated.\n", + "\n", + "*Generally, never treated units should either take on the value `np.inf` or `pd.NaT` depending on the data type (`float` or `datetime`).*\n", + "\n", + "The individual units are roughly uniformly divided between the groups, where treatment assignment depends on the pre-treatment covariates `Z1` to `Z4`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df.groupby(\"d\", dropna=False).size()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here, the group indicates the first treated period and `NaT` units are never treated. To simplify plotting and pandas operations, we will add a readable \"First Treated\" label for each group further below." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df.groupby(\"d\", dropna=False).size()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To get a better understanding of the underlying data and true effects, we will compare the unconditional averages and the true effects based on the oracle values of individual effects `ite`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# rename for plotting\n", + "df[\"First Treated\"] = df[\"d\"].dt.strftime(\"%Y-%m\").fillna(\"Never Treated\")\n", + "\n", + "# Create aggregation dictionary for means\n", + "def agg_dict(col_name):\n", + " return {\n", + " f'{col_name}_mean': (col_name, 'mean'),\n", + " f'{col_name}_lower_quantile': (col_name, lambda x: x.quantile(0.05)),\n", + " f'{col_name}_upper_quantile': (col_name, lambda x: x.quantile(0.95))\n", + " }\n", + "\n", + "# Calculate means and confidence intervals\n", + "agg_dictionary = agg_dict(\"y\") | agg_dict(\"ite\")\n", + "\n", + "agg_df = df.groupby([\"t\", \"First Treated\"]).agg(**agg_dictionary).reset_index()\n", + "agg_df.head()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def plot_data(df, col_name='y'):\n", + " \"\"\"\n", + " Create an improved plot with colorblind-friendly features\n", + " \n", + " Parameters:\n", + " -----------\n", + " df : DataFrame\n", + " The dataframe containing the data\n", + " col_name : str, default='y'\n", + " Column name to plot (will use '{col_name}_mean')\n", + " \"\"\"\n", + " plt.figure(figsize=(12, 7))\n", + " n_colors = df[\"First Treated\"].nunique()\n", + " color_palette = sns.color_palette(\"colorblind\", n_colors=n_colors)\n", + "\n", + " sns.lineplot(\n", + " data=df,\n", + " x='t',\n", + " y=f'{col_name}_mean',\n", + " hue='First Treated',\n", + " style='First Treated',\n", + " palette=color_palette,\n", + " markers=True,\n", + " dashes=True,\n", + " linewidth=2.5,\n", + " alpha=0.8\n", + " )\n", + " \n", + " plt.title(f'Average Values {col_name} by Group Over Time', fontsize=16)\n", + " plt.xlabel('Time', fontsize=14)\n", + " plt.ylabel(f'Average Value {col_name}', fontsize=14)\n", + " \n", + "\n", + " plt.legend(title='First Treated', title_fontsize=13, fontsize=12, \n", + " frameon=True, framealpha=0.9, loc='best')\n", + " \n", + " plt.grid(alpha=0.3, linestyle='-')\n", + " plt.tight_layout()\n", + "\n", + " plt.show()" + ] + },
+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "So let us take a look at the average values over time" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plot_data(agg_df, col_name='y')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Instead the true average treatment treatment effects can be obtained by averaging (usually unobserved) the `ite` values.\n", + "\n", + "The true effect just equals the exposure time (in months):\n", + "\n", + "$$\n", + "ATT(\\mathrm{g}, t) = \\min(\\mathrm{t} - \\mathrm{g} + 1, 0) =: e\n", + "$$\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "jupyter": { + "source_hidden": true + } + }, + "outputs": [], + "source": [ + "plot_data(agg_df, col_name='ite')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### DoubleMLPanelData\n", + "\n", + "Finally, we can construct our `DoubleMLPanelData`, specifying\n", + "\n", + " - `y_col` : the outcome\n", + " - `d_cols`: the group variable indicating the first treated period for each unit\n", + " - `id_col`: the unique identification column for each unit\n", + " - `t_col` : the time column\n", + " - `x_cols`: the additional pre-treatment controls\n", + " - `datetime_unit`: unit required for `datetime` columns and plotting" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_data = DoubleMLPanelData(\n", + " data=df,\n", + " y_col=\"y\",\n", + " d_cols=\"d\",\n", + " id_col=\"id\",\n", + " t_col=\"t\",\n", + " x_cols=[\"Z1\", \"Z2\", \"Z3\", \"Z4\"],\n", + " datetime_unit=\"M\"\n", + ")\n", + "print(dml_data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## ATT Estimation\n", + "\n", + "The [DoubleML-package](https://docs.doubleml.org/stable/index.html) implements estimation of group-time average treatment effect via the `DoubleMLDIDMulti` class (see [model documentation](https://docs.doubleml.org/stable/guide/models.html#difference-in-differences-models-did))." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Basics\n", + "\n", + "The class basically behaves like other `DoubleML` classes and requires the specification of two learners (for more details on the regression elements, see [score documentation](https://docs.doubleml.org/stable/guide/scores.html#difference-in-differences-models)).\n", + "\n", + "The basic arguments of a `DoubleMLDIDMulti` object include\n", + "\n", + " - `ml_g` \"outcome\" regression learner\n", + " - `ml_m` propensity Score learner\n", + " - `control_group` the control group for the parallel trend assumption\n", + " - `gt_combinations` combinations of $(\\mathrm{g},t_\\text{pre}, t_\\text{eval})$\n", + " - `anticipation_periods` number of anticipation periods\n", + "\n", + "We will construct a `dict` with \"default\" arguments." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "default_args = {\n", + " \"ml_g\": LGBMRegressor(n_estimators=500, learning_rate=0.01, verbose=-1, random_state=123),\n", + " \"ml_m\": LGBMClassifier(n_estimators=500, learning_rate=0.01, verbose=-1, random_state=123),\n", + " \"control_group\": \"never_treated\",\n", + " \"gt_combinations\": \"standard\",\n", + " \"anticipation_periods\": 0,\n", + " \"n_folds\": 5,\n", + " \"n_rep\": 1,\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The model will be estimated using the `fit()` method." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_obj = DoubleMLDIDMulti(dml_data, **default_args)\n", + "dml_obj.fit()\n", + "print(dml_obj)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The summary displays estimates of the $ATT(g,t_\\text{eval})$ effects for different combinations of $(g,t_\\text{eval})$ via $\\widehat{ATT}(\\mathrm{g},t_\\text{pre},t_\\text{eval})$, where\n", + " - $\\mathrm{g}$ specifies the group\n", + " - $t_\\text{pre}$ specifies the corresponding pre-treatment period\n", + " - $t_\\text{eval}$ specifies the evaluation period\n", + "\n", + "The choice `gt_combinations=\"standard\"` estimates all possible combinations of $ATT(g,t_\\text{eval})$ via $\\widehat{ATT}(\\mathrm{g},t_\\text{pre},t_\\text{eval})$,\n", + "where the standard choice is $t_\\text{pre} = \\min(\\mathrm{g}, t_\\text{eval}) - 1$ (without anticipation).\n", + "\n", + "Remark that this includes pre-test effects if $\\mathrm{g} > t_{eval}$, e.g. $\\widehat{ATT}(g=\\text{2025-04}, t_{\\text{pre}}=\\text{2025-01}, t_{\\text{eval}}=\\text{2025-02})$ which estimates the pre-trend from January to February even if the actual treatment occurred in April." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As usual for the DoubleML-package, you can obtain joint confidence intervals via bootstrap." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "level = 0.95\n", + "\n", + "ci = dml_obj.confint(level=level)\n", + "dml_obj.bootstrap(n_rep_boot=5000)\n", + "ci_joint = dml_obj.confint(level=level, joint=True)\n", + "ci_joint" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "A visualization of the effects can be obtained via the `plot_effects()` method.\n", + "\n", + "Remark that the plot uses joint confidence intervals per default." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [ + "nbsphinx-thumbnail" + ] + }, + "outputs": [], + "source": [ + "dml_obj.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Sensitivity Analysis\n", + "\n", + "As described in the [Sensitivity Guide](https://docs.doubleml.org/stable/guide/sensitivity.html), robustness checks on omitted confounding/parallel trend violations are available via the standard `sensitivity_analysis()` method." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_obj.sensitivity_analysis()\n", + "print(dml_obj.sensitivity_summary)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this example, one can clearly distinguish the robustness of the non-zero effects vs. the pre-treatment periods."
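The sensitivity analysis can also be re-run with explicit confounding-strength parameters. The sketch below uses the argument names from the DoubleML sensitivity guide (`cf_y`, `cf_d`, `rho`, `level`); the numeric values are placeholders, not recommendations.

```python
# Sketch: re-run the sensitivity analysis with explicit (placeholder)
# confounding-strength parameters instead of the defaults.
dml_obj.sensitivity_analysis(cf_y=0.04, cf_d=0.04, rho=1.0, level=0.95)
print(dml_obj.sensitivity_summary)
```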
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Control Groups\n", + "\n", + "The current implementation supports the following control groups\n", + "\n", + " - ``\"never_treated\"``\n", + " - ``\"not_yet_treated\"``\n", + "\n", + "Remark that the ``\"not_yet_treated\"`` control group depends on anticipation.\n", + "\n", + "For differences and recommendations, we refer to [Callaway and Sant'Anna(2021)](https://doi.org/10.1016/j.jeconom.2020.12.001)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_obj_nyt = DoubleMLDIDMulti(dml_data, **(default_args | {\"control_group\": \"not_yet_treated\"}))\n", + "dml_obj_nyt.fit()\n", + "dml_obj_nyt.bootstrap(n_rep_boot=5000)\n", + "dml_obj_nyt.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Linear Covariate Adjustment\n", + "\n", + "Remark that we relied on boosted trees to adjust for conditional parallel trends, which allow for a nonlinear adjustment. For comparison, we can instead rely on linear learners.\n", + "\n", + "**Remark that the DGP (`dgp_type=4`) is based on nonlinear conditional expectations such that the estimates will be biased**\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "linear_learners = {\n", + " \"ml_g\": LinearRegression(),\n", + " \"ml_m\": LogisticRegression(),\n", + "}\n", + "\n", + "dml_obj_linear = DoubleMLDIDMulti(dml_data, **(default_args | linear_learners))\n", + "dml_obj_linear.fit()\n", + "dml_obj_linear.bootstrap(n_rep_boot=5000)\n", + "dml_obj_linear.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Aggregated Effects\n", + "As in the [did-R-package](https://bcallaway11.github.io/did/index.html), the $ATT$'s can be aggregated to summarize multiple effects.\n", + "For details on the different aggregations and their interpretations see [Callaway and Sant'Anna(2021)](https://doi.org/10.1016/j.jeconom.2020.12.001).\n", + "\n", + "The aggregations are implemented via the `aggregate()` method." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Group Aggregation\n", + "\n", + "\n", + "To obtain group-specific effects, one would like to average $ATT(\\mathrm{g}, t_\\text{eval})$ over $t_\\text{eval}$.\n", + "As a sample oracle we will combine all `ite`'s based on group $\\mathrm{g}$." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df_post_treatment = df[df[\"t\"] >= df[\"d\"]]\n", + "df_post_treatment.groupby(\"d\")[\"ite\"].mean()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To obtain group-specific effects, it is possible to aggregate several $\\widehat{ATT}(\\mathrm{g},t_\\text{pre},t_\\text{eval})$ values based on the group $\\mathrm{g}$ by setting the `aggregation=\"group\"` argument." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "aggregated_group = dml_obj.aggregate(aggregation=\"group\")\n", + "print(aggregated_group)\n", + "_ = aggregated_group.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The output is a `DoubleMLDIDAggregation` object which includes an overall aggregation summary based on group size."
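The group-size weighting behind that overall summary can be spelled out by hand. The numbers below are hypothetical and only illustrate the weighting scheme, not actual estimates from this notebook.

```python
# Sketch: an overall effect formed from group-level ATT estimates,
# weighted by group size (hypothetical numbers for illustration).
import numpy as np

group_atts = np.array([2.1, 1.4, 0.7])      # ATT estimates for groups g = 4, 5, 6
group_sizes = np.array([1300, 1200, 1100])  # number of treated units per group

weights = group_sizes / group_sizes.sum()
overall_att = weights @ group_atts
print(weights.round(3), round(float(overall_att), 3))
```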
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Time Aggregation\n", + "\n", + "To obtain time-specific effects, one would like to average $ATT(\\mathrm{g}, t_\\text{eval})$ over $\\mathrm{g}$ (respecting group size).\n", + "As a sample oracle we will combine all `ite`'s based on the time period $t$. As oracle values, we obtain" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df_post_treatment.groupby(\"t\")[\"ite\"].mean()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The estimates $\\widehat{ATT}(\\mathrm{g},t_\\text{pre},t_\\text{eval})$ are aggregated based on $t_\\text{eval}$ and weighted with respect to group size. This corresponds to *Calendar Time Effects* from the [did-R-package](https://bcallaway11.github.io/did/index.html).\n", + "\n", + "For calendar time effects set `aggregation=\"time\"`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "aggregated_time = dml_obj.aggregate(\"time\")\n", + "print(aggregated_time)\n", + "fig, ax = aggregated_time.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Event Study Aggregation\n", + "\n", + "To obtain event-study-type effects, one would like to aggregate $ATT(\\mathrm{g}, t_\\text{eval})$ over $e = t_\\text{eval} - \\mathrm{g}$ (respecting group size).\n", + "As a sample oracle we will combine all `ite`'s based on the exposure time $e$. As oracle values, we obtain" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df[\"e\"] = pd.to_datetime(df[\"t\"]).values.astype(\"datetime64[M]\") - \\\n", + " pd.to_datetime(df[\"d\"]).values.astype(\"datetime64[M]\")\n", + "df.groupby(\"e\")[\"ite\"].mean()[1:]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Analogously, `aggregation=\"eventstudy\"` aggregates $\\widehat{ATT}(\\mathrm{g},t_\\text{pre},t_\\text{eval})$ based on exposure time $e = t_\\text{eval} - \\mathrm{g}$ (respecting group size)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "aggregated_eventstudy = dml_obj.aggregate(\"eventstudy\")\n", + "print(aggregated_eventstudy)\n", + "aggregated_eventstudy.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Aggregation Details\n", + "\n", + "The `DoubleMLDIDAggregation` objects include several `DoubleMLFrameworks` which support methods like `bootstrap()` or `confint()`.\n", + "Further, the weights can be accessed via the properties\n", + "\n", + " - ``overall_aggregation_weights``: weights for the overall aggregation\n", + " - ``aggregation_weights``: weights for the aggregation\n", + "\n", + "To clarify, e.g. 
for the eventstudy aggregation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(aggregated_eventstudy)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here, the overall effect aggregation aggregates each effect with positive exposure" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(aggregated_eventstudy.overall_aggregation_weights)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If one would like to consider how the aggregated effect with $e=0$ is computed, one would have to look at the corresponding set of weights within the ``aggregation_weights`` property" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# the weights for e=0 correspond to the fifth element of the aggregation weights\n", + "aggregated_eventstudy.aggregation_weights[4]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Taking a look at the original `dml_obj`, one can see that this combines the following estimates (only show month):\n", + "\n", + " - $\\widehat{ATT}(04,03,04)$\n", + " - $\\widehat{ATT}(05,04,05)$\n", + " - $\\widehat{ATT}(06,05,06)$" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(dml_obj.summary[\"coef\"])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Anticipation\n", + "\n", + "As described in the [Model Guide](https://docs.doubleml.org/stable/guide/models.html#difference-in-differences-models-did), one can include anticipation periods $\\delta>0$ by setting the `anticipation_periods` parameter." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Data with Anticipation\n", + "\n", + "The DGP allows to include anticipation periods via the `anticipation_periods` parameter.\n", + "In this case the observations will be \"shifted\" such that units anticipate the effect earlier and the exposure effect is increased by the number of periods where the effect is anticipated." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "n_obs = 4000\n", + "n_periods = 6\n", + "\n", + "df_anticipation = make_did_CS2021(n_obs, dgp_type=4, n_periods=n_periods, n_pre_treat_periods=3, time_type=\"datetime\", anticipation_periods=1)\n", + "\n", + "print(df_anticipation.shape)\n", + "df_anticipation.head()\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To visualize the anticipation, we will again plot the \"oracle\" values" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df_anticipation[\"ite\"] = df_anticipation[\"y1\"] - df_anticipation[\"y0\"]\n", + "df_anticipation[\"First Treated\"] = df_anticipation[\"d\"].dt.strftime(\"%Y-%m\").fillna(\"Never Treated\")\n", + "agg_df_anticipation = df_anticipation.groupby([\"t\", \"First Treated\"]).agg(**agg_dictionary).reset_index()\n", + "agg_df_anticipation.head()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "One can see that the effect is already anticipated one period before the actual treatment assignment." 
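The shift described above can be written as simple arithmetic on the oracle effect: with `anticipation_periods=1`, the effect starts one month before the assigned treatment date and is larger by one month thereafter. A minimal sketch, assuming the exposure effect simply shifts by the number of anticipated periods as the text states:

```python
# Sketch: oracle effect with delta anticipation periods, assuming the
# exposure effect is shifted forward by delta (as described above).
def oracle_att(t, g, delta=0):
    return max(t - (g - delta) + 1, 0)

# Group first treated in month 4, one anticipation period:
print([oracle_att(t, g=4, delta=1) for t in range(1, 7)])  # [0, 0, 1, 2, 3, 4]
```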
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plot_data(agg_df_anticipation, col_name='ite')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Initialize a corresponding `DoubleMLPanelData` object." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_data_anticipation = DoubleMLPanelData(\n", + " data=df_anticipation,\n", + " y_col=\"y\",\n", + " d_cols=\"d\",\n", + " id_col=\"id\",\n", + " t_col=\"t\",\n", + " x_cols=[\"Z1\", \"Z2\", \"Z3\", \"Z4\"],\n", + " datetime_unit=\"M\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### ATT Estimation\n", + "\n", + "Let us take a look at the estimation without anticipation." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_obj_anticipation = DoubleMLDIDMulti(dml_data_anticipation, **default_args)\n", + "dml_obj_anticipation.fit()\n", + "dml_obj_anticipation.bootstrap(n_rep_boot=5000)\n", + "dml_obj_anticipation.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The effects are obviously biased. To include anticipation periods, one can adjust the `anticipation_periods` parameter. Correspondingly, the outcome regression (and not yet treated units) are adjusted." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_obj_anticipation = DoubleMLDIDMulti(dml_data_anticipation, **(default_args| {\"anticipation_periods\": 1}))\n", + "dml_obj_anticipation.fit()\n", + "dml_obj_anticipation.bootstrap(n_rep_boot=5000)\n", + "dml_obj_anticipation.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Group-Time Combinations\n", + "\n", + "The default option `gt_combinations=\"standard\"` includes all group time values with the specific choice of $t_\\text{pre} = \\min(\\mathrm{g}, t_\\text{eval}) - 1$ (without anticipation) which is the weakest possible parallel trend assumption.\n", + "\n", + "Other options are possible or only specific combinations of $(\\mathrm{g},t_\\text{pre},t_\\text{eval})$." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### All combinations\n", + "\n", + "The option `gt_combinations=\"all\"` includes all relevant group time values with $t_\\text{pre} < \\min(\\mathrm{g}, t_\\text{eval})$, including longer parallel trend assumptions.\n", + "This can result in multiple estimates for the same $ATT(\\mathrm{g},t)$, which have slightly different assumptions (length of parallel trends)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_obj_all = DoubleMLDIDMulti(dml_data, **(default_args| {\"gt_combinations\": \"all\"}))\n", + "dml_obj_all.fit()\n", + "dml_obj_all.bootstrap(n_rep_boot=5000)\n", + "dml_obj_all.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Selected Combinations\n", + "\n", + "Instead it is also possible to just submit a list of tuples containing $(\\mathrm{g}, t_\\text{pre}, t_\\text{eval})$ combinations. E.g. 
only two combinations" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "gt_dict = {\n", + " \"gt_combinations\": [\n", + " (np.datetime64('2025-04'),\n", + " np.datetime64('2025-01'),\n", + " np.datetime64('2025-02')),\n", + " (np.datetime64('2025-04'),\n", + " np.datetime64('2025-02'),\n", + " np.datetime64('2025-03')),\n", + " ]\n", + "}\n", + "\n", + "dml_obj_all = DoubleMLDIDMulti(dml_data, **(default_args| gt_dict))\n", + "dml_obj_all.fit()\n", + "dml_obj_all.bootstrap(n_rep_boot=5000)\n", + "dml_obj_all.plot_effects()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/doc/examples/did/py_panel_data_example.ipynb b/doc/examples/did/py_panel_data_example.ipynb new file mode 100644 index 00000000..0f87e648 --- /dev/null +++ b/doc/examples/did/py_panel_data_example.ipynb @@ -0,0 +1,392 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Python: Real-Data Example for Multi-Period Difference-in-Differences\n", + "\n", + "In this example, we replicate a [real-data demo notebook](https://bcallaway11.github.io/did/articles/did-basics.html#an-example-with-real-data) from the [did-R-package](https://bcallaway11.github.io/did/index.html) in order to illustrate the use of `DoubleML` for multi-period difference-in-differences (DiD) models. \n", + "\n", + "\n", + "\n", + "The notebook requires the following packages:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import pyreadr\n", + "import pandas as pd\n", + "import numpy as np\n", + "\n", + "from sklearn.linear_model import LinearRegression, LogisticRegression\n", + "from sklearn.dummy import DummyRegressor, DummyClassifier\n", + "from sklearn.linear_model import LassoCV, LogisticRegressionCV\n", + "\n", + "from doubleml.data import DoubleMLPanelData\n", + "from doubleml.did import DoubleMLDIDMulti" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Causal Research Question\n", + "\n", + "[Callaway and Sant'Anna (2021)](https://doi.org/10.1016/j.jeconom.2020.12.001) study the causal effect of raising the minimum wage on teen employment in the US using county data over a period from 2001 to 2007. A county is defined as treated if the minimum wage in that county is above the federal minimum wage. We focus on a preprocessed balanced panel data set as provided by the [did-R-package](https://bcallaway11.github.io/did/index.html). The corresponding documentation for the `mpdta` data is available from the [did package website](https://bcallaway11.github.io/did/reference/mpdta.html). We use this data solely as a demonstration example to help readers understand differences in the `DoubleML` and `did` packages. An analogous notebook using the same data is available from the [did documentation](https://bcallaway11.github.io/did/articles/did-basics.html#an-example-with-real-data).\n", + "\n", + "We follow the original notebook and provide results under identification based on unconditional and conditional parallel trends. 
For the Double Machine Learning (DML) Difference-in-Differences estimator, we demonstrate two different specifications, one based on linear and logistic regression and one based on their $\\ell_1$ penalized variants Lasso and logistic regression with cross-validated penalty choice. The results for the former are expected to be very similar to those in the [did data example](https://bcallaway11.github.io/did/articles/did-basics.html#an-example-with-real-data). Minor differences might arise due to the use of sample-splitting in the DML estimation.\n", + "\n", + "\n", + "## Data\n", + "\n", + "We will download and read a preprocessed data file as provided by the [did-R-package](https://bcallaway11.github.io/did/index.html).\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# download file from did package for R\n", + "url = \"https://github.com/bcallaway11/did/raw/refs/heads/master/data/mpdta.rda\"\n", + "pyreadr.download_file(url, \"mpdta.rda\")\n", + "\n", + "mpdta = pyreadr.read_r(\"mpdta.rda\")[\"mpdta\"]\n", + "mpdta.head()\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To work with [DoubleML](https://docs.doubleml.org/stable/index.html), we initialize a `DoubleMLPanelData` object. The input data has to satisfy some requirements, i.e., it should be in a *long* format with every row containing the information of one unit at one time period. Moreover, the data should contain a column on the unit identifier and a column on the time period. The requirements are virtually identical to those of the [did-R-package](https://bcallaway11.github.io/did/index.html), as listed in [their data example](https://bcallaway11.github.io/did/articles/did-basics.html#an-example-with-real-data). In line with the naming conventions of [DoubleML](https://docs.doubleml.org/stable/index.html), the treatment group indicator is passed to `DoubleMLPanelData` by the `d_cols` argument. To flexibly handle different formats for handling time periods, the time variable `t_col` can handle `float`, `int` and `datetime` formats. More information are available in the [user guide](https://docs.doubleml.org/dev/guide/data_backend.html#doublemlpaneldata). To indicate never treated units, we set their value for the treatment group variable to `np.inf`." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, we can initialize the ``DoubleMLPanelData`` object, specifying\n", + "\n", + " - `y_col` : the outcome\n", + " - `d_cols`: the group variable indicating the first treated period for each unit\n", + " - `id_col`: the unique identification column for each unit\n", + " - `t_col` : the time column\n", + " - `x_cols`: the additional pre-treatment controls\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Set values for treatment group indicator for never-treated to np.inf\n", + "mpdta.loc[mpdta['first.treat'] == 0, 'first.treat'] = np.inf\n", + "\n", + "dml_data = DoubleMLPanelData(\n", + " data=mpdta,\n", + " y_col=\"lemp\",\n", + " d_cols=\"first.treat\",\n", + " id_col=\"countyreal\",\n", + " t_col=\"year\",\n", + " x_cols=['lpop']\n", + ")\n", + "print(dml_data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note that we specified a pre-treatment confounding variable `lpop` through the `x_cols` argument. 
To consider cases under unconditional parallel trends, we can use dummy learners to ignore the pre-treatment confounding variable. This is illustrated below." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## ATT Estimation: Unconditional Parallel Trends\n", + "\n", + "We start with identification under the unconditional parallel trends assumption. To do so, initialize a `DoubleMLDIDMulti` object (see [model documentation](https://docs.doubleml.org/stable/guide/models.html#difference-in-differences-models-did)), which takes the previously initialized `DoubleMLPanelData` object as input. We use scikit-learn's `DummyRegressor` (documentation [here](https://scikit-learn.org/stable/modules/generated/sklearn.dummy.DummyRegressor.html)) and `DummyClassifier` (documentation [here](https://scikit-learn.org/stable/modules/generated/sklearn.dummy.DummyClassifier.html)) to ignore the pre-treatment confounding variable. At this stage, we can also pass further options, for example specifying the number of folds and repetitions used for cross-fitting. \n", + "\n", + "When calling the `fit()` method, the model estimates standard combinations of $ATT(g,t)$ parameters, which corresponds to the defaults in the [did-R-package](https://bcallaway11.github.io/did/index.html). These combinations can also be customized through the `gt_combinations` argument, see [the user guide](https://docs.doubleml.org/stable/guide/models.html#panel-data)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_obj = DoubleMLDIDMulti(\n", + " obj_dml_data=dml_data,\n", + " ml_g=DummyRegressor(),\n", + " ml_m=DummyClassifier(),\n", + " control_group=\"never_treated\",\n", + " n_folds=10\n", + ")\n", + "\n", + "dml_obj.fit()\n", + "print(dml_obj.summary.round(4))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The summary displays estimates of the $ATT(g,t_\\text{eval})$ effects for different combinations of $(g,t_\\text{eval})$ via $\\widehat{ATT}(\\mathrm{g},t_\\text{pre},t_\\text{eval})$, where\n", + " - $\\mathrm{g}$ specifies the group\n", + " - $t_\\text{pre}$ specifies the corresponding pre-treatment period\n", + " - $t_\\text{eval}$ specifies the evaluation period\n", + "\n", + "This corresponds to the estimates given in `att_gt` function in the [did-R-package](https://bcallaway11.github.io/did/index.html), where the standard choice is $t_\\text{pre} = \\min(\\mathrm{g}, t_\\text{eval}) - 1$ (without anticipation).\n", + "\n", + "Remark that this includes pre-tests effects if $\\mathrm{g} > t_{eval}$, e.g. $ATT(2007,2005)$." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As usual for the DoubleML-package, you can obtain joint confidence intervals via bootstrap." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "level = 0.95\n", + "\n", + "ci = dml_obj.confint(level=level)\n", + "dml_obj.bootstrap(n_rep_boot=5000)\n", + "ci_joint = dml_obj.confint(level=level, joint=True)\n", + "print(ci_joint)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "A visualization of the effects can be obtained via the `plot_effects()` method.\n", + "\n", + "Remark that the plot used joint confidence intervals per default. 
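To make the distinction concrete: the joint bands are wider than the pointwise intervals, because they are calibrated to cover all $(\\mathrm{g}, t_\\text{eval})$ estimates simultaneously. As a minimal, purely illustrative check, the two tables from the previous cell can be compared directly (both are DataFrames with the same rows and columns):\n",
+ "\n",
+ "```python\n",
+ "print((ci_joint - ci).round(4))  # joint bounds shift outward on both sides\n",
+ "```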
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [ + "nbsphinx-thumbnail" + ] + }, + "outputs": [], + "source": [ + "fig, ax = dml_obj.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Effect Aggregation\n", + "\n", + "As the [did-R-package](https://bcallaway11.github.io/did/index.html), the $ATT$'s can be aggregated to summarize multiple effects.\n", + "For details on different aggregations and details on their interpretations see [Callaway and Sant'Anna(2021)](https://doi.org/10.1016/j.jeconom.2020.12.001).\n", + "\n", + "The aggregations are implemented via the `aggregate()` method. We follow the structure of the [did package notebook](https://bcallaway11.github.io/did/articles/did-basics.html#an-example-with-real-data) and start with an aggregation relative to the treatment timing." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Event Study Aggregation\n", + "\n", + "\n", + "We can aggregate the $ATT$s relative to the treatment timing. This is done by setting `aggregation=\"eventstudy\"` in the `aggregate()` method. \n", + " `aggregation=\"eventstudy\"` aggregates $\\widehat{ATT}(\\mathrm{g},t_\\text{pre},t_\\text{eval})$ based on exposure time $e = t_\\text{eval} - \\mathrm{g}$ (respecting group size)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# rerun bootstrap for valid simultaneous inference (as values are not saved) \n", + "dml_obj.bootstrap(n_rep_boot=5000)\n", + "aggregated_eventstudy = dml_obj.aggregate(\"eventstudy\")\n", + "# run bootstrap to obtain simultaneous confidence intervals\n", + "aggregated_eventstudy.aggregated_frameworks.bootstrap()\n", + "print(aggregated_eventstudy)\n", + "fig, ax = aggregated_eventstudy.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Alternatively, the $ATT$ could also be aggregated according to (calendar) time periods or treatment groups, see the [user guide](https://docs.doubleml.org/dev/guide/models.html#effect-aggregation)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Aggregation Details\n", + "\n", + "The `DoubleMLDIDAggregation` objects include several `DoubleMLFrameworks` which support methods like `bootstrap()` or `confint()`.\n", + "Further, the weights can be accessed via the properties\n", + "\n", + " - ``overall_aggregation_weights``: weights for the overall aggregation\n", + " - ``aggregation_weights``: weights for the aggregation\n", + "\n", + "To clarify, e.g. for the eventstudy aggregation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If one would like to consider how the aggregated effect with $e=0$ is computed, one would have to look at the third set of weights within the ``aggregation_weights`` property" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "aggregated_eventstudy.aggregation_weights[2]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## ATT Estimation: Conditional Parallel Trends\n", + "\n", + "We briefly demonstrate how to use the `DoubleMLDIDMulti` model with conditional parallel trends. As the rationale behind DML is to flexibly model nuisance components as prediction problems, the DML DiD estimator includes pre-treatment covariates by default. 
In DiD, the nuisance components are the outcome regression and the propensity score estimation for the treatment group variable. This is why we had to enforce dummy learners in the unconditional parallel trends case to ignore the pre-treatment covariates. Now, we can replicate the classical doubly robust DiD estimator as of [Callaway and Sant'Anna(2021)](https://doi.org/10.1016/j.jeconom.2020.12.001) by using linear and logistic regression for the nuisance components. This is done by setting `ml_g` to `LinearRegression()` and `ml_m` to `LogisticRegression()`. Similarly, we can also choose other learners, for example by setting `ml_g` and `ml_m` to `LassoCV()` and `LogisticRegressionCV()`. We present the results for the ATTs and their event-study aggregation in the corresponding effect plots.\n", + "\n", + "Please note that the example is meant to illustrate the usage of the `DoubleMLDIDMulti` model in combination with ML learners. In real-data applicatoins, careful choice and empirical evaluation of the learners are required. Default measures for the prediction of the nuisance components are printed in the model summary, as briefly illustrated below." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_obj_linear_logistic = DoubleMLDIDMulti(\n", + " obj_dml_data=dml_data,\n", + " ml_g=LinearRegression(),\n", + " ml_m=LogisticRegression(penalty=None),\n", + " control_group=\"never_treated\",\n", + " n_folds=10\n", + ")\n", + "\n", + "dml_obj_linear_logistic.fit()\n", + "dml_obj_linear_logistic.bootstrap(n_rep_boot=5000)\n", + "dml_obj_linear_logistic.plot_effects(title=\"Estimated ATTs by Group, Linear and logistic Regression\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We briefly look at the model summary, which includes some standard diagnostics for the prediction of the nuisance components." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(dml_obj_linear_logistic)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "es_linear_logistic = dml_obj_linear_logistic.aggregate(\"eventstudy\")\n", + "es_linear_logistic.aggregated_frameworks.bootstrap()\n", + "es_linear_logistic.plot_effects(title=\"Estimated ATTs by Group, Linear and logistic Regression\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_obj_lasso = DoubleMLDIDMulti(\n", + " obj_dml_data=dml_data,\n", + " ml_g=LassoCV(),\n", + " ml_m=LogisticRegressionCV(),\n", + " control_group=\"never_treated\",\n", + " n_folds=10\n", + ")\n", + "\n", + "dml_obj_lasso.fit()\n", + "dml_obj_lasso.bootstrap(n_rep_boot=5000)\n", + "dml_obj_lasso.plot_effects(title=\"Estimated ATTs by Group, LassoCV and LogisticRegressionCV()\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Model summary\n", + "print(dml_obj_lasso)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "es_rf = dml_obj_lasso.aggregate(\"eventstudy\")\n", + "es_rf.aggregated_frameworks.bootstrap()\n", + "es_rf.plot_effects(title=\"Estimated ATTs by Group, LassoCV and LogisticRegressionCV()\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/doc/examples/did/py_panel_simple.ipynb b/doc/examples/did/py_panel_simple.ipynb new file mode 100644 index 00000000..6314e418 --- /dev/null +++ b/doc/examples/did/py_panel_simple.ipynb @@ -0,0 +1,367 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Python: Panel Data Introduction\n", + "\n", + "In this example, we replicate the results from the guide [Getting Started with the did Package](https://bcallaway11.github.io/did/articles/did-basics.html) of the [did-R-package](https://bcallaway11.github.io/did/index.html).\n", + "\n", + "As the [did-R-package](https://bcallaway11.github.io/did/index.html) the implementation of [DoubleML](https://docs.doubleml.org/stable/index.html) is based on [Callaway and Sant'Anna(2021)](https://doi.org/10.1016/j.jeconom.2020.12.001).\n", + "\n", + "The notebook requires the following packages:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "import numpy as np\n", + "\n", + "from sklearn.linear_model import LinearRegression, LogisticRegression\n", + "\n", + "from doubleml.data import DoubleMLPanelData\n", + "from doubleml.did import DoubleMLDIDMulti" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Data\n", + "\n", + "The data we will use is simulated and part of the [CSDID-Python-Package](https://d2cml-ai.github.io/csdid/index.html).\n", + "\n", + "A description of the data generating process can be found at the [CSDID-documentation](https://d2cml-ai.github.io/csdid/examples/csdid_basic.html#Examples-with-simulated-data).\n" + ] + 
}, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dta = pd.read_csv(\"https://raw.githubusercontent.com/d2cml-ai/csdid/main/data/sim_data.csv\")\n", + "dta.head()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To work with the [DoubleML-package](https://docs.doubleml.org/stable/index.html), we initialize a ``DoubleMLPanelData`` object.\n", + "\n", + "Therefore, we set the *never-treated* units in group column `G` to `np.inf` (we have to change the datatype to `float`)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# set dtype for G to float\n", + "dta[\"G\"] = dta[\"G\"].astype(float)\n", + "dta.loc[dta[\"G\"] == 0, \"G\"] = np.inf\n", + "dta.head()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, we can initialize the ``DoubleMLPanelData`` object, specifying\n", + "\n", + " - `y_col` : the outcome\n", + " - `d_cols`: the group variable indicating the first treated period for each unit\n", + " - `id_col`: the unique identification column for each unit\n", + " - `t_col` : the time column\n", + " - `x_cols`: the additional pre-treatment controls\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_data = DoubleMLPanelData(\n", + " data=dta,\n", + " y_col=\"Y\",\n", + " d_cols=\"G\",\n", + " id_col=\"id\",\n", + " t_col=\"period\",\n", + " x_cols=[\"X\"]\n", + ")\n", + "print(dml_data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## ATT Estimation\n", + "\n", + "The [DoubleML-package](https://docs.doubleml.org/stable/index.html) implements estimation of group-time average treatment effect via the `DoubleMLDIDMulti` class (see [model documentation](https://docs.doubleml.org/stable/guide/models.html#difference-in-differences-models-did)).\n", + "\n", + "The class basically behaves like other `DoubleML` classes and requires the specification of two learners (for more details on the regression elements, see [score documentation](https://docs.doubleml.org/stable/guide/scores.html#difference-in-differences-models)). The model will be estimated using the `fit()` method." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_obj = DoubleMLDIDMulti(\n", + " obj_dml_data=dml_data,\n", + " ml_g=LinearRegression(),\n", + " ml_m=LogisticRegression(),\n", + " control_group=\"never_treated\",\n", + ")\n", + "\n", + "dml_obj.fit()\n", + "print(dml_obj)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The summary displays estimates of the $ATT(g,t_\\text{eval})$ effects for different combinations of $(g,t_\\text{eval})$ via $\\widehat{ATT}(\\mathrm{g},t_\\text{pre},t_\\text{eval})$, where\n", + " - $\\mathrm{g}$ specifies the group\n", + " - $t_\\text{pre}$ specifies the corresponding pre-treatment period\n", + " - $t_\\text{eval}$ specifies the evaluation period\n", + "\n", + "This corresponds to the estimates given in `att_gt` function in the [did-R-package](https://bcallaway11.github.io/did/index.html), where the standard choice is $t_\\text{pre} = \\min(\\mathrm{g}, t_\\text{eval}) - 1$ (without anticipation).\n", + "\n", + "Remark that this includes pre-tests effects if $\\mathrm{g} > t_{eval}$, e.g. $ATT(4,2)$." 
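+ "\n",
+ "As a quick check of the formula above: the pre-test estimate $ATT(4,2)$ uses $t_\\text{pre} = \\min(4, 2) - 1 = 1$, i.e. it contrasts the outcome change from period $1$ to period $2$ for the group first treated in period $4$ with the never-treated control group. Under parallel trends and no anticipation, this estimate should be close to zero."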
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As usual for the DoubleML-package, you can obtain joint confidence intervals via bootstrap." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "level = 0.95\n", + "\n", + "ci = dml_obj.confint(level=level)\n", + "dml_obj.bootstrap(n_rep_boot=5000)\n", + "ci_joint = dml_obj.confint(level=level, joint=True)\n", + "ci_joint" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "A visualization of the effects can be obtained via the `plot_effects()` method.\n", + "\n", + "Remark that the plot used joint confidence intervals per default. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [ + "nbsphinx-thumbnail" + ] + }, + "outputs": [], + "source": [ + "fig, ax = dml_obj.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Effect Aggregation\n", + "\n", + "As the [did-R-package](https://bcallaway11.github.io/did/index.html), the $ATT$'s can be aggregated to summarize multiple effects.\n", + "For details on different aggregations and details on their interpretations see [Callaway and Sant'Anna(2021)](https://doi.org/10.1016/j.jeconom.2020.12.001).\n", + "\n", + "The aggregations are implemented via the `aggregate()` method." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Group Aggregation\n", + "\n", + "To obtain group-specific effects it is possible to aggregate several $\\widehat{ATT}(\\mathrm{g},t_\\text{pre},t_\\text{eval})$ values based on the group $\\mathrm{g}$ by setting the `aggregation=\"group\"` argument." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "aggregated = dml_obj.aggregate(aggregation=\"group\")\n", + "print(aggregated)\n", + "_ = aggregated.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The output is a `DoubleMLDIDAggregation` object which includes an overall aggregation summary based on group size." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Time Aggregation\n", + "\n", + "This aggregates $\\widehat{ATT}(\\mathrm{g},t_\\text{pre},t_\\text{eval})$, based on $t_\\text{eval}$, but weighted with respect to group size. Corresponds to *Calendar Time Effects* from the [did-R-package](https://bcallaway11.github.io/did/index.html).\n", + "\n", + "For calendar time effects set `aggregation=\"time\"`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "aggregated_time = dml_obj.aggregate(\"time\")\n", + "print(aggregated_time)\n", + "fig, ax = aggregated_time.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Event Study Aggregation\n", + "\n", + "Finally, `aggregation=\"eventstudy\"` aggregates $\\widehat{ATT}(\\mathrm{g},t_\\text{pre},t_\\text{eval})$ based on exposure time $e = t_\\text{eval} - \\mathrm{g}$ (respecting group size)." 
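+ "\n",
+ "As a quick worked example with the groups in this data: $\\widehat{ATT}(2,1,3)$ and $\\widehat{ATT}(3,2,4)$ both correspond to exposure time $e = 3 - 2 = 4 - 3 = 1$ and are therefore averaged, with group-size weights, into the event-study estimate at $e = 1$."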
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "aggregated_eventstudy = dml_obj.aggregate(\"eventstudy\")\n", + "print(aggregated_eventstudy)\n", + "fig, ax = aggregated_eventstudy.plot_effects()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Aggregation Details\n", + "\n", + "The `DoubleMLDIDAggregation` objects include several `DoubleMLFrameworks` which support methods like `bootstrap()` or `confint()`.\n", + "Further, the weights can be accessed via the properties\n", + "\n", + " - ``overall_aggregation_weights``: weights for the overall aggregation\n", + " - ``aggregation_weights``: weights for the aggregation\n", + "\n", + "To clarify, e.g. for the eventstudy aggregation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(aggregated_eventstudy)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here, the overall effect aggregation aggregates each effect with positive exposure" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(aggregated_eventstudy.overall_aggregation_weights)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If one would like to consider how the aggregated effect with $e=0$ is computed, one would have to look at the third set of weights within the ``aggregation_weights`` property" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "aggregated_eventstudy.aggregation_weights[2]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Taking a look at the original `dml_obj`, one can see that this combines the following estimates:\n", + "\n", + " - $\\widehat{ATT}(2,1,2)$\n", + " - $\\widehat{ATT}(3,2,3)$\n", + " - $\\widehat{ATT}(4,3,4)$" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(dml_obj.summary)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/doc/examples/index.rst b/doc/examples/index.rst index f363e19e..97fb0848 100644 --- a/doc/examples/index.rst +++ b/doc/examples/index.rst @@ -21,14 +21,14 @@ General Examples py_double_ml_pension.ipynb py_double_ml_sensitivity.ipynb py_double_ml_apo.ipynb + py_double_ml_irm_vs_apo.ipynb py_double_ml_learner.ipynb py_double_ml_firststage.ipynb py_double_ml_multiway_cluster.ipynb py_double_ml_ssm.ipynb py_double_ml_sensitivity_booking.ipynb - py_double_ml_did.ipynb - py_double_ml_did_pretest.ipynb py_double_ml_basic_iv.ipynb + py_double_ml_robust_iv.ipynb py_double_ml_plm_irm_hetfx.ipynb py_double_ml_meets_flaml.ipynb py_double_ml_rdflex.ipynb @@ -50,6 +50,20 @@ Effect Heterogeneity py_double_ml_pq.ipynb py_double_ml_cvar.ipynb + +.. _did_examplegallery: + +Difference-in-Differences ++++++++++++++++++++++++++ + +.. 
nbgallery:: + :name: case-studies-py-did + + did/py_panel_simple.ipynb + did/py_panel.ipynb + did/py_panel_data_example.ipynb + + R: Case studies --------------- @@ -62,10 +76,11 @@ These are case studies with the R package :ref:`DoubleML `. R_double_ml_pension.ipynb R_double_ml_did.ipynb R_double_ml_multiway_cluster.ipynb + R_double_ml_ssm.ipynb R_double_ml_basic_iv.ipynb -Sandbox ----------- +Sandbox/Archive +--------------- These are examples which are work-in-progress and/or not yet fully documented. @@ -75,3 +90,5 @@ These are examples which are work-in-progress and/or not yet fully documented. R_double_ml_pipeline.ipynb double_ml_bonus_data.ipynb + did/py_did.ipynb + did/py_did_pretest.ipynb diff --git a/doc/examples/py_double_ml_apo.ipynb b/doc/examples/py_double_ml_apo.ipynb index d36d136b..1f9925fc 100644 --- a/doc/examples/py_double_ml_apo.ipynb +++ b/doc/examples/py_double_ml_apo.ipynb @@ -17,7 +17,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -55,9 +55,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Average Individual effects in each group:\n", + "[ 0. 1.75 7.03 9.43 10.4 10.49]\n", + "\n", + "Average Potential Outcomes in each group:\n", + "[210.04 211.79 217.06 219.47 220.44 220.53]\n", + "\n", + "Levels and their counts:\n", + "(array([0., 1., 2., 3., 4., 5.]), array([615, 487, 465, 482, 480, 471]))\n" + ] + } + ], "source": [ "# Parameters\n", "n_obs = 3000\n", @@ -97,9 +112,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAnEAAAHHCAYAAADQ9g7NAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAk8dJREFUeJzs3Xd4FOXax/Hv7ibZ9EAIqUASQg0tdCkKKBqKFFFAjudQVGwgcFBUVAQrNhRFpBxfAQuKKGABUUAQFZSOIBBaQiSkEUgnbXfeP0JWlmQhfWY39+e6crGZnZ39zWbI3HnmmefRKYqiIIQQQggh7Ipe7QBCCCGEEKLipIgTQgghhLBDUsQJIYQQQtghKeKEEEIIIeyQFHFCCCGEEHZIijghhBBCCDskRZwQQgghhB2SIk4IIYQQwg5JESeEEEIIYYekiBPiGvr27Uvfvn2rdZsbN24kKioKV1dXdDod6enpAHz88ce0atUKZ2dn6tWrV63vKWpeXFwcOp2O5cuXqx1FCFFHSBEnbIqNjWXy5Mm0aNECd3d33N3diYyMZNKkSfz5559qx9OUsLAwdDpdmV8DBgywrJeWlsaoUaNwc3Nj4cKFfPzxx3h4eHDs2DHGjx9PREQE//vf/1i6dGm1Z9yxYwdz5syxFI32bu3atQwcOBA/Pz9cXFwIDg5m1KhR/PTTTzX6vitXrmT+/Pk1+h5aN2fOHJvH+5Vf1f0HkC1Hjhxhzpw5xMXF1cr7VdX7778vxb6oFk5qBxDa9N133zF69GicnJy455576NChA3q9nmPHjrFmzRoWLVpEbGwsoaGhakfVjKioKB577LFSy4ODgy2Pd+/eTVZWFi+++CL9+/e3LN+2bRtms5l33nmHZs2a1Ui+HTt28PzzzzN+/Hi7bulTFIV7772X5cuX07FjR6ZPn05gYCCJiYmsXbuWW265hd9++42ePXvWyPuvXLmSw4cPM23aNKvloaGhXLp0CWdn5xp5Xy0ZMWKE1XGanZ3Nww8/zB133MGIESMsywMCAmolz5EjR3j++efp27cvYWFhtfKeVfH+++/j5+fH+PHj1Y4i7JwUcaKUU6dOcffddxMaGsqWLVsICgqyev61117j/fffR6+/dkNuTk4OHh4eNRlVU0JCQvj3v/99zXVSUlIAShVRtpaL0ubNm8fy5cuZNm0ab731FjqdzvLcM888w8cff4yTU+3/atPpdLi6utb6+6qhffv2tG/f3vL9+fPnefjhh2nfvv01/w/k5eXh4uJy3d8dQohyUoS4ygMPPKAAyu+//17u14wbN07x8PBQTp48qQwcOFDx9PRUhg0bpiiKomRnZyvTp09XGjVqpLi4uCgtWrRQ3njjDcVsNlteHxsbqwDKsmXLSm0bUGbPnm35fvbs2QqgHD16VBk5cqTi5eWl+Pr6KlOmTFEuXbpU6vUff/yx0qlTJ8XV1VWpX7++Mnr0aCU+Pr7UekuWLFGaNm2quLq6Kl27dlW2b9+u9OnTR+nTp8919z80NFQZPHjwNdfp06ePAlh9jRs3TgkNDS21/Mr93bBhg9K7d2/F3d1d8fT0VAYNGqQcPny41PZLPg8/Pz/F1dVVadGihfL0009bfWZXf8XGxiqKoig//vij0qtXL8XHx0fx8PBQWrRoocycOfOa+9OmTRulb9++pZabTCYlODhYufPOOy3LPvvsM6VTp06Kp6en4uXlpbRt21aZP3/+NbdfltzcXMXX11dp1aqVUlRUVK7XnDp1SrnrrruU+vXrK25ubkr37t2V7777zmqdrVu3KoCyatUq5aWXXlJCQkIUo9Go3HzzzcqJEycs65X1MwwNDVUUpexjuOT/xdmzZ5Vhw4YpHh4eip+fn/LYY49Z5S95/61bt1rlsvX/YsuWLZZjwsfHRxk6dKhy5MgRq3VKjq2rlRwLV6rMz/9KqamppY7bkn367LPPlGeeeUYJDg5WdDqdcvHiRUVRFOX3339XoqOjFW9vb8XNzU256aablF9//dVqu3FxccrDDz+stGjRQnF1dVV8fX2Vu+66y3LcKoqiLFu2rMxju+SzLPm/uXXrVqVz586Kq6ur0rZtW8vzX331ldK2bVvFaDQqnTp1Uvbt21dq/44eParceeedSv369RWj0ah07txZ+frrr63WKcnx66+/Kv/9738VPz8/xd3dXRk+fLiSkpJiWa+s/+/l+R0jRFmkJU6U8t1339GsWTO6d+9eodcVFRURHR1N7969efPNN3F3d0dRFIYOHcrWrVu57777iIqK4ocffmDGjBkkJCTw9ttvVzrnqFGjCAsLY+7cufz++++8++67XLx4kY8++siyzssvv8ysWbMYNWoU999/P6mpqSxYsICbbrqJ/fv3W1q+/u///o8HH3yQnj17Mm3aNE6fPs3QoUPx9fWlcePG5cpTWFjI+fPnSy338PDAzc2NZ555hpYtW7J06VJeeOEFwsPDiYiIYPjw4Xz00UesXbuWRYsW4enpaWnl+Pjjjxk3bhzR0dG89tpr5ObmsmjRInr37s3+/fstl47+/PNPbrzxRpydnXnggQcICwvj1KlTfPvtt7z88suMGDGC48eP89lnn/H222/j5+cHQMOGDfnrr7+4/fbbad++PS+88AJGo5GTJ0/y22+/XXN/R48ezZw5c0hKSiIwMNCy/Ndff+XcuXPcfffdAGzatIkxY8Zwyy238NprrwFw9OhRfvvtN6ZOnVquz/bKbV+4cIFp06ZhMBiuu35ycjI9e/YkNzeXKVOm0KBBA1asWMHQoUP58ssvueOOO6zWf/XVV9Hr9Tz++ONkZGTw+uuvc8899/DHH38AxS19GRkZnD171nLsenp6XjODyWQiOjqa7t278+abb7J582bmzZtHREQEDz/8cIX2H2Dz5s0MHDiQpk2bMmfOHC5dusSCBQvo1asX+/btq/DlxMr+/MvrxRdfxMXFhccff5z8/HxcXFz46aefGDhwIJ07d2b27Nno9XqWLVvGzTffzC+//EK3bt2A4u4HO3bs4O6776ZRo0bExcWxaNEi+vbty5EjR3B3d+emm25iypQpvPvuuzz99NO0bt0awPIvwMmTJ/nXv/7Fgw8+yL///W/efPNNhgwZwuLFi3n66ad55JFHAJg7dy6jRo0iJibG0lr4119/0atXL0JCQnjqqafw8PDgiy++YPjw4Xz11VeljqFHH32U+vXrM3v2bOLi4pg/fz6TJ09m1apVAMyfP59HH30UT09PnnnmGaD2LjsLB6R2FSm0JSMjQwGU4cOHl3ru4sWLSmpqquUrNzfX8ty4ceMUQHnqqaesXrNu3ToFUF566SWr5XfddZei0+mUkydPKopSuZa4oUOHWq33yCOPKIBy8OBBRVGK/4o3GAzKyy+/bLXeoUOHFCcnJ8vygoICxd/fX4mKilLy8/Mt6y1durTcfyWX9dd1ydfcuXMt65X8tb57926r15fsU2pqqmVZVlaWUq9ePWXixIlW6yYlJSk+Pj5Wy2+66SbFy8tLOXPmjNW6V7Z2vvHGG1atbyXefvvtUu9dHj
ExMQqgLFiwwGr5I488onh6elqOj6lTpyre3t7lbjm7lnfeeUcBlLVr15Zr/WnTpimA8ssvv1iWZWVlKeHh4UpYWJhiMpkURfmn1ah169ZWx0DJ+x06dMiybPDgwWW2cNlqiQOUF154wWrdjh07Kp07d7Z8X5GWuKioKMXf319JS0uzLDt48KCi1+uVsWPHWr13eVriKvvzv9K1WuKaNm1q9bvCbDYrzZs3V6Kjo62Oz9zcXCU8PFy59dZbrZZdbefOnQqgfPTRR5Zlq1evLvPzU5R//m/u2LHDsuyHH35QAMXNzc3q/8ySJUtKbeeWW25R2rVrp+Tl5VntQ8+ePZXmzZtblpX83+7fv7/Vfv33v/9VDAaDkp6eblnWpk0baX0T1UI6JggrmZmZQNmtC3379qVhw4aWr4ULF5Za5+qWhQ0bNmAwGJgyZYrV8sceewxFUfj+++8rnXXSpElW3z/66KOW9wRYs2YNZrOZUaNGcf78ectXYGAgzZs3Z+vWrQDs2bOHlJQUHnroIVxcXCzbGz9+PD4+PuXO0717dzZt2lTqa8yYMZXav02bNpGens6YMWOs8hsMBrp3727Jn5qayvbt27n33ntp0qSJ1Tau7C9mS0lr5Ndff43ZbC53vhYtWhAVFWVpYYDiVqcvv/ySIUOG4ObmZtl+Tk4OmzZtKve2bSk5Pr28vMq1/oYNG+jWrRu9e/e2LPP09OSBBx4gLi6OI0eOWK0/YcIEq2PgxhtvBOD06dNVyv3QQw9ZfX/jjTdWapuJiYkcOHCA8ePH4+vra1nevn17br31VsuxXxGV/fmX17hx4yzHAsCBAwc4ceIE//rXv0hLS7Mc1zk5Odxyyy1s377dkuPK1xUWFpKWlkazZs2oV68e+/btK3eGyMhIevToYfm+5CrDzTffbPV/pmR5yc/mwoUL/PTTT4waNYqsrCxL1rS0NKKjozlx4gQJCQlW7/XAAw9Y/b+78cYbMZlMnDlzptx5hSgvuZwqrJScHLOzs0s9t2TJErKyskhOTi6z87KTkxONGjWyWnbmzBmCg4NLnXRLLnVU5Rdb8+bNrb6PiIhAr9dbhhk4ceIEiqKUWq9EyV2EJRmuXs/Z2ZmmTZuWO4+fn5/VHadVdeLECaD4RFMWb29v4J8TTtu2bSv1PqNHj+aDDz7g/vvv56mnnuKWW25hxIgR3HXXXdftgD569GiefvppEhISCAkJYdu2baSkpDB69GjLOo888ghffPEFAwcOJCQkhNtuu41Ro0ZZDb1SXiX7nJWVVa71z5w5U2a3gCuPvys/t6uL4Pr16wNw8eLFCmct4erqSsOGDUtttzLbLDlWW7ZsWeq51q1b88MPP1T4hqKq/PzLIzw83Or7kuN63LhxNl+TkZFB/fr1uXTpEnPnzmXZsmUkJCSgKIrVOuV19c+15I+zq7tKlCwv+dmcPHkSRVGYNWsWs2bNKnPbKSkphISE2Hyv6jiGhLBFijhhxcfHh6CgIA4fPlzquZKToa2xmIxGY6V/6dtqMTKZTJXehtlsRqfT8f3335fZf+p6fZnUVtIa8fHHH1v1OStRXXdgurm5sX37drZu3cr69evZuHEjq1at4uabb+bHH3+8Zt+z0aNHM3PmTFavXs20adP44osv8PHxsSrQ/P39OXDgAD/88APff/8933//PcuWLWPs2LGsWLGiQllbtWoFwKFDhxg+fHil9vdabO3rlcVDdW3zStVx/Fd2m1X5+ZfHla1p8M9x/cYbbxAVFVXma0r+bz766KMsW7aMadOm0aNHD3x8fNDpdNx9990VajW0tQ/X+3mXvMfjjz9OdHR0metePSRQTRxDQtgiRZwoZfDgwXzwwQfs2rXL0sG4skJDQ9m8eTNZWVlWrXHHjh2zPA///LV69UC012qpO3HihNVf+SdPnsRsNls6dkdERKAoCuHh4bRo0eKaGUu2d2WrV2FhIbGxsXTo0KEce1r9IiIigOIi6FotfCWthWUV3le61qVVvV7PLbfcwi233MJbb73FK6+8wjPPPMPWrVuv+d7h4eF069aNVatWMXnyZNasWcPw4cMxGo1W67m4uDBkyBCGDBmC2WzmkUceYcmSJcyaNatC4+L17t2b+vXr89lnn/H0009ft8AIDQ0lJiam1PKrj7+KKM8l6ooq7/FfktfWPvn5+Vla4erXr1/mwM5l/Z+q7M+/MkqOa29v7+tu+8svv2TcuHHMmzfPsiwvL6/UftXEzwT++b/l7OxcrZ9DTeUVdY/0iROlPPHEE7i7u3PvvfeSnJxc6vmK/EU5aNAgTCYT7733ntXyt99+G51Ox8CBA4HiX+h+fn5s377dar3333/f5rav7pO3YMECAMs2R4wYgcFg4Pnnny+VWVEU0tLSAOjSpQsNGzZk8eLFFBQUWNZZvny5qrMbREdH4+3tzSuvvEJhYWGp51NTU4HiO0xvuukmPvzwQ+Lj463WuXK/S07uV+/ThQsXSm27pIUkPz//ujlHjx7N77//zocffsj58+etLqUCls+5hF6vt9x9W7L9wsJCjh07RmJi4jXfy93dnSeffJKjR4/y5JNPlnksfvLJJ+zatQsoPv527drFzp07Lc/n5OSwdOlSwsLCiIyMvO7+Xc3Dw6NCl/LKIzQ0FIPBcN3jPygoiKioKFasWGH1czx8+DA//vgjgwYNsiyLiIggIyPDanaVkgGRr1TVn39Fde7cmYiICN58880yu22UHNdQ3Kp19c94wYIFpVoTbR3bVeXv70/fvn1ZsmRJmcfmlVkrwsPDw2FmThHqkpY4UUrz5s1ZuXIlY8aMoWXLlpYZGxRFITY2lpUrV6LX60v1fyvLkCFD6NevH8888wxxcXF06NCBH3/8ka+//ppp06ZZ/ioHuP/++3n11Ve5//776dKlC9u3b+f48eM2tx0bG8vQoUMZMGAAO3fu5JNPPuFf//qXpeUsIiKCl156iZkzZxIXF8fw4cPx8vIiNjaWtWvX8sADD/D444/j7OzMSy+9xIMPPsjNN9/M6NGjiY2NZdmyZRXqE5eQkMAnn3xSarmnp2elLv15e3uzaNEi/vOf/9CpUyfuvvtuGjZsSHx8POvXr6dXr16W4vjdd9+ld+/edOrUiQceeIDw8HDi4uJYv349Bw4cAIpPnlA8TMbdd9+Ns7MzQ4YM4YUXXmD79u0MHjyY0NBQUlJSeP/992nUqJHVDQG2jBo1iscff5zHH38cX1/fUi0W999/PxcuXODmm2+mUaNGnDlzhgULFhAVFWXpm5aQkEDr1q0ZN27cdacjmjFjBn/99Rfz5s1j69at3HXXXQQGBpKUlMS6devYtWsXO3bsAOCpp57is88+Y+DAgUyZMgVfX19WrFhBbGwsX331VaUu/3fu3JlVq1Yxffp0unbtiqenJ0OGDKnwdq7k4+PDyJEjWbBgATqdjoiICL777jvLINBXeuONNxg4cCA9evTgvvvuswwx4uPjw5w5cyzr3X333Tz55JPccccdTJkyxTI8TYsWLaxuCqjqz7+i9Ho9H3zwAQMHDqRNmzZMmDCBk
JAQEhIS2Lp1K97e3nz77bcA3H777Xz88cf4+PgQGRnJzp072bx5Mw0aNLDaZlRUFAaDgddee42MjAyMRiM333wz/v7+Vc67cOFCevfuTbt27Zg4cSJNmzYlOTmZnTt3cvbsWQ4ePFjhbXbu3JlFixbx0ksv0axZM/z9/W32fRXimlS4I1bYiZMnTyoPP/yw0qxZM8XV1VVxc3NTWrVqpTz00EPKgQMHrNYtGdS0LFlZWcp///tfJTg4WHF2dlaaN29earBfRSkeTuC+++5TfHx8FC8vL2XUqFFKSkqKzSFGjhw5otx1112Kl5eXUr9+fWXy5MllDvb71VdfKb1791Y8PDwUDw8PpVWrVsqkSZOUmJgYq/Xef/99JTw8XDEajUqXLl0qPNgvNoYYuXKYh4oMMVJi69atSnR0tOLj46O4uroqERERyvjx45U9e/ZYrXf48GHljjvuUOrVq6e4uroqLVu2VGbNmmW1zosvvqiEhIQoer3eMtzIli1blGHDhinBwcGKi4uLEhwcrIwZM0Y5fvz4dfe7RK9evRRAuf/++0s99+WXXyq33Xab4u/vr7i4uChNmjRRHnzwQSUxMdGyTslQGuPGjSv3e5Zs19fXV3FyclKCgoKU0aNHK9u2bbNar2Sw35LPpVu3bjYH+129erXV8rKG+MjOzlb+9a9/KfXq1Sv3YL9XK2vA3dTUVOXOO+9U3N3dlfr16ysPPvigcvjw4TKH3tm8ebPSq1cvxc3NTfH29laGDBlSarBfRSkexLdt27aKi4uL0rJlS+WTTz4p9d7V8fO/1hAjV3+mJfbv36+MGDFCadCggWI0GpXQ0FBl1KhRypYtWyzrXLx4UZkwYYLi5+eneHp6KtHR0cqxY8eU0NDQUsfK//73P6Vp06aKwWAoc7DfqwHKpEmTrJaV/AzfeOMNq+WnTp1Sxo4dqwQGBirOzs5KSEiIcvvttytffvmlZR1b/7fLGj4mKSlJGTx4sOLl5SWD/Yoq0SmK9LYU9mXOnDk8//zzpKamWgatFUIIIeoa6RMnhBBCCGGHpIgTQgghhLBDUsQJIYQQQtgh6RMnhBBCCGGHpCVOCCGEEMIOSREnhBBCCGGH6tRgv0VFRezfv5+AgIBqmdhZCCGEEDXPbDaTnJxMx44dq23eaEdQpz6J/fv3V3kuUCGEEEKoY9euXXTt2lXtGJpRp4q4gIAAoPggCAoKqpZtFhUV8d133wHFU8TIXwjCUWj52NZCNi1ksEXL2YRjquljLjExkW7dulnO46KYZv5nb9++nTfeeIO9e/daJmkumW+ysLCQZ599lg0bNnD69Gl8fHzo378/r776KsHBweV+j5JLqEFBQeWa97M8ioqKLPP4NWrUSH5ZCoeh5WNbC9m0kMEWLWcTjqm2jjnpCmVNM59GTk4OHTp0YOHChaWey83NZd++fcyaNYt9+/axZs0aYmJiGDp0qApJhRBCCCHUp5k/zwYOHMjAgQPLfM7Hx4dNmzZZLXvvvffo1q0b8fHxNGnSpDYiCiGEEEJohmaKuIrKyMhAp9NRr149m+vk5+eTn59v+T4rK6vac+h0OsLDwy2PhXAUWj62tZBNCxls0XI24ZjkmFOHJmds0Ol0Vn3irpaXl0evXr1o1aoVn376qc3tzJkzh+eff77U8r///ttmnziz2UxBQUGlcgshaoeLi4v0jRGiDjl79iyNGze+5vm7LrK7lrjCwkJGjRqFoigsWrTomuvOnDmT6dOnW75PSEggMjLS5voFBQXExsZiNpurLa8Qovrp9XrCw8NxcXFRO4oQQqjGroq4kgLuzJkz/PTTT3h7e19zfaPRiNFotHyfmZlpc11FUUhMTMRgMNC4ceNy/5V/dUOmNCMLR6HVY9tsNpOQkEBCQoLlLjg1simKgslkAsBgMGjm8wFtZxOOSY45ddhNEVdSwJ04cYKtW7dabmWuLkVFReTm5hIcHIy7u3u5X6coChcuXADA19dXDlzhMLR8bPv7+xMTE8Pu3bsZMWKEKkNomEwmVq9eDcDIkSM1NYyHlrMJxyTHnDo08ylnZ2dz8uRJy/exsbEcOHAAX19fgoKCuOuuu9i3bx/fffcdJpOJpKQkoPjkUh2XVEr+gpDLM0Jon7OzM3q9XvrFCSHqNM0UcXv27KFfv36W70v6so0bN445c+bwzTffABAVFWX1uq1bt9K3b99qy6Gl1gYhRNnk/6kQQmioiOvbt2+pPjhX0uBNtEIIIYQQqpFrEUIIIYQQdkiKODs3fvx4hg8fjk6nu+bXnDlziIuLs/n877//rvauCCGEEKICNHM5VVRNYmKi5fGqVat47rnniImJsSzz9PTk/PnzAGzevJk2bdpYvb667/YVQghRtxSazGTmFakdo06RIq4aaOGO1sDAQMtjHx8fdDqd1TLAUsQ1aNCg1HNClEULx7YtBoOBoKAgVafdaty4seWxlmg5m3BMP59K48vYQnIKTERfyKW5/7XHcRXVQ4o4GxRFIbfAVK519S5uAOVe/3rcXWSgRKE+nU6Hl5eX2jHKpNPpcHV1pXXr1hgMBlUyGAwGevfurcp7X4+WswnHcqnQxMwNR3n3l1igAeG+7mQWyKxHtUWKOBtyC0x4PfO9Ku+d9fJAPIw196Pp2bNnqfG1srOza+z9hKgt38b/VenXDmnS5vorCSEsdsVfZNxn+4lJzQFg4g1NePP2Nni5SmlRW+STroNWrVpF69at1Y4hhBDCDhUUmXlx83Fe/ekkJrNCkLeRD0Z2YGDrALWj1TlSxNng7mIg6+WB111PQeHChYsA+PrWR0fVL4O6u9Ts5aHGjRvTrFmzGn0PYf+0PO2Woijk5OTw3XffMXz4cFWm+CkqKtLsNENazibs2+HETMZ9vp/9CcVzkY/pGMKCO9ri7aLns88+A+SYq03yKdug0+nKdUlTURTyLxddHi7qTMQthBBC1CSTWWHez6d4bmMMBSYzDdydef/O9ozsEAwU/+Egap8UcXVQWlqaZe7ZEvXq1cPV1VWlREIIIbTq5PkcJny+n9/iiq863d46gKUj2xPoLecMtUkRVwf179+/1LLPPvuMu+++W4U0QgghtEhRFBbvPMOMb4+QW2jCy+jE28PaMKFrY7nqpBFSxNm55cuXl1o2fvx4xo8fX2p5WFiYzEErhBDius6mX+L+Lw7y4/FUAPpGNODD0VGE+bqrnExcSYo4IYQQQgDFrW/Ldv/N498eIf1SIa5OeuYObs2jvcLR66X1TWukiBNCCCEEp87n8OCXf/LTyeLZfbo2rseKMVG08tfmoN9CirhqoeWpiYSoCi0f2waDAX9/f1Wn3QoKCrI81hItZxPaU2Qy8/b208z5MYZLhWbcnPW8EN2KqTeG42TQX38DyDGnFiniqkjLUxMJURVaPra1Mu1W3759VXnv69FyNqEtBxIymLj6IHvPZgBwczM/ltzVngg/jwptR445dUgRJ4QQQtQxlwpNvLDpOG9uO4XJrFDPzZk3h0TKnad2Roo4IYQQog75+dR5Hlj9JyfOF895elf7IN4d3lbGfbND
UsRVkaIoXLxYPABi/fr15S8Y4TC0fGyXTLv1/fffM2TIENWm3VqzZg0AI0aM0NQ0Q1rOJtSTcamQJ9Yf4X+/xwMQ5G1k4Yh2DG8bVOVtyzGnDvmUq4GMvSYcldaPbZPJVKff/1q0nE3ULkVR+PLPRP779V+cy8wDYOINTXhtcCT13Jyr7X3kmKt9UsQJIYQQDupQYiZT1x1m26k0AJr5ebD0rvb0beancjJRHcp377AQKgkLC2P+/Plqx6hT5syZQ0BAAHq9ng0bNlgt0+l0rFu3Tt2AQojrupBbwOQ1h+j41s9sO5WGq5Oe2be14OBjfaSAcyBSxDmApKQkpk6dSrNmzXB1dSUgIIBevXqxaNEicnNz1Y5XY8LCwtDpdDa/ypp6rDrMmTOHqKioGtl2ZWzbtg2dTkd6enq51ivrKykpCYCjR4/y/PPPs2TJEs6dO8ctt9zC8ePHeeGFF1iyZAmJiYkMHDiwypm19hkK4SjyCk28vf0ULV/9ifd3xGFWim9cOPpEP2bf1hI3Z3WG5BE1Qy6n2rnTp0/Tq1cv6tWrxyuvvEK7du0wGo0cOnSIpUuXEhISwtChQ8t8bWFhIc7O1dcforbt3r3b0gdjx44d3HnnncTExODt7Q2Am5ub1fr2vr/V5crPqIS/vz8Ap06dAmDYsGEAXLhwgdjYWMsyLd3cIIT4R5HJzIo9Z3lhUwx/pxf3e2sT4MU7w9tyc3NpeXNU0hJXzUx5Fym4EENe4i4KLsRgyrtYo+/3yCOP4OTkxJ49exg1ahStW7emadOmDBs2jPXr1zNkyBDLujqdjkWLFjF06FA8PDx4+eWXAVi0aBERERG4uLjQsmVLPv74Y8tr4uLi0Ol0HDhwwLIsPT0dnU7Htm3bgH9aeNavX0/79u1xdXXlhhtu4PDhw1ZZf/31V2688Ubc3Nxo3LgxU6ZMIScnx/J8SkoKQ4YMwc3NjfDwcD799NNr7nvDhg0JDAwkMDAQX19foLgYCQwMJC8vj3r16rFq1Sr69OmDq6urZXsffPABrVu3xtXVlVatWvH+++9bbffJJ5+kRYsWuLu707RpU2bNmkVhYSEAy5cv5/nnn+fgwYOWVqzly5dbPt8lS5Zw++234+7uTuvWrdm5cycnT56kb9++eHh40LNnT0uhVOLrr7+mU6dOuLq60rRpU55//nmKioqsfm4ffPABd9xxB+7u7jRv3pxvvvnG8vPp168f8M8dpNdrgSz5jK780uv1zJkzx3K86PV69Ho9r732Gvfcc49l2ZVF3PU+x7NnzzJmzBh8fX3x8PCgS5cu/PHHHzY/Q0VRmDNnDk2aNMFoNBIcHMyUKVOuuS9C1HWKovDlwXO0e3MbE1cf5O/0PBr5uPK/kR3YP/0mKeAcnLTEVYOS1h1T9llSNz1IXvxmy3OuobfSsP9inLwaV/v7pqWl8eOPP/LKK6/g4VH26NpXt5zMmTOHV199lfnz5+Pk5MTatWuZOnUq8+fPp3///nz33XdMmDCBRo0aWYqD8poxYwbvvPMOgYGBPP300wwZMoTjx4/j7OzMqVOnGDBgAC+99BIffvghqampTJ48mcmTJ7Ns2TIAxo8fz7lz59i6dSvOzs5MmTKFlJSUyn04lz311FPMmzePjh07Wgq55557jvfee4+OHTuyf/9+Jk6ciIeHB+PGjQPAy8uL5cuXExwczKFDh5g4cSJeXl488cQTjB49msOHD7Nx40Y2by7+Ofv4+Fje78UXX+Stt97irbfe4sknn+Rf//oXTZs2ZebMmTRp0oR7772XyZMn8/333wPwyy+/MHbsWN59911uvPFGTp06xQMPPADA7NmzLdt9/vnnef3113njjTdYsGAB99xzD2fOnKFx48Z89dVXVq2QV7dAltfjjz9OWFgYEyZMIDExEUVRUBSF8PBwHnnkERITEy3rXu9zzM7Opk+fPoSEhPDNN98QGBjIvn37MJvNNj/Dr776irfffpv/Ll9AkyZNuJieTlxsLN/G/1Uqa0FBAWezE8l2LWT92aPo9VX/e7Ss97kWs9nMKc/iu3e11kKp0+lo2LCh5bFwPIqisPnEeZ7ZcJQ9l2dbaODuzMxbmvNIzzBca/myqRxz6pAirop0Oh3e3t6Y8i6SssG6gAPIO7OJ1M0P4T/wEwyu9av1vU+ePImiKLRs2dJquZ+fH3l5xc3pkyZN4rXXXrM8969//YsJEyZYvh8zZgzjx4/nkUceAWD69On8/vvvvPnmmxUu4mbPns2tt94KwIoVK2jUqBFr165l1KhRzJ07l3vuuYdp06YB0Lx5c95991369OnDokWLiI+P5/vvv2fXrl107doVgP/7v/+jdevWFftQrjJt2jRGjBhhlXHevHmWZeHh4Rw5coQlS5ZYirhnn33Wsn5YWBiPP/44n3/+OU888QRubm54enri5OREYGBgqfebMGECo0aNAopb9Hr06MGsWbOIjo4GYOrUqVaf//PPP89TTz1lee+mTZvy4osv8sQTT1gVcePHj2fMmDEAvPLKK7z77rvs2rWLAQMGWLVC1qtX77qfSaNGjay+Dw0N5a+//sLT09Py+iv3rWQ+xCuXXe9zXLlyJampqezevduSr1mzZpbXl/UZxsfHExgYSFRUFAaDgYYNG9KiefMy90Gn0+Hi4kLr1q2rpYCrDL1eT4cOHQBUm/rLFoPBQP/+/dWOIWqA2ayw/mgyr209yY644is9nkYD/70pgsf6NMXbVZ0uI3LMqUOKuGpiyk0pVcCVyDuzCVNuSrUXcbbs2rULs9nMPffcQ35+vtVzXbp0sfr+6NGjlpafEr169eKdd96p8Pv26NHD8tjX15eWLVty9OhRAA4ePMiff/5pdYlUURTMZjOxsbEcP34cJycnOnfubHm+VatW5SpKruXK/c3JyeHUqVPcd999TJw40bK8qKjIqjVt1apVvPvuu5w6dYrs7GyKiopK9SGzpX379pbHAQEBALRr185qWV5eHpmZmXh7e3Pw4EF+++03y6VtKB5rKS8vj9zcXNzd3Utt18PDA29v70q3Uv7yyy9Wc6JWtJ9geT7HAwcO0LFjR0sBVx4jR45k/vz5TJw4kU6dOtGlSxe6du2quQJJixRFISu/iJTsAlKz80nJLiAlO5+s/CIuFZq4VGgmt9BEXmFxH1K9Toder0OvK37s7mzA02jAy+iEp4sTXq5O+Lg64e9pxN/TiJ+HCwZ93WhdMeVdxJSbgjk/A73RB4O7f6397r6WvEITqw6cY97PpziclAWAi0HPQz1Defrm5vh7GVVOKNQgRVw1MednXPv5gms/XxnNmjVDp9MRExNjtbxp06ZA6Y79gM3LrraUtHJcOehrSf+wisjOzubBBx8ss49TkyZNOH78eIW3WR5X7m92djYA//vf/+jevbvVeiWFws6dO7nnnnt4/vnniY6OxsfHh88//5x58+aV6/2uLIhKLimUtcxsNlsyPf/881athSVcXf+ZAufqQkun01m2UVHh4eFVKo7L8zlW5pJu48aNiYmJ4ZX1n3PgwAEWLVrEmjVrmDt3rhR
yl+UWFHEoMYuDiZmcPJ/D6bQcTqflcvpCLpl5RdffQCXpdODn7oK/p5FgHyNN6rsTWt+NsMv/htZ3I9jbFSeDfXezLsr6u1a7xJRH3IVcFu2I48Nd8aTlFv/u9TI68VCPUKbd1JQgmSqrTpMirooURSE9PR03Z89rrqd38bnm85XRoEEDbr31Vt577z0effTRChdoAK1bt+a3336zXM4D+O2334iMjASw9HFITEykY8eOAFY3OVzp999/p0mTJgBcvHiR48ePWy6HdurUiSNHjlhdUrtSq1atKCoqYu/evZbLqTExMdcdNqMiAgICCA4O5vTp05bO+lfbsWMHoaGhPPPMM5ZlZ86csVrHxcWl2kYm79SpEzExMTY/l/JwcXEBqn+09JJju6RoK1Gez7F9+/Z88MEHXLhwoczWOFufoZubG926daNbt24MHjyYhx9+mLi4OCIiIkply8/PZ//+/XTu3FmVIs9kMrFr1y4ABga3rPI0Q1e3AOlcG3I03Yntp9PYEXeB/QkZHD+fw7Um0fBwMeDvaSTA3UCXguM4G/RkhHTC3dUFNyc9bs4GdDowK2BWFMyKgskMuYUmsvOLyM4vIuvyvxcvFZKSXUBabgGKAqk5BaTmFPBXclaZ7+2k1xHm607Lhh60aOhJS39PWvh50NLfk0Avo+b7SZnyLpYq4KBmu8TYYjYr/Hg8lfd/i2P9sWTLz7xJPTce7BHKQz1Cqe/uUitZyquoqMhyw9XQoUNl2q1aIp9yNTCbzZgM9XANvZW8M5tKPe8aeisGd/8aee/333+fXr160aVLF+bMmUP79u3R6/Xs3r2bY8eOWV2eLMuMGTMYNWoUHTt2pH///nz77besWbPG0uHczc2NG264gVdffZXw8HBSUlKs+oxd6YUXXqBBgwYEBATwzDPP4Ofnx/Dhw4Hi/mE33HADkydP5v7778fDw4MjR46wadMm3nvvPVq2bMmAAQN48MEHWbRoEU5OTkybNq3SnfRtef7555kyZQo+Pj4MGDCA/Px89uzZw8WLF5k+fTrNmzcnPj6ezz//nK5du7J+/XrWrl1rtY2wsDBiY2M5cOAAjRo1wsvLC6OxcpcynnvuOW6//XaaNGnCXXfdhV6v5+DBgxw+fJiXXnqpXNsIDQ1Fp9Px3XffMWjQIEu/PVtSUlIsfSZLNGjQoMzLqmazucypt673OY4ZM4ZXXnmF4cOHM3fuXIKCgti/fz/BwcH06NGjzM/ws88+w2QycallMEYXI9u2bcPFxcUy/MnVFEWxuotXDZVplS5LSQvQydMH2ZrXme15Hdlb2I4MU+njKsDLSIcgb1oFeNLU152IBh40beBOk3pueBiLf6UXFRWxenXxjSgj72pfpRNqkcnM+ZwCUrILSM7O52z6Jc5cvET8xUucuZjLmfTix0VmhZPnczh5Pof1R60v9XsZnWjR0INW/p608vekdYAXrRp60szPAxcnbbTeaaFLzJGkLD7ed5ZP957lbMY//0f7N/djUq9wBrf213Rr59Xdd0TNkyKumuSZjTTsv5jUzQ9ZFXLFTfFLauw/f0REBPv37+eVV15h5syZnD17FqPRSGRkJI8//rjlhgVbhg8fzjvvvMObb77J1KlTCQ8PZ9myZfTt29eyzocffsh9991H586dadmyJa+//jq33XZbqW29+uqrTJ06lRMnThAVFcW3335raSVq3749P//8M8888ww33ngjiqIQERHB6NGjLa9ftmwZ999/P3369CEgIICXXnqJWbNmVc8Hddn999+Pu7s7b7zxBjNmzMDDw4N27dpZbrgYOnQo//3vf5k8eTL5+fkMHjyYWbNmMWfOHMs27rzzTtasWUO/fv1IT09n2bJllR5YODo6mu+++44XXniB1157DWdnZ1q1asX9999f7m2EhIRYbpCYMGECY8eOtQx7Uparb4SB4svIN9xwQ7nf83qfo4uLCz/++COPPfYYgwYNoqioiMjISBYuXAiU/RnWq1ePV199lUP6HMxmM6GhocyaNcuq/56jURSF/XFnWb7uQ35MHsTxooetnnfXF9KraUNubBZA18b16BDkTWAtXz5zMugJ9Ha95vuazArnMvM4kZrD8dRsYlKzOX75ceyFXLLyi9h7NoO9Z627lRj0OiIauNPa35NWlwu71gHFhV5td9BXo0uMoijsT8hgzaEk1h1O5EjyP63ePq5OjO3SmEd6htHS/9pXekTdpVO0PsN1NTp79iyNGzfm77//LnWHXl5eHrGxsYSHh1v1RboeRVG4cOECUNyZ35yfXnxJpCADvYt2OsXWpG3bttGvXz8uXrxY5RsRhHZcfWzX1uWw8gz1UVBQQHx8PCkpKXTv3l21y6m//fYbAHPvnlih1q74i7ms3J/Ayn0Jlk7qAAZMdHY5Rl+3vfQwHibSOZbQcQdx8S1deF9LcUvcaqD4hhE1L23lF5k4dT6XmNRsjqVkE5OSzdGU4sdZ+bZbUoO9XWkd4EnLKwq71v5eBHnXzKXZggsxJHzUzubzIWMPVfjnUJb0S4VsPXmeTcdT+f5YCmcuXrI856TXMai1P//u1IjbIwNqfZiQqqjpY+5a5++6TFriqpnBtb7DF21CiIozmxU2nUhl4a/W/ZxcDNDP5XcGuO3kJtcD1NNb90GsiRag2mR0MhAZ6EVkoHWLqqIUt94dTS4u6I6mZHMsOYtjqdkkZuZzLjOPc5l5bDlx3up13q5Olha7lpcLu1b+nkQ0cK/SpUaDu3+1d4lRFIW/0y/x+5l0fo+/yM64C+z+Ox3zFU0n7s4GBrRqyB3tghjcOoB6bjKrjCg/KeKEEKIG5RYU8X+7/mbBr7GcPP/PDCV9IxpwT6dGDGmcTc4Xd9p8fU3cFKUFOp2OEB83Qnzc6N+iodVz6ZcKLS12R5OziEnN5mhyNqfScsjMK2LX3+ns+jvd6jXOBh3N/Ur63XnR0t+DJvWK75oN9na19Be0xeBav9JdYkxmhaSsPM5cvMSxlGwOJ2VxJCmLPxMzScoq3U+sZUMP+rdoyK0tGtK/uR/uLnIqFpUjR46osr59+5bZ+V2Iuiw7v4hFO+KY9/MpUrILgOJWpPFdi/s5tWhY3M/JlHcRkwo3RWlZPTdnuofWp3uodeGUX2Ti5Plcjl5usTt2uRXvWEo2uYUmjiRnX+5XllRqm96uTgR7uxLkZaSBhws+bs7Uc3WmnpsTPq7OuDrrcTHoMQS/jVNAJnpzLiadO0VOXhQdU8gviiO30MTF3EIu5BZy8VIhCRnFN3WczcijyFz270AnvY6oYO/i/WlSnz4RDWhcr3pv2BJ1lxRx1UBupRaOSsvHtk6nw8PDQ7WhK3Q6neWmiysz5BWaWPBrLK9tPcmFy+N6hdV34/G+zRjbpRGeV7UIVaUF6FrZSoZ20frQHhVhdDLQJtCLNlddmjWbiy9bHrvcYncsJZvjqdmcy8wjISOPnAITmXlFZOYVP1d+GUDiddeC4ps0QrxdadHQw5KxbaA3HYK9cbOjvm2V5ajHnNbJjQ2XldzYEBYWVu3DWgghyq+8NzakpK
QQEBBQ4RknaooC/Ho6jY/2nCUlu/gSWrC3K3d1CKZvRAObMx4MadIGuGKcuDp0U1RtycorIiHzEucy8knMyuNibiHpeYWkXyok/VIRGZcKyS8yU2g2U1BkptCsUGgy42LQ4+JU3EJnvDzOXj03Z3zdnanv5kywtytN6rvRpJ4bQd6udWZWCzXIjQ1l0+6f2bWs5O62goICKeKE0LiS8eHUmjf1aqfTclm8M46Yy608DTxcuKdTI/o186O853W5KarmeLk60crVi1b+jjtcjaibNFPEbd++nTfeeIO9e/eSmJjI2rVrLQPFQvFdPrNnz+Z///sf6enp9OrVi0WLFtHcxgTZFeXk5IS7uzupqak4Oztr5uQgRF1zvQF0zWYzGRkZGI1G9CpPx5VfaOLT/Ql8eyQZs1nB6GzgznZBDG8bgNHJ8S+hCSHUpZkiLicnhw4dOnDvvfeWOY/k66+/zrvvvsuKFSsIDw9n1qxZREdHc+TIkQqN62aLTqcjKCiI2NjYUtMsXYuiKFy6VDzOj5ubm/QFEA5DrWM7+WJyudb7+++/OX36NF26dFFlnLh9f19k62+/k1doBrMfvcP9uL97E+q7q395t6ioiPXr1wMwePBgTfdtFI5Bjjl1aOZTHjhwIAMHDizzOUVRmD9/Ps8++yzDhg0D4KOPPiIgIIB169Zx9913V0sGFxcXmjdvTkFBQblfU1RUxMaNGwEYMGCAHLjCYah1bB91uf7UPTqdjtOnT9dCmtIKTWaW7z7LhiOJ9DIW4O2sZ+ZNzegW2kCVPLbk5uaqHUHUMXLM1T67qDhiY2NJSkqif//+lmU+Pj50796dnTt32izi8vPzreZyy8oqe+LmK+n1+gq17BUVFVku/7i6ukoRJxyGWsd2eW5UMJlMtZCktDMXc5m37TRnLuaiBwI8jTTycaNzo3qq5BFC2LZw4ULeeOMNkpKS6NChAwsWLKBbt24211+9ejWzZs0iLi6O5s2b89prrzFo0CDL89nZ2Tz11FOsW7eOtLQ0wsPDmTJlCg899FBt7E6Z7KLjV1JS8Zg/AQEBVssDAgIsz5Vl7ty5+Pj4WL4iIyNrNKcQwnFtPpHKY98c4czFXHzcnHm6fzNC67vLHYlCaNCqVauYPn06s2fPZt++fXTo0IHo6GhSUlLKXH/Hjh2MGTOG++67j/379zN8+HCGDx/O4cOHLetMnz6djRs38sknn3D06FGmTZvG5MmT+eabb2prt0qxiyKusmbOnElGRobl68iRI2pHEkLYmUKTmfd3xLHgl1gKTWY6N6rHguFt6RRST+1oQggb3nrrLSZOnMiECROIjIxk8eLFuLu78+GHH5a5/jvvvMOAAQOYMWMGrVu35sUXX6RTp0689957lnV27NjBuHHj6Nu3L2FhYTzwwAN06NCBXbt21dZulWIXRVxgYCAAycnWHZ6Tk5Mtz5XFaDTi7e1t+SoZmFMIIcrjQm4hz3x/jB+OpaDTwT2dGzHr1hb4yPyWQqgiKyuLzMxMy9eVXaZKFBQUsHfvXqsuWHq9nv79+7Nz584yt7tz506r9QGio6Ot1u/ZsyfffPMNCQkJKIrC1q1bOX78OLfddls17V3F2UURFx4eTmBgIFu2bLEsy8zM5I8//qBHjx4qJhNCOKq4C7k8/u1fxKRk42F0YtatLRjVIRi5AV0I9URGRlp1k5o7d26pdc6fP4/JZKpQF6ykpKTrrr9gwQIiIyNp1KgRLi4uDBgwgIULF3LTTTdVw55VjmZ64WdnZ3Py5EnL97GxsRw4cABfX1+aNGnCtGnTeOmll2jevLlliJHg4GCrseTU4u3trXYEIWqElo9td3f3Gtv2wXOZvPrTCXILTDSq58as/i0I9DbWaoaq0vLPTjim2jjmjhw5QkhIiOV7o7H0/8uasmDBAn7//Xe++eYbQkND2b59O5MmTSI4OLhUK15t0UwRt2fPHvr162f5fvr06QCMGzeO5cuX88QTT5CTk8MDDzxAeno6vXv3ZuPGjdUyRlxVODk5MXjwYFUzCFETtHxsGwwGunTpUiPb3nryPAt+jcVkVmgb6M3MW5qVmu+0pjNUlZZ/dsIx1dYx5+Xldd1i0c/PD4PBUKEuWIGBgddc/9KlSzz99NOsXbvWsp/t27fnwIEDvPnmm6oVcZq5nNq3b18URSn1tXz5cqB4XKgXXniBpKQk8vLy2Lx5My1atFA3tBDCoaw/msz87acxmRVubNqAOdEtyizghBDa5eLiQufOna26YJnNZrZs2WKzC1aPHj2s1gfYtGmTZf3CwkIKCwtLzeZkMBgwm83VvAflJ7+dhBACWHc4iWW74gEY0iaQ+7o1kf5vQtip6dOnM27cOLp06UK3bt2YP38+OTk5TJgwAYCxY8cSEhJi6VM3depU+vTpw7x58xg8eDCff/45e/bsYenSpUDxpeI+ffowY8YM3NzcCA0N5eeff+ajjz7irbfeUm0/pYiroqKiIn744Qeg+E4WGexXOAotH9smk4n9+/cD0LFjxypPu/XFwXN8uvcsAHd1CObfnRtxvfqtujNUJy3/7IRj0toxN3r0aFJTU3nuuedISkoiKiqKjRs3Wm5eiI+Pt2pV69mzJytXruTZZ5/l6aefpnnz5qxbt462bdta1vn888+ZOXMm99xzDxcuXCA0NJSXX35Z1cF+5X92NcjMzFQ7ghA1QsvHdnVN8fPln/8UcPd0bsSoDsG1nqEmaPlnJxyT1o65yZMnM3ny5DKf27ZtW6llI0eOZOTIkTa3FxgYyLJly6orXrWQIk4IUe2+jf9L7QjXZS7MZv3hc3y8Px2A/3QM4K4KFHBCCKE2zdzYIIQQtcV06TwbtnzOB5cLuMHuv3FL/gpMl86rG0wIISpAijghRJ1iLszm51++5P/OFfd16ee2h6Fuv1CQcoDMg+9jLsxWOaEQQpSPFHFCiDrleGIq7ye0x4yeHsZD3O2+2XIXakHKAcz5GeoGFEKIcpIiTghRZ6Rm5zP31zQKFSfaOJ9mrOeGUsOIKIXavVlBCCGuJDc2VAMtT70jRFVo+diu6HQ7uQUmXtx0gvQ8MyFOqTzgvQ6DTim1ns65/Ptcm1P+VJSWf3bCMckxV/t0iqKU/i3moM6ePUvjxo35+++/adSokdpxhHBYWrs71azAC5ti2H82g/ruzjwbsgmvi7+XWs/FPwqfztPRO3vWWrYhTdrU2nsJYa/k/F02aYkTQji8T/adZf/ZDIxOBmbd2oIw98ZkHsyjIOWAZR0X/yi8O0yq1QJOCCGqQoo4IYRD23nmIl8dPAfAozeGEdHAA/DAp/N0zPkZKIW56Jzd0Rt9pIATQtgVKeKqyGQysXnzZgD69++vqal3hKgKLR/bZrOZgwcPAtChQ4dSk1KXOJtxifnbTwMwtG0gN4Y3sDynd/asUtFW3gxq0PLPTjgmOebUIUVcFSmKwoULFyyPhXAUWj62FUUhKyvL8rgslwpNzN1ykrxCE20DvRnfpXGtZ1CLln92wjHJMacO7fzpKIQQ1WjJzjOcTb9EAw8XZvSLwKC/3pT2Q
ghhX6SIE0I4nJ9PpbH15Hl0Oni8bwT13JzVjiSEENVOijghhENJyspn0Y44AEZHhRAZ4KVuICGEqCFSxAkhHIbJrDBv2ykuFZpoHeDJqA7BakcSQogaI0WcEMJhfH4ggeOp2bi7GHisj/SDE0I4Nrk7tRpoeeodIapCy8e2s7N1P7dTaTl8+WciAI/0CqOhZ81nvzqDlmj5ZycckxxztU+m3RJCVLvannaryKQw/Zu/OHMxl97hDZjRL6JW378qZNotIa5Pzt9lk8upQgi7t+pgAmcu5uLt6syDPZqoHUcIIWqFXE4VQti1Ky+jPtwjFG9X7V7iLEtVWi2lFU+Iuk2KuCoymUxs3boVgH79+slUI8JhaPnYNpvNHDp0CLOisCLWgNms0Cvcl57hvrWeAaBdu3aam3ZLqz874ZjkmFOHFHFVpCgKqamplsdCOAotH9uKopCRkUFiZh5nLvrg7erCgzeEqpKh5LGWaPlnJxyTHHPq0M6fjkIIUQH5RWYSMvIAuLdbY3xkVgYhRB0jRZwQwi7FX8zFjEKbQC/6NvNTO44QQtQ6KeKEEHZnd3w6F/MK0aNjYvcmyJC+Qoi6SIo4IYRdyS808eGueAACvIw0quemciIhhFCHFHFCCLuy+s9EUnMKMBr0hPi4qh1HCCFUI3enVgO5lVo4Kq0d26nZ+aw7nARAqK8nTirn09KwIlfT2s9OOD455mqfTLslhKh2NTXt1pvbTvHL6TTaBXnz4sBWdb4vnAz2K+oKOX+XTbt/RgohxBWOJmfzy+k0dDq4r5vczCCEEFLECSE0T1Hg/y7fzNC/RUPCG7irnEgIIdQnfeKqyGQy8csvvwBw4403Sp8A4TC0dGxvO32eE6nZuDobuKdjI8xmM38dOQJAm8hIVfqmaSGDLVr62Ym6QY45dUgRV0WKopCYmGh5LISj+Db+L347exSA9Hhf1X4pF5jMfLznLAAjOwRR390Zk8nExQsXAPX+3ymKonoGW+T3kqhtcsypQzt/OgohRBk2HE0hLacAP08XhrUJVDuOEEJohhRxQgjNyi008eXBcwDcHRWCs0F+ZQkhRAn5jSiE0Kx1h5PIyi8ixMeVm2V+VCGEsGI3RZzJZGLWrFmEh4fj5uZGREQEL774olx7F8JBZVwq5OvLA/v+u1MjDHoZVEQIIa5kNzc2vPbaayxatIgVK1bQpk0b9uzZw4QJE/Dx8WHKlClqxxNCVLPVfyaSV2giws+DHuG+ascRQgjNsZsibseOHQwbNozBgwcDEBYWxmeffcauXbtUTiaEqG6p2fl8fywFgP90biQD+wohRBnspojr2bMnS5cu5fjx47Ro0YKDBw/y66+/8tZbb9l8TX5+Pvn5+Zbvs7Kyqj2Xk5MTY8aMqfbtCqE2g8HATTfdpMp7f3HwHEUmM22DvIkK8Sn1vJrZtJTBFvm9JGqbHHPqsJsi7qmnniIzM5NWrVphMBgwmUy8/PLL3HPPPTZfM3fuXJ5//vlaTCmEqKrU7Hy2nDwPwD2dQqQVTgghbLCbGxu++OILPv30U1auXMm+fftYsWIFb775JitWrLD5mpkzZ5KRkWH5OnJ5dHUhhHZ9dSgJk0mhXZA3kQFeascRQgjNspuWuBkzZvDUU09x9913A9CuXTvOnDnD3LlzGTduXJmvMRqNGI1Gy/eZmZnVnstkMrFz504AevToIVONCIdhNps5duwYAK1ataqVaaXScgvYdDwVgNFRwZrKpsUMtsjvJVHb5JhTh3Z+61xHbm5uqV+SBoMBs9msUqJiiqLw999/8/fff8twJ8KhKIrC+fPnOX/+fK0d22sPJVFkMhMZ6EXbIG9NZdNiBlvk95KobXLMqcNuWuKGDBnCyy+/TJMmTWjTpg379+/nrbfe4t5771U7mhCiGlzMLWTj5TtS746SvnBCCHE9dlPELViwgFmzZvHII4+QkpJCcHAwDz74IM8995za0YQQ1WDt4UQKTWZa+nvSPth2K5wQQohidlPEeXl5MX/+fObPn692FCFENcvML2TjsX/6wkkrXPl8G/9XmctNJhP7TOkAuP19pMz+SUOatKnJaEKIWmA3feKEEI5r/ZEU8ouKZ2fo1Kie2nGEEMIuSBEnhFBVfpGJ9UeSARjRLkha4YQQopykiBNCqGrzifNk5RcR4GWkR2h9teMIIYTdsJs+cVplMBgYOXKk5bEQjkJvMNCrVy/L45pgMit881cSAMPaBmLQl68drjay2UMGW7ScTTgmOReqQ4q4KtLpdDg5yccoHI+Omv9lvPPMRZIy8/EyOtG/uV+5X1cb2ewhgy1aziYck5wL1SGXU4UQqlCANYcSARgcGYDRSYoOIYSoCCmbq8hkMrF7924AunbtKn/9CodhNps5ceIEAM2bN6/2aaUOJ2Zy6nwOzgY9g1v7ayqbvWSwRcvZhGOSc6E65H92FSmKQmxsLLGxsTLViHAoiqKQnJxMcnJyjRzbaw8V94W7pbkf3q7OmspmLxls0XI24ZjkXKgOKeKEELXubMYl9p5NR6eD4W2D1I4jhBB2SYo4IUStW3+keI7Urk3qE+RtVDmNEELYJynihBC1KrfAxJaT5wG4vXWAymmEEMJ+SREnhKhVm0+cJ7/QROP6bjLRvRBCVIEUcUKIWmNWYP3R4hsabm8dIFNsCSFEFcgQI0KIGmcuzMacn8Hu+AySMvPxcDHQL6KB2rGEEMKuSRFXRQaDgTvuuMPyWAhHoTcY6NGjh+VxZZkunSfzwEIKUg/yTcZoIJwbvU7iVBQKzuWfpaEmslWFFjLYouVswjHJuVAdUsRVkU6nw9XVVe0YQlQ7HeDsXLHx265mLsy2FHDnTA04UhiODjN9zN+TeTAGn87T0Tt7qpKtqrSQwRYtZxOOSc6F6pA+cUKIGmPOz6Ag9SAAWy91BqCDy0kaGDIpSDmAOT9DzXhCCGHXpCWuikwmE/v27QOgU6dO0owsHIbZbObUqVMAREREVGrqJqUwF4BLigu/57cF4GbXPaWeVyNbVWkhgy1aziYck5wL1SH/s6tIURROnjzJyZMnZaoR4VAURSExMZHExMRKH9s6Z3cAduW3IV9xIdCQRkvn+FLPq5GtqrSQwRYtZxOOSc6F6pAiTghRY/RGH5wbRvFzXhQAN7nuR3d5XBEX/yj0Rh/1wgkhHNrChQsJCwvD1dWV7t27s2vXrmuuv3r1alq1aoWrqyvt2rVjw4YNpdY5evQoQ4cOxcfHBw8PD7p27Up8fHwZW6sdUsQJIWqM3tmT5Mb3crYoAGeK6OF6GCgu4Lw7TKrUTQ1CCHE9q1atYvr06cyePZt9+/bRoUMHoqOjSUlJKXP9HTt2MGbMGO677z7279/P8OHDGT58OIcPH7asc+rUKXr37k2rVq3Ytm0bf/75J7NmzVL1hg7pEyeEqFGbYgsB6BnmTeOOz6Nzdkdv9JECTghRY9566y0mTpzIhAkTAFi8eDHr16/nww8/5Kmnniq1/jvvvMOAAQOYMWMGAC+++CKbNm3ivffeY/HixQA888wzDBo0iNdff93yuoiIiFrY
G9ukJU4IUWOy84v4JTYNgEFtw3Cu3xwnzxAp4IQQNaagoIC9e/fSv39/yzK9Xk///v3ZuXNnma/ZuXOn1foA0dHRlvXNZjPr16+nRYsWREdH4+/vT/fu3Vm3bl2N7Ud5SBEnhKgxW0+lUVBkJtTXnZb+UrgJIaomKyuLzMxMy1d+fn6pdc6fP4/JZCIgIMBqeUBAAElJSWVuNykp6Zrrp6SkkJ2dzauvvsqAAQP48ccfueOOOxgxYgQ///xzNe1dxUkRJ4SoEQrwQ0xx/5MBLf1lnlQhRJVFRkbi4+Nj+Zo7d26tvK/ZbAZg2LBh/Pe//yUqKoqnnnqK22+/3XK5VQ3SJ66KDAYDQ4cOtTwWwlHoDQa6d+9ueVxRR5Oy+PviJYxOBvpW8zypVc3mKBls0XI24Zhq61x45MgRQkJCLN8bjcZS6/j5+WEwGEhOTrZanpycTGBgYJnbDQwMvOb6fn5+ODk5ERkZabVO69at+fXXXyu1L9VBWuKqSKfT4eHhgYeHBzqdtDUIx6Gj+Bek0WisVCvaxsutcDc29cXdpXp/qVc1m6NksEXL2YRjqq1zoZeXF97e3pavsoo4FxcXOnfuzJYtWyzLzGYzW7ZsscwpfLUePXpYrQ+wadMmy/ouLi507dqVmJgYq3WOHz9OaGhoVXer0qQlTghR7bLyi9gRdxGAga38VU4jhKhrpk+fzrhx4+jSpQvdunVj/vz55OTkWO5WHTt2LCEhIZbLsVOnTqVPnz7MmzePwYMH8/nnn7Nnzx6WLl1q2eaMGTMYPXo0N910E/369WPjxo18++23bNu2TY1dBKSIqzKz2czBg8VzQ3bo0EGmtxEOQ1EUYmNjAQgPD6/QX9c/n0qj0GSmaQN3mvl5aCqbI2WwRcvZhGPS2rlw9OjRpKam8txzz5GUlERUVBQbN2603LwQHx9vlbFnz56sXLmSZ599lqeffprmzZuzbt062rZta1nnjjvuYPHixcydO5cpU6bQsmVLvvrqK3r37l3r+1dCirgqMpvNHDt2DIB27dqpfuAKUV3MZjNnz54FIDQ0tEL9XDafSAWgf/OGmsvmSBlsKU+2b+P/qvT2hzRpU+nXCsekxXPh5MmTmTx5cpnPldV6NnLkSEaOHHnNbd57773ce++91RGvWqj/KQshHMrptFxi03JxMujpU803NAghhPiHFHFCiGpV0grXI7Q+nkZp7BdCiJoiRZwQotoUmsz8fKp4hoZbmvupnEYIIRyb/JkshAOrSr+nyvgjPp3s/CIaeLjQIdinVt9bCCHqGmmJE0JUm5JLqbc090MvN0QKIUSNkiJOCFEtzucUcCAhA4BbmtXMXalCCCH+IZdTq8hgMDBo0CDLYyEchd5goEuXLpbH1/PTyfMoCrQN8ibQu/Qo6mpmc9QMtmg5m3BMci5UhxRxVaTT6fDxkb4/wvHoAHd393KtqyiwxTI2XM3f0FCRbI6cwRYtZxOOSc6F6pDLqUKIKvsrOZOkzHzcnA30DK2vdhwhhKgT7KqIS0hI4N///jcNGjTAzc2Ndu3asWfPHlUzmc1mDh06xKFDhzCbzapmEaI6KYrCmTNnOHPmDIqiXHPdzcfPA8WT3Ruda/5SSkWyOXIGW7ScTTgmOReqw24up168eJFevXrRr18/vv/+exo2bMiJEyeoX1/dv/rNZjOHDx8GoHXr1pqYakSI6mA2mzlz5gwAjRo1stnPJb/QxI4zxZPd31JD02xVNpujZ7BFy9mEY5JzoTrspoh77bXXaNy4McuWLbMsCw8PVzGREAJgZ3w6+YUmgrxdaenvqXYcIYSoM+ymVP7mm2/o0qULI0eOxN/fn44dO/K///3vmq/Jz88nMzPT8pWVlVVLaYWoO7adLL6U2ieiATI0nBBC1B67KeJOnz7NokWLaN68OT/88AMPP/wwU6ZMYcWKFTZfM3fuXHx8fCxfkZGRtZhYCMd3MbeQA+eKx4brGyHTbAkhRG2ymyLObDbTqVMnXnnlFTp27MgDDzzAxIkTWbx4sc3XzJw5k4yMDMvXkSNHajGxEI7v59NpKAq08vckqIbHhhNCCGHNboq4oKCgUi1prVu3Jj4+3uZrjEYj3t7eli8vL6+ajilEnbLtVPGl1L7NpBVOCCFqm90Ucb169SImJsZq2fHjxwkNDVUpkRB125mLucSm5WIw6Ogd7qt2HCGEqHPs5u7U//73v/Ts2ZNXXnmFUaNGsWvXLpYuXcrSpUtVzWUwGLjtttssj4VwFHqDgY4dO1oeX23byTQAujaqh5exdn+VXC9bXclgi5azCcck50J12E0R17VrV9auXcvMmTN54YUXCA8PZ/78+dxzzz2q5tLpdDRo0EDVDELUBB3Y7IJgVor7w4E6l1Kvla0uZbBFy9mEY5JzoTrspogDuP3227n99tvVjiFEnXcoMZO0nAI8jE50aSTzJQohRHmcPn2apk2bVtv27KZPnFaZzWaOHj3K0aNHZaoR4VAUReHs2bOcPXu21NRNJTc09Aqvj7Oh9n+NXCtbXcpgi5azCcck58LyadasGf369eOTTz4hLy+vytuTIq6KzGYzBw4c4MCBA3LgCodiNps5ffo0p0+ftjq284tM7Igrnmarn0pjw9nKVtcy2KLlbMIxybmwfPbt20f79u2ZPn06gYGBPPjgg+zatavS26tUEbdx40Z+/fVXy/cLFy4kKiqKf/3rX1y8eLHSYYQQ2vf7mXTyCk0EeBlpHSD9roQQoryioqJ45513OHfuHB9++CGJiYn07t2btm3b8tZbb5Gamlqh7VWqiJsxYwaZmZkAHDp0iMcee4xBgwYRGxvL9OnTK7NJIYSdKLmUKtNsCSFE5Tg5OTFixAhWr17Na6+9xsmTJ3n88cdp3LgxY8eOJTExsVzbqVQRFxsbaxl496uvvuL222/nlVdeYeHChXz//feV2aQQwg6kXypkf0LxNFv9ZIBfIYSolD179vDII48QFBTEW2+9xeOPP86pU6fYtGkT586dY9iwYeXaTqXuTnVxcSE3NxeAzZs3M3bsWAB8fX0tLXRCCMfza+wFFAVaNPQk2NtV7ThCCGFX3nrrLZYtW0ZMTAyDBg3io48+YtCgQej1xW1q4eHhLF++nLCwsHJtr1JFXO/evZk+fTq9evVi165drFq1CiieQaFRo0aV2aQQwg78Els8NtxNETIelBBCVNSiRYu49957GT9+PEFBQWWu4+/vz//93/+Va3uVupz63nvv4eTkxJdffsmiRYsICQkB4Pvvv2fAgAGV2aQQQuNSs/M5lpyNTge9wmSaLSGEqKhNmzbx5JNPlirgFEWxzAXv4uLCuHHjyrW9SrXENWnShO+++67U8rfffrsym7NrBoOBm2++2fJYCEehNxho37695fEvsSkAtAn0xtfdWc1opbLV1Qy2aDmbcExyLiyfiIgIEhMT8ff3t1p+4cIFwsPDMZlMFdpepYo4g8FQZoi0tDT8/f0rHMKe6XQ6AgIC1I4hRLXTAfXq1bN8/8vlabZuaqr+pdSrs9XVDLZoOZtwTHIuLB9bg29nZ2f
j6lrxfsaVKuJshcjPz8fFxaUymxRCaFhCRh6n03Ix6HX0CKundhwhhLArJcOv6XQ6nnvuOdzd3S3PmUwm/vjjD6Kioiq83QoVce+++64lxAcffICnp6dViO3bt9OqVasKh7BnZrOZkydPAsXTaZTcYSKEvVMUxTJW0fbk4hHYo4J98DaqeykVrLMFBQWh09X+iHVayGCLlrMJxyTnwmvbv38/UPx/89ChQ1YNXi4uLnTo0IHHH3+8wtutUBFX0udNURQWL15sdd3bxcWFsLAwFi9eXOEQ9sxsNrN3714AmjZtKgeucBglv5QVReG38/UAuLGpNm5ouPKEERAQoEofHC1ksEXL2YRjknPhtW3duhWACRMm8M477+Dt7V0t261QERcbGwtAv379WLNmDfXr16+WEEII7bpUZCYhIw9ng4HuofJ/XgghKmvZsmXVur1K9YkrqSiFEI4vLacAcKdLYx/cnaVFRwghKmLEiBEsX74cb29vRowYcc1116xZU6FtV6qIu/POO+nWrRtPPvmk1fLXX3+d3bt3s3r16spsVghRhm/j/1LtvRVF4UJuAQA3hqt/V6oQQtgbHx8fS79UHx+fat12pYq47du3M2fOnFLLBw4cyLx586qaSQihETkFJvJNZlyd9XRpXL2/fIQQoi648hJqdV9OrVTPw+zs7DKHEnF2dpa5U4VwIGmXW+G6Na6H0UkupQohRFVcunTJMvc8wJkzZ5g/fz4//vhjpbZXqSKuXbt2lvlSr/T5558TGRlZqSBCCG0xK3DxUiEAvcK1cVeqEELYs2HDhvHRRx8BkJ6eTrdu3Zg3bx7Dhg1j0aJFFd5epS6nzpo1ixEjRnDq1CnLNBtbtmzhs88+q3P94fR6PTfddJPlsRCO4mhKNvvz6uHmbGBqSD2141jR6/W0adPG8riuZrBFy9mEY5JzYfns27fPMlzbl19+SWBgIPv37+err77iueee4+GHH67Q9ipVxA0ZMoR169bxyiuv8OWXX+Lm5kb79u3ZvHkzffr0qcwm7ZZeryckJETtGEJUu19iL3DBbOTWsIY4a+xSqk6no0EDdW+00EIGW7ScTTgmOReWT25uLl5eXgD8+OOPjBgxAr1ezw033MCZM2cqvL1KFXEAgwcPZvDgwZV9uRBCw4pMCjviLgJyV6oQQlSXZs2asW7dOu644w5++OEH/vvf/wKQkpJSqQGAK93mmZ6ezgcffMDTTz/NhQsXgOJmwoSEhMpu0i6ZzWZOnz7N6dOnMZvNascRolocTMwgJ7+ICPdC/PW5NudLVouiKCQlJZOUlKxaNi1ksEXL2YRjknNh+Tz33HM8/vjjhIWF0b17d3r06AEUt8p17NixwturVEvcn3/+Sf/+/fHx8SEuLo77778fX19f1qxZQ3x8vKXTXl1gNpv5448/AGjSpIn0BRAO4ZfTF9Ch0NEtixPHj+PfsKGmpm4ym80cPx4DQMOGfqpNu6V2Blu0nE04JjkXls9dd91F7969SUxMpEOHDpblt9xyC3fccUeFt1epT3n69OmMHz+eEydO4Orqalk+aNAgtm/fXplNCiE0osBkZueZ4kupvu6lhxISQghReYGBgXTs2NGq0O3WrRutWrWq8LYq1RK3e/dulixZUmp5SEgISUlJldmkEEIj9vydTl6hiUBPFzxd8tSOI4QQDiMnJ4dXX32VLVu2kJKSUurS8+nTpyu0vUoVcUajscxBfY8fP07Dhg0rs0khhEb8Elvcx7VnmC+6vCyV0wghhOO4//77+fnnn/nPf/5DUFCQZTquyqpUETd06FBeeOEFvvjiC6D4dvb4+HiefPJJ7rzzzioFEkKoJ7fQxO74dKB4gN9zRyt+y7sQQoiyff/996xfv55evXpVy/Yq1Sdu3rx5ZGdn4+/vz6VLl+jTpw/NmjXDy8uLl19+uVqCCSFq3674dApNZkJ8XAn3dVc7jhBCOJT69evj61t9M+BUqiXOx8eHTZs28dtvv3Hw4EGys7Pp1KkT/fv3r7ZgQoja98vpNABubCpjwwkhRHV78cUXee6551ixYgXu7lX/Q7ncRZyvry/Hjx/Hz8+Pe++9l3feeYdevXpVW5OgvdLr9ZbPQG6pFvYsK7+I/QkZANwY7oter6d169aA9o5tLWTTQgZbtJxNOCY5F5bPvHnzOHXqFAEBAYSFheHs7Gz1/L59+yq0vXIXcQUFBWRmZuLn58eKFSt47bXXLFNH1GV6vZ4mTZqoHUOIKtsZdxGTWSG8gTuN6rkBaPZGJZ1Op3o2LWSwRcvZhGOSc2H5DB8+vFq3V+4irkePHgwfPpzOnTujKApTpkzBzc2tzHU//PDDagsohKgdv8RevpQq02wJIUSNmD17drVur9xF3CeffMLbb7/NqVOnAMjIyCAvT8aQMpvNnD17FoBGjRpJM7KwSxdzCzmUWDxs0I1NizvdKorC+fPnAfDz86vyrfDVSQvZtJDBFi1nE45JzoXll56ezpdffsmpU6eYMWMGvr6+7Nu3j4CAAEJCQiq0rXIXcQEBAbz66qsAhIeH8/HHH9OggfzFbjab+e233wAYOXKkHLjCLv0adwFFgZb+nvh7GoHiY/vo0aMA9OrVS1NTN2khmxYy2KLlbMIxybmwfK6etnTixIlVmra03J+yr6+v5S+7fv364eIi0/EI4Sh+lbtShRCixlX3tKXlLuJKbmwAWLFihVxKFcJBpGTncywlG50OeodV3/hFQgghrO3evZsHH3yw1PLKTlsqNzYIUcf9crp4mq22gd7Ud3e+ztpCCCEqq7qnLS13S9wnn3zCoEGDyM7ORqfTkZGRwcWLF8v8EkLYj5K7Um+SS6lCCFGjSqYtLSwsBKo+band3tjw6quvMnPmTKZOncr8+fNVyyGEPTubcYnYtFwMeh09wuqrHUcIIRzavHnzuOuuu2jYsKFl2tKkpCR69OhRqWlLK3T7yKBBg8jIyCA2NpYGDRrw6quvkp6ebnk+LS2NyMjICoeoqN27d7NkyRLat29f4+8lhCMruZQaFeyDl7FSs/AJIYQop5JpS9evX8+7777L5MmT2bBhAz///DMeHh4V3l6Ffmtv3LiR/Px8y/evvPIKo0aNol69egAUFRURExNT4RAVkZ2dzT333MP//vc/XnrppRp9r/LQ6/V0797d8lgIe6Hwz1ypN0WUblXX6/W0aNHS8lhLtJBNCxls0XI24ZjkXHh9ZrOZ5cuXs2bNGuLi4tDpdISHhxMYGIiiKJUaz7FKf3orilKVl1fKpEmTGDx4MP37979uEZefn29VdGZlZVV7Hr1eT9OmTat9u0LUtLi0XBIy8nA26OnWpF6p53U6HYGBAbUfrBy0kE0LGWzRcjbhmORceG2KojB06FA2bNhAhw4daNeuHYqicPToUcaPH8+aNWtYt25dhbdrV9dPPv/8c/bt28fu3bvLtf7cuXN5/vnnaziVEPZp++VWuK6N6+HuLIPBCiFETVm+fDnbt29ny5Yt9OvXz+q5n376ieHDh/PRRx8xduzYCm23Qm2eOp2uVHNfbU3n8vfffz
N16lQ+/fRTqwHyrmXmzJlkZGRYvo4cOVLtucxmMwkJCSQkJGA2m6t9+0LUBAX4Jba4P9yN4WWPDacoCmlpaaSlpanS6n4tWsimhQy2aDmbcExyLry2zz77jKeffrpUAQdw880389RTT/Hpp59WeLsVaolTFIXx48djNBZPy5OXl8dDDz1k6Yx35aXL6rZ3715SUlLo1KmTZZnJZGL79u2899575Ofnl5paxmg0WrICZY7NUlVms9kyyrJMNSLsRUxKNqnZ+bg5G+hSxqVUKD62//rrL0B7UzdpIZsWMtii5WzCMcm58Nr+/PNPXn/9dZvPDxw4kHfffbfC261QETdu3Dir7//973+XWqeiTYHldcstt3Do0CGrZRMmTKBVq1Y8+eST8ktKiAoouZR6Q2h9XAzyy1YIIWrShQsXCAiw3U81ICCgUuPsVqiIW7ZsWYXfoLp4eXnRtm1bq2UeHh40aNCg1HIhhG0ms8KvJZdSm8o0W0IIx7Rw4ULeeOMNkpKS6NChAwsWLKBbt24211+9ejWzZs0iLi6O5s2b89prrzFo0KAy133ooYdYsmQJb7/9NtOmTbtuFpPJhJOT7ZLLYDBQVFR03e1cza5ubBBCVN3hpCwyLhXiZXQiKthH7ThCCFHtVq1axfTp01m8eDHdu3dn/vz5REdHExMTg7+/f6n1d+zYwZgxY5g7dy633347K1euZPjw4ezbt69UQ9HatWv5/fffCQ4OLneeq7ujXa2y3dHs+jrKtm3bZLYGISqo5FJqz3BfDPrauTFJCCFq01tvvcXEiROZMGECkZGRLF68GHd3d5tzu7/zzjsMGDCAGTNm0Lp1a1588UU6derEe++9Z7VeQkICjz76KJ9++inOzuWfa3rcuHH4+/vj4+NT5pe/v3+luqNJS5wQdUihyczOM8X9LmzdlSqEEFqVlZVldZPi1TcwAhQUFLB3715mzpxpWabX6+nfvz87d+4sc7s7d+5k+vTpVsuio6Otxm4zm8385z//YcaMGbRp06ZCuWuqO5pdt8QJISpmf0ImOflF+Lq70CbQW+04QghRIZGRkVYtWHPnzi21zvnz5zGZTKVuJAgICCApKanM7SYlJV13/ddeew0nJyemTJlSDXtSPaQlror0ej2dO3e2PBZCy36JLb6U2ivcl+tdSdXr9TRr1szyWEu0kE0LGWzRcjbhmGrrXHjkyBFCQkIs39vqY1bd9u7dyzvvvMO+fftqbXzc8pAiroqK5yhsoXYMIa4rv9DEH2fSAbipHHel6nS6CnXcrU1ayKaFDLZoOZtwTLV1LvTy8sLb+9pXEfz8/DAYDCQnJ1stT05OJjAwsMzXBAYGXnP9X375hZSUFJo0aWJ53mQy8dhjjzF//nzi4uIqsTdVJ3+iCVFH7Po7nfwiE4HeRpo39FQ7jhBC1AgXFxc6d+7Mli1bLMvMZjNbtmyhR48eZb6mR48eVusDbNq0ybL+f/7zH/78808OHDhg+QoODmbGjBn88MMPNbcz1yEtcVWkKAopKSkA+Pv7a6qZVYgrlUyz1Tu8AeU5ShUgIz0dAJ969cr1mtqihWxayGCLlrMJx6S1c+H06dMZN24cXbp0oVu3bsyfP5+cnBwmTJgAFE9MEBISYulTN3XqVPr06cO8efMYPHgwn3/+OXv27GHp0qUANGjQgAYNGli9h7OzM4GBgbRs2bJ2d+4K0hJXRSaTiZ9++omffvoJk8mkdhwhypRTYGLv2QygfJdSAcwmE3/++Sd//vknZo0d21rIpoUMtmg5m3BMWjsXjh49mjfffJPnnnuOqKgoDhw4wMaNGy03L8THx5OYmGhZv2fPnqxcuZKlS5fSoUMHvvzyS9atW6f5yQSkJU6IOmDnmQsUmcyE1ncntL672nGEEKLGTZ48mcmTJ5f53LZt20otGzlyJCNHjiz39tXqB3claYkTog745fIAv71lmi0hhHAYUsQJ4eDSLxVy8Fzx4Jg3hTe4ztpCCCHshRRxQji43+IuoCjQvKEngd61M6aSEEKImidFnBAOruRS6o1yKVUIIRyK3NgghANLzc7naHI2Oh30lrlSRTX5Nv6vSr92SJOKzTkphLBNirgq0uv1REVFWR4LoSUlY8O1CfCmgbtLhV6r1+tp2rSp5bGWaCGbFjLYouVswjHJuVAdUsRVkV6vp3Xr1mrHEKJMP58qvpR6U0TFb2jQ6XQ0atSouiNVCy1k00IGW7ScTTgmOReqQ8plIRxU3IVc4i7k4mTQ0yusvtpxhBBCVDNpiasiRVG4cKH4kpWvr6/qU40IUaKkFa5LIx88jRX/r64A2VlZAHh6eWlq6iYtZNNCBlu0nE04JjkXqkNa4qrIZDLx448/8uOPP2piqhEhABQFfr58V2rfZn6V2obZZGL//v3s379fc1M3aSGbFjLYouVswjHJuVAdUsQJ4YAOJ2WSllOAh9GJLo181I4jhBCiBkgRJ4QD2nb5UmqvsPo4G+S/uRBCOCL57S6Egyk0mdkRV9w3pW9E5S6lCiGE0D4p4oRwMLvj08ktMOHn6UJkgJfacYQQQtQQKeKEcDDbLt/Q0CfCD7lBTAghHJcUcUI4kKz8IvacTQegb9OKD/ArhBDCfsg4cVWk1+tp27at5bEQavot9gImk0J4A3ea1Her0rb0ej2hoaGWx1qihWxayGCLlrMJxyTnQnVIEVdFer2edu3aqR1DCAC2nToPVM8NDTqdzlIIaI0Wsmkhgy1aziYck5wL1SHlshAOIikrn6PJ2eh0cJNcShVCCIcnLXFVpCgKmZmZAHh7e8tUI0I1JdNstQ/yxtfducrbU4BLubkAuLm7a2rqJi1k00IGW7ScTTgmOReqQ1riqshkMrFhwwY2bNggU40I1SjATydTgeobG85sMrFnzx727NmjuambtJBNCxls0XI24ZjkXKgOKeKEcABHk7NIyszH1dlAr7D6ascRQghRC6SIE8IBbDlRfENDrzBfjM4GldMIIYSoDVLECWHn8gtN/BpbPM3WLc1lmi0hhKgrpIgTws7tiL9IXqGJQG8jkYEyzZYQQtQVUsQJYee2HC++lHpzs4ZyF6IQQtQhUsQJYcdSsvM5lJiJTgc3N5Ox4YQQoi6RceKqSK/X06pVK8tjIWrTTyf+GRuuoaexWret1+tp1KiR5bGWaCGbFjLYouVswjHJuVAdUsRVkV6vp2PHjmrHEHWQovwzNtwtzRtW+/Z1Oh1Nmzat9u1WBy1k00IGW7ScTTgmOReqQ8plIezUX8mZJGfl4+Zs4IbQemrHEUIIUcukJa6KFEUh9/L0Nu7u7jLViKg1JZdSezf1xehU/WPDKUBBfj4ALkajpm6a0EI2LWSwRcvZhGOSc6E67KYlbu7cuXTt2hUvLy/8/f0ZPnw4MTExasfCZDLxzTff8M0338hUI6LWXCo08Wtc8dhw/WvgUioUT930xx9/8Mcff2hu6iYtZNNCBlu0nE04JjkXqsNuiriff/6ZSZMm8fvvv7Np0yYKCwu57bbbyMnJUTuaELVu++kL5BeaCPFxpaW/p9pxhBBCq
MBuLqdu3LjR6vvly5fj7+/P3r17uemmm1RKJYQ6fohJASC6pb9cKhNCiDrKboq4q2VkZADg6+trc538/HzyL/cLAcjKyqrxXELUtFNpOZw6n4OTQU+/5jI2nBBC1FV2czn1SmazmWnTptGrVy/atm1rc725c+fi4+Nj+YqMjKzFlELUjB9jiocV6RFaH2+js8pphBBCqMUuW+ImTZrE4cOH+fXXX6+53syZM5k+fbrl+4SEBCnkhF3LKzTx86niu1KjW9bMDQ1C1KRv4/+q9GuHNGlTjUmEsH92V8RNnjyZ7777ju3bt1tGJLfFaDRiNP4zin1mZmZNxxM1zJR3EVNuCub8DPRGHwzu/hhc66sdq9b8EnuBS4UmgrxdaRvkrXYcIYQQKrKbIk5RFB599FHWrl3Ltm3bCA8PVzsSUDwyerNmzSyPRc0pyvqb1E0Pkhe/2bLMNfRWGvZfjJNXYxWT1Z6SS6m3taz5ye51Oh1BQUGWx1qihWxayGCLlrMJxyTnQnXYTRE3adIkVq5cyddff42XlxdJSUkA+Pj44Obmploug8FA165dVXv/usKUd7FUAQeQd2YTqZsfwn/gJw7fIheblsvx1GwMBh23NPOr8ffT6/U0b968xt+nMrSQTQsZbNFyNuGY5FyoDru5sWHRokVkZGTQt29fgoKCLF+rVq1SO5qoBabclFIFXIm8M5sw5abUcqLa98Pxyzc0NPHFx01uaBBCiLrOblriFEVRO0KZFEWxDGNiNBqlGbmGmPMzrv18wbWft3f5hSZ+PnUeKL6UWhsUoKiwEAAnZ2dNjUenhWxayGCLlrMJxyTnQnXYTUucVplMJtauXcvatWtlqpEapDf6XPt5l2s/b++2nb5AboGJQG8j7WvphgazycTOnTvZuXOn5qZu0kI2LWSwRcvZhGOSc6E67KYlTtRtBnd/XENvJe/MplLPuYbeisHdX4VU5VOVIRWguFVl/dFkAAa1DkD+wBVCCAHSEifshMG1Pg37L8Y19Far5cV3py5x6JsajiRlceZCLkYnQ63c0CCEEMI+SEucsBtOXo3xH/hJ8ThxBRnoXerGOHElrXB9mvniaZT/skIIIYrJGUHYFYNrfYcv2q6UllvAzjMXAbi9dYDKaYQQQmiJXE4VQsM2HkvBbFZoG+hNaH13teMIIYTQECnihNCoQpOZHy7P0DC4tXZv3BBCCKEOuZxaRTqdzjIFmIyLI6rTjriLZFwqpIGHC91Da/8Ssk6nIyAgwPJYS7SQTQsZbNFyNuGY5FyoDiniqshgMHDDDTeoHUM4GAX45q/iqeWiW/pj0Nf+L0W9Xk/Lli1r/X3LQwvZtJDBFi1nE45JzoXqkMupQmjQkaQsTp7PwdmgZ2Cr2pmhQQghhH2RlrgqUhTFMjq1wWCQZmRRLdZdboW7ubkf3q7qzJOqgGW0f73BoKmpm7SQTQsZbNFyNuGY5FyoDmmJqyKTycTq1atZvXq1TDUiqkVCRh67Lg8rMqxNoGo5zCYTv/32G7/99pvmpm7SQjYtZLBFy9mEY5JzoTqkiBNCY76+3ArXrUl9QnxcVU4jhBBCq6SIE0JDMvMK+enEeQCGt1WvFU4IIYT2SREnhIasP5pCoclMMz8PIgO91I4jhBBCw6SIE0IjCkxmNhxNAYpb4aRbsBBCiGuRIk4Ijdhy4jyZeYU09DTSM8xX7ThCCCE0Too4ITTAZFb46s9EoLgVTo3BfYUQQtgXGSeuinQ6HY0bN7Y8FqIyfj6VRmp2Pj5uztzWUhuD++p0Ovz8/CyPtUQL2bSQwRYtZxOOSc6F6pAirooMBgO9e/dWO4aoYd/G/1Vj2zYrsPrPc0BxK5yLQRsN5Hq9nsjISLVjlEkL2bSQwRYtZxOOSc6F6tDG2UKIOmxH3AXOZeThaXRiYCt/teMIIYSwE1LECaEiBVh9sLgV7vbIANycDeoGEkIIYTfkcmoVFRUVsXr1agBGjhyJk5N8pKL8dsenE3chF1dnA0MiA9SOY8V0eeomgF69emEwaKfA1EI2LWSwRcvZhGOSc6E65FMWQiWKAp/tPwvAoFb+eBrlv6MQ11KVvqlDmrSpxiRCaINcThVCJTvPXOB0Wi5uzgbuaC9TbAkhhKgYKeKEUIFZgU/3JQAwtG0g3kZnlRMJIYRjWbhwIWFhYbi6utK9e3d27dp1zfVXr15Nq1atcHV1pV27dmzYsMHyXGFhIU8++STt2rXDw8OD4OBgxo4dy7lz52p6N65JijghVPDz6fOcTb+Ep9GJ4W2kFU4IIarTqlWrmD59OrNnz2bfvn106NCB6OhoUlJSylx/x44djBkzhvvuu4/9+/czfPhwhg8fzuHDhwHIzc1l3759zJo1i3379rFmzRpiYmIYOnRobe5WKVLECVHLTGaFz/cXt8Ld0S4QdxfpdC6EENXprbfeYuLEiUyYMIHIyEgWL16Mu7s7H374YZnrv/POOwwYMIAZM2bQunVrXnzxRTp16sR7770HgI+PD5s2bWLUqFG0bNmSG264gffee4+9e/cSHx9fm7tmRYo4IWrZ5hPnScosnp1hSGtt3ZEqhBBalpWVRWZmpuUrPz+/1DoFBQXs3buX/v37W5bp9Xr69+/Pzp07y9zuzp07rdYHiI6Otrk+QEZGBjqdjnr16lVuZ6qB3A5XRTqdjqCgIMtjIa4lv9DEqgPFrXAjOwRj1PC4cDqdjvq+vpbHWqKFbFrIYIuWswnHVFvnwqtnIpk9ezZz5syxWnb+/HlMJhMBAdZ/JAcEBHDs2LEyt5uUlFTm+klJSWWun5eXx5NPPsmYMWPw9vau4F5UHyniqshgMNC3b1+1Ywg7sfZwMmk5BTT0NDJAI3Ok2qLX62nXtq3aMcqkhWxayGCLlrMJx1Rb58IjR44QEhJi+d5oNNb4e16tsLCQUaNGoSgKixYtqvX3v5IUcULUkou5haw5nAjAuK6NcNbIHKlCCGEvvLy8rtvy5efnh8FgIDk52Wp5cnIygYFl30gWGBhYrvVLCrgzZ87w008/qdoKB9InToha88m+s+QXmmjp70nv8AZqxxFCCIfk4uJC586d2bJli2WZ2Wxmy5Yt9OjRo8zX9OjRw2p9gE2bNlmtX1LAnThxgs2bN9Oggfq/x6UlroqKiopYs2YNACNGjJCpRkSZ4i7ksuVEKgD3dmuCPfRSMplMlk69PXr00NTUTVrIpoUMtmg5m3BMWjsXTp8+nXHjxtGlSxe6devG/PnzycnJYcKECQCMHTuWkJAQ5s6dC8DUqVPp06cP8+bNY/DgwXz++efs2bOHpUuXAsUF3F133cW+ffv47rvvMJlMlv5yvr6+uLi4qLKfUnFUA5PJpHYEoWEK8OGueBQFeoc3oJW/p9qRys1sNqsdwSYtZNNCBlu0nE04Ji2dC0ePHk1qairPPfccSUlJREVFsXHjRsvNC/Hx8ej1/1yM7NmzJytXruTZZ5/l6aefpnnz5qxbt462l/uWJiQk8M033wAQFRVl9V5bt25VrW+8FHFC1LDd8ekcPJeJk0HP2C6N1I4jhBB1wuTJ
k5k8eXKZz23btq3UspEjRzJy5Mgy1w8LC0NRlOqMVy2kT5wQNSi/0MTS388AMKxtIAFetX8nlRBCCMckRZwQNeiLPxNJzc6noaeR0R2C1I4jhBDCgUgRJ0QNOZt+ibWXhxR54IZQjE7SuVwIIUT1sbsibuHChYSFheHq6kr37t3ZtWuX2pGEKEUBFu88g8mk0LVxfbo1qad2JCGEEA7Grm5sWLVqFdOnT2fx4sV0796d+fPnEx0dTUxMDP7+/qpk0ul0NGzY0PJYCIDtp9I4lJiJs0HPxBuaqB2nUnQ6HT4+PpbHWqKFbFrIYIuWswnHJOdCdegULd5uYUP37t3p2rUr7733HlB8C33jxo159NFHeeqpp677+rNnz9K4cWP+/vtvGjWSuwRF+X0b/1e51824VMikNYfIyi/ins6NGNUhuAaTCSHKY0iTNmpHEFUg5++y2c3l1IKCAvbu3Uv//v0ty/R6Pf3797cManm1/Px8MjMzLV9ZWVm1FVfUYUt+P0NWfhFhvu6MaCs3MwghhKgZdlPEnT9/HpPJZBmor0RAQIBl1OSrzZ07Fx8fH8tXZGRkbUQVddiOuAv8FnsBvV7HlBvDcTLIZQUhhBA1w26KuMqYOXMmGRkZlq8jR45U+3uUTDWyZs0aioqKqn37wn5k5heyeGfxmHAj2gUR0cBD5URVUzJ1086dOzU1EjtoI5sWMtii5WzCMcm5UB12c2ODn58fBoOB5ORkq+XJyckEBgaW+Rqj0YjR+M/gqpmZmTWSLT8/v0a2K+yHAiz4NY6MS4U0ru/G3VGO0Q+usLBQ7Qg2aSGbFjLYouVswjHJubD22U1LnIuLC507d2bLli2WZWazmS1bttCjRw8VkwkBPxxLYdeZizgZ9Dx2UwTOBrv5ryWEEMJO2U1LHMD06dMZN24cXbp0oVu3bsyfP5+cnBwmTJigdjRRh51Nv8T/7YoHYGznRoQ3cFc5kRBCiLrAroq40aNHk5qaynPPPUdSUhJRUVFs3Lix1M0OQtSWQpOZN38+RUGRmagQb4a2KfvSvhBCCFHd7KqIA5g8eTKTJ09WO4YQAPzfrr+JTcvFy+jE1BsjkDEuhRBC1BbpuCNEJW09eZ7vjxbfaDPtpqb4ujurnEgIIURdYnctcVqj0+nw9fW1PBZ1w+m0XBb+FgfA3R1D6NK4nqp5aoJOp8PLy8vyWEu0kE0LGWzRcjbhmORcqA4p4qrIYDAQHR2tdgxRi7Lzi3j1pxMUmsx0blSPu6NC1I5UI/R6PR07dlQ7Rpm0kE0LGWzRcjbhmORcqA65nCpEBZjMCm/+fIrkrHwCvY1M79NU+sEJIYRQhRRxQpSTAiz+PY79ZzNwcdLz1M3N8TRKY7YQQgh1yBmoioqKili/fj0AgwcPxslJPlJHtebPRH48lopOB4/3bUa4r2OPB2cymdizZw8AXbp0wWAwqJzoH1rIpoUMtmg5m3BMci5Uh3zK1SA3N1ftCKKG/XI6jY/2/A3Afd1D6d6knrqBaomWp9HRQjYtZLBFy9mEY5JzYe2Ty6lCXMeWE6nM/yUWgCFtAhkSKYNLCyGEUJ+0xIk649v4vyr8miPJWcz54ThFJjM3hNbn3m5NaiCZEEIIUXFSxAlhw8nzObzw43Hyi0x0bOTD430j0MudqELYpcr8EVdiSJM21ZhEiOojl1OFKMPJ8znM/iGGS4Um2gZ6M/PmZjgb5L+LEEII7ZCWOCGucjgpk5c2neBSoYmW/p48e2tzjE5yd58QQghtkSKuGnh7e6sdQVSTPX+n8+pPJyk0mWkX5M0z/Zvj5lx3Czh3d+0Oo6KFbFrIYIuWswnHJOfC2qdTFEVRO0RtOXv2LI0bN+bvv/+mUaNGascRtex6fWJ+OZ3G29tPYzIrdGtSnyf6RcglVCGE9InTADl/l01a4kSdpwBfHjzHJ3vPAtAnogFTb2yKQe5iEEIIoWFSxIk6rdBkZsGvsfx8Kg2AwZGBTOzeROZDFUIIoXlSxFVRUVERP/zwAwDR0dEy1YgduZhbyNyfThCTko1er+OBG0IZ2Mpf7ViaYTKZ2L9/PwAdO3bU1NRNWsimhQy2aDmbcExyLlSHfMrVIDMzU+0IooIOnMvkrZ9PkXGpEE+jE0/0a0aHYOmUezUtT6OjhWxayGCLlrMJxyTnwtonRZyoU0xmhZX7E/jqz3MoCoT5uvPkzc0I9nZVO5oQQghRIVLEiTrjXGYe7/xymmPJ2QAMaOXPfd2b4CJ3oAohhLBDUsQJh1dkMvP29tM8++thCk1m3F0MTOoVTu9wX7WjCSGEEJUmRZxwaIcTM7nvi4Ps/jsdPM1EhXgzqVc4/p5GtaMJIYQQVSJFnHBIF3MLeH7TcRb+FofJrODj6sTYG8O5pXlDZPQQIYQQjkCKuGog09toR5HJzAd/xDNr4zHScgsBGNYmgPdGtGNfxmmV09kfo1G7LZZayKaFDLZoOZtwTHIurH0y7ZawK7amzlIU+CU2jc/2J3AuIw+AxvXduL97KFEydIgQQiUyZVf1kPN32aQlTtg1k1lhx5kLrD6QyJmLxeNieRmdGNMxhAGt/GXqLCGEEA5Lijhhl7Lzi/jxeCrrjyZzPrsAAHcXA3e0C2JIZABuzjJCvRBCCMcmRVwVmUwmNm/eDED//v1lepsadi4zj2//SmbLyfPkF5oA8HZ1ZlBrf4ZEBuBplEO6upjNZg4ePAhAhw4d0Ou1M56eFrJpIYMtWs4mHJOcC9UhZ7wqUhSFCxcuWB6L6peTX8Saw4l8tOcsPyUdo+RjDq3vztC2AfRp2gBnGbC32imKQlZWluWxlmghmxYy2KLlbMIxyblQHVLECU0qKDLz08nzfHHwHF/+eY7s/OJWNzyhc6N6DGsbSPtgbxkuRAghRJ0lRZzQjNyCIn6ISWXNoUS+O5JMRl6R5bmmDdwZ27kxwSEhBHjJ0AlCCCGEFHFCNWazwoFzGWw6fp4tJ1L5JfYC+UVmy/OBXkaGtQ3kXx1D6B3ui06nsznEiBBCCFHXSBEnao3JrHAoMZOlf+3jcGIWB89lkJX/T2sbruDvaaRHWH16hvnSsqEnOh2kk8R3fyepF1wIIYTQICniRI0wmxVOpuWwPyGD/QmZ7D2bzh/xF4v7tnmet6zn5mygXbA3HYK8iQrxJsTHTfq5CSGEEOUgRVw1qMvT2yiKQnJWPsdTcziWms3Bc5kcPJfBwXOZ5BSYSq3vZXSiWYgPrfy96BDsTYuGHjIgr4Y5OzurHcEmLWTTQgZbtJxNOKa6fC5Ui0y7Ja4rK6+IhMxLJGTkkZCRR3z6JY6nZBOTmkNMajaZV9yAcCVXJz3tg7zpEOJNpxAfeoT60ibQiw1nj9TyHgghhDpk2q3qIefvsklLXB1mNiukZOdz9nJxlpCRR0LmJc5dflyy3KrfWhl0Ogir707Lhh60CSy+LNox2IcWDT1wkvHbhBBCiBohRZwdKygyk11QRFZeEdkFJrLyi8jOLypell9Edn7xsqz8ItJyCriQW0habgFpOQWczykgKSufInP5GmK9XZ0I8XYlxMe
VRj5uNG/oQUt/T1o29CSigTuuMs2VEEIIUaukiKsik8nEdz9spsik0LpLD8zoKDCZKTQpV/1rLrW8rGUFJjP5RWZyCkoKsiuKs8tFWUmRVmiq+pVwna54KI+SAi3Ex+3yv1d8ebvh5SqHSl1jNps5dOgQAO3atdPU1E1ayKaFDLZoOZtwTCaTia1btwLQr18/mXarltjFmTkuLo4XX3yRn376iaSkJIKDg/n3v//NM888g4uLi6rZFEXhu73HySkoYvXP+Zio/V+Wrk56PI1OeLoY8HJ1wtPFCS+jE55GA15GJzxcnPB1d6aBhwsN3F1ocPlxkLcrgV5GmbJKlElRFDIyMiyPtUQL2bSQwRYtZxOOSVEUUlNTLY9F7bCLIu7YsWOYzWaWLFlCs2bNOHz4MBMnTiQnJ4c333xT7Xg46XQYdDrquTmj1xtwcdLjrNdd/lePi5Pu8r9lLDfocTHocTLocDHoL3+vsyrEPI2XH5exzMPFIEWYEEIIUQfZRRE3YMAABgwYYPm+adOmxMTEsGjRIk0UcTdFNABg2cjbcHKyi4+0ymTmBCGEEEJddltxZGRk4Ovre8118vPzyc/Pt3yflZVV07GEEEIIIWqFXV6HO3nyJAsWLODBBx+85npz587Fx8fH8hUZGVlLCYUQQgghapaqRdxTTz2FTqe75texY8esXpOQkMCAAQMYOXIkEydOvOb2Z86cSUZGhuXryBEZZFYIIYQQjkHVy6mPPfYY48ePv+Y6TZs2tTw+d+4c/fr1o2fPnixduvS62zcajVbTgGRmZlY667XIrdTCUWl5aAotZNNCBlu0nK0uUav/sBozRci5sPbZzbRbCQkJ9OvXj86dO/PJJ59U6mCRaTuqj9zYIIQQ2uVo033J+btsdnFjQ0JCAn379iU0NJQ333zTMhYNQGBgoIrJhBBCCCHUYRdF3KZNmzh58iQnT54sVYHbSUOiEEIIUWuqcrXE0VrxHJlddJoYP348iqKU+aU2k8nEtm3b2LZtGyaTSe04QlQbs9nMocOHOXT4MGazWe04VrSQTQsZbNFyNuGY5FyoDrtoidMyRVFITEy0PLYn0q9NXIuiKFy8cMHyWEu0kE0LGWzRcjbhmOz5XGjP7KIlTgghhBBCWJMiTgghhBAOZ+HChYSFheHq6kr37t3ZtWvXNddfvXo1rVq1wtXVlXbt2rFhwwar5xVF4bnnniMoKAg3Nzf69+/PiRMnanIXrksup9o5uSQqhBCiOlXmvGIymdhnSqeToV71B6qEVatWMX36dBYvXkz37t2ZP38+0dHRxMTE4O/vX2r9HTt2MGbMGObOncvtt9/OypUrGT58OPv27aNt27YAvP7667z77rusWLGC8PBwZs2aRXR0NEeOHMHV1bW2dxGQljghhBBCOJi33nqLiRMnMmHCBCIjI1m8eDHu7u58+OGHZa7/zjvvMGDAAGbMmEHr1q158cUX6dSpE++99x5Q3Ao3f/58nn32WYYNG0b79u356KOPOHfuHOvWravFPbMmLXHVZJ8pHbe/j8iI1UIIIYSKCgoK2Lt3LzNnzrQs0+v19O/fn507d5b5mp07dzJ9+nSrZdHR0ZYCLTY2lqSkJPr372953sfHh+7du7Nz507uvvvu6t+RcqhTRVzJrfYld9BUh6KiItLS0sgim7TU8+j1umrbthBqMpsVstIzADR3bGshmxYy2KLlbMIxlRxzaZg4e/YsTk7VW16UnLczMjLw9va2LL96ek2A8+fPYzKZCAgIsFoeEBBQaj72EklJSWWun5SUZHm+ZJmtddRQp4q45ORkALp161Yj219SI1sVQn1aPra1kE0LGWzRcjbhmB599NEa23ZJ/7QSs2fPZs6cOTX2flpXp4q4jh07smvXLgICAq47OXRWVhaRkZEcOXIELy+vWkqoDXV132W/69Z+Q93d97q631B3993e99tsNhMfH09kZKRVK9/VrXAAfn5+GAwGS8NNieTkZJtTdQYGBl5z/ZJ/k5OTCQoKslonKiqqUvtUHepUEefk5ETXrl3LtW5mZiYAISEhVk23dUFd3XfZ77q131B3972u7jfU3X13hP1u0qRJudZzcXGhc+fObNmyheHDhwPFReCWLVuYPHlyma/p0aMHW7ZsYdq0aZZlmzZtokePHgCEh4cTGBjIli1bLEVbZmYmf/zxBw8//HCl96mq6lQRJ4QQQgjHN336dMaNG0eXLl3o1q0b8+fPJycnhwkTJgAwduxYQkJCmDt3LgBTp06lT58+zJs3j8GDB/P555+zZ88eli5dCoBOp2PatGm89NJLNG/e3DLESHBwsKVQVIMUcUIIIYRwKKNHjyY1NZXnnnuOpKQkoqKi2Lhxo+XGhPj4eKtuVT179mTlypU8++yzPP300zRv3px169ZZ9cF74oknyMnJ4YEHHiA9PZ3evXuzceNG1caIAynibDIajcyePbvM6+2Orq7uu+x33dpvqLv7Xlf3G+ruvtfF/Z48ebLNy6fbtm0rtWzkyJGMHDnS5vZ0Oh0vvPACL7zwQnVFrDKdIjPVCiGEEELYHZmxQQghhBDCDkkRJ4QQQghhh6SIE0IIIYSwQ1LECSGEEELYISniKiA/P5+oqCh0Oh0HDhxQO06tGDp0KE2aNMHV1ZWgoCD+85//cO7cObVj1ai4uDjuu+8+wsPDcXNzIyIigtmzZ1NQUKB2tFrx8ssv07NnT9zd3alXr57acWrMwoULCQsLw9XVle7du7Nr1y61I9W47du3M2TIEIKDg9HpdJbJvR3d3Llz6dq1K15eXvj7+zN8+HBiYmLUjlUrFi1aRPv27fH29sbb25sePXrw/fffqx1LVBMp4irgiSeeIDg4WO0Ytapfv3588cUXxMTE8NVXX3Hq1CnuuusutWPVqGPHjmE2m1myZAl//fUXb7/9NosXL+bpp59WO1qtKCgoYOTIkaqOQl7TVq1axfTp05k9ezb79u2jQ4cOREdHk5KSona0GpWTk0OHDh1YuHCh2lFq1c8//8ykSZP4/fff2bRpE4WFhdx2223k5OSoHa3GNWrUiFdffZW9e/eyZ88ebr75ZoYNG8Zff/2ldjRRHRRRLhs2bFBatWql/PXXXwqg7N+/X+1Iqvj6668VnU6nFBQUqB2lVr3++utKeHi42jFq1bJlyxQfHx+1Y9SIbt26KZMmTbJ8bzKZlODgYGXu3LkqpqpdgLJ27Vq1Y6giJSVFAZSff/5Z7SiqqF+/vvLBBx+oHUNUA2mJK4fk5GQmTpzIxx9/jLu7u9pxVHPhwgU+/fRTevbsibOzs9pxalVGRga+vr5qxxDVoKCggL1799K/f3/LMr1eT//+/dm5c6eKyURtycjIAKhz/6dNJhOff/45OTk5ljlBhX2TIu46FEVh/PjxPPTQQ3Tp0kXtOKp48skn8fDwoEGDBsTHx/P111+rHalWnTx5kgULFvDggw+qHUVUg/Pnz2MymSzT75QICAggKSlJpVSitpjNZqZNm0avXr2splRyZIcOHcLT0xOj0chDDz3E2rVriYyMVDuWqAZ1to
h76qmn0Ol01/w6duwYCxYsICsri5kzZ6odudqUd99LzJgxg/379/Pjjz9iMBgYO3Ysih1O9FHR/QZISEhgwIABjBw5kokTJ6qUvOoqs+9COKJJkyZx+PBhPv/8c7Wj1JqWLVty4MAB/vjjDx5++GHGjRvHkSNH1I4lqkGdnXYrNTWVtLS0a67TtGlTRo0axbfffotOp7MsN5lMGAwG7rnnHlasWFHTUatdeffdxcWl1PKzZ8/SuHFjduzYYXfN8RXd73PnztG3b19uuOEGli9fbjVZsr2pzM98+fLlTJs2jfT09BpOV7sKCgpwd3fnyy+/ZPjw4Zbl48aNIz09vc60NOt0OtauXWv1GTi6yZMn8/XXX7N9+3bCw8PVjqOa/v37ExERwZIlS9SOIqrISe0AamnYsCENGza87nrvvvsuL730kuX7c+fOER0dzapVq+jevXtNRqwx5d33spjNZqB4uBV7U5H9TkhIoF+/fnTu3Jlly5bZdQEHVfuZOxoXFxc6d+7Mli1bLAWM2Wxmy5YtNifLFvZNURQeffRR1q5dy7Zt2+p0AQfFx7s9/g4XpdXZIq68mjRpYvW9p6cnABERETRq1EiNSLXmjz/+YPfu3fTu3Zv69etz6tQpZs2aRUREhN21wlVEQkICffv2JTQ0lDfffJPU1FTLc4GBgSomqx3x8fFcuHCB+Ph4TCaTZUzEZs2aWY5/ezd9+nTGjRtHly5d6NatG/PnzycnJ4cJEyaoHa1GZWdnc/LkScv3sbGxHDhwAF9f31K/6xzJpEmTWLlyJV9//TVeXl6Wvo8+Pj64ubmpnK5mzZw5k4EDB9KkSROysrJYuXIl27Zt44cfflA7mqgOqt4ba4diY2PrzBAjf/75p9KvXz/F19dXMRqNSlhYmPLQQw8pZ8+eVTtajVq2bJkClPlVF4wbN67Mfd+6dava0arVggULlCZNmiguLi5Kt27dlN9//13tSDVu69atZf5sx40bp3a0GmXr//OyZcvUjlbj7r33XiU0NFRxcXFRGjZsqNxyyy3Kjz/+qHYsUU3qbJ84IYQQQgh7Zt8dfYQQQggh6igp4oQQQggh7JAUcUIIIYQQdkiKOCGEEEIIOyRFnBBCCCGEHZIiTgghhBDCDkkRJ4QQQghhh6SIE0LYtG3bNnQ6ncPNnyqEEI5AijghNC4pKYlHH32Upk2bYjQaady4MUOGDGHLli3V+j59+/Zl2rRpVst69uxJYmIiPj4+1fpetWn58uXodLprfsXFxdXIe+t0OtatW1cj266Msn7GQgj7JXOnCqFhcXFx9OrVi3r16vHGG2/Qrl07CgsL+eGHH5g0aRLHjh2r0fd3cXGx+/liR48ezYABAyzfjxgxgrZt2/LCCy9YljVs2NDyuKCgABcXl1rNKIQQlaL2vF9CCNsGDhyohISEKNnZ2aWeu3jxouXxmTNnlKFDhyoeHh6Kl5eXMnLkSCUpKcny/OzZs5UOHTooH330kRIaGqp4e3sro0ePVjIzMxVFKXu+1NjYWMtcmyXvtWzZMsXHx0fZuHGj0qpVK8XDw0OJjo5Wzp07Z3mvPn36KFOnTrXKOmzYMKv5OS9cuKD85z//3869hUT1fQEc/45W0pg55Q2JSfGX1ZgSabfpwbDGDJMmCisw05KMKHzJl8jHgowiCqyIzKgsggILU0mGtDTTLmRFk6lpFmiW2sUu5GX/H8JDk/1rNB8cWB+YhzPnnLXX2fthFmedOcnKYDCo8ePHq+XLl6vnz58Pyvdnhw4dUkFBQdr2jRs31Lx585Rer1fe3t5q0aJFqrm5+a9z+mt+KSkpymq1qj179qjAwEAVHByslFKqpaVFJSYmKm9vbzVp0iS1cuVK1dTUpJ1XU1OjLBaL8vHxURMnTlTR0dHq/v372v6goCCH+RzIfeDacnNzldFoVJ6enmrbtm2qt7dXZWdnq4CAAOXn56f27NnjkHdXV5dKS0tTvr6+ysvLS8XExKiHDx8OmrOhrrEQwnVJO1WIUaqzs5OSkhK2b9+Op6fnoP0GgwGA/v5+rFYrnZ2dlJeXU1payosXL1i3bp3D8Y2NjRQUFFBYWEhhYSHl5eXs27cPgMOHD2M2m9myZQutra20trZiNBp/m9eXL184cOAAZ8+e5ebNm7S0tJCZmTmka0tNTeXevXtcvXqVqqoqlFLEx8fT09Pj1Pm9vb2sWrWKxYsX8+jRI6qqqkhPT0en0w0pjwE2m426ujpKS0spLCykp6eHuLg4vLy8uHXrFpWVlUyYMIHly5fz/ft3AD59+kRKSgoVFRXcuXOH0NBQ4uPj+fTpEwB3794FIC8vj9bWVm0bfqxFcXExJSUlXLhwgdzcXFasWMHr168pLy8nOzubrKwsqqurtXMSExNpb2+nuLiY+/fvExkZydKlS+ns7HSIOxJrLIRwDdJOFWKUamhoQCnFzJkz/3iczWbj8ePHNDU1aT/KZ86cYdasWdy9e5d58+YBP4q906dP4+XlBUBycjI2m429e/fi7e3NuHHj0Ov1f22f9vT0cPz4cf777z8AduzY4dCa/Jv6+nquXr1KZWUlixYtAiA/Px+j0UhBQQGJiYl/jfHx40c+fPhAQkKClofJZHI6h195enpy8uRJrY167tw5+vv7OXnypFYY5uXlYTAYKCsrY9myZSxZssQhxokTJzAYDJSXl5OQkKC1aA0Gw6A57e/v59SpU3h5eREWFkZMTAx1dXUUFRXh5ubGjBkzyM7O5saNGyxYsICKigpqampob2/Hw8MDgAMHDlBQUMClS5dIT0/X4o7EGgshXIMUcUKMUkopp46z2+0YjUaHuyphYWEYDAbsdrtWxAUHB2s/7gCBgYG0t7cPOS+9Xq8VTsOJY7fbGTNmDAsWLNC+8/HxYcaMGdjtdqdiTJ48mdTUVOLi4oiNjcVisbB27VoCAwOdv5CfREREODwHV1tbS0NDg8N8AXz79o3GxkYA3rx5Q1ZWFmVlZbS3t9PX18eXL19oaWn563i/rkVAQADu7u64ubk5fDcwr7W1tXR3d+Pj4+MQ5+vXr1o+v4s73DUWQrgGKeKEGKVCQ0PR6XQj9ueFsWPHOmzrdDr6+/tHJM7PBaebm9ugAtTZNulQYuTl5ZGRkUFJSQkXL14kKyuL0tJSFi5cOKSxgEHt6u7ubqKiosjPzx907MAdtpSUFDo6Ojh8+DBBQUF4eHhgNpu1duuf/G4O/7Q+3d3dBAYGUlZWNijWQFv9/8UdzhoLIVyDPBMnxCg1efJk4uLiyMnJ4fPnz4P2D7y7zWQy8erVK169eqXte/r0Ke/fvycsLMzp8caNG0dfX98/5+3n50dra6u23dfXx5MnT7Rtk8lEb2+vw/NeHR0d1NXVafn6+fnR1tbmUMg9fPhw0Fhz5sxh165d3L59m/DwcM6fP//P+QNERkZSX1+Pv78/06ZNc/gMvG6lsrKSjIwM4uPjmTVrFh4eHrx7984hztixY0dkTiMjI2lra2PMmDGD8vH19XU6zkitsRBidJAiTohRLCcnh76+PubPn8/ly5epr6/Hb
rdz5MgRzGYzABaLhYiICJKSknjw4AE1NTVs3LiRxYsXM3fuXKfHCg4Oprq6mubmZt69ezfsOzhLlizh2rVrXLt2jWfPnrFt2zaHlwWHhoZitVrZsmULFRUV1NbWsmHDBqZMmYLVagV+vM/s7du37N+/n8bGRnJyciguLtZiNDU1sWvXLqqqqnj58iXXr1+nvr7+n56L+1lSUhK+vr5YrVZu3bpFU1MTZWVlZGRk8Pr1a+06zp49i91up7q6mqSkJMaPH+8QJzg4GJvNRltbG11dXcPOx2KxYDabWbVqFdevX6e5uZnbt2+ze/du7t2753SckVpjIcToIEWcEKNYSEgIDx48ICYmhp07dxIeHk5sbCw2m41jx44BP1pmV65cYdKkSURHR2OxWAgJCeHixYtDGiszMxN3d3fCwsLw8/Nz6tmu39m8eTMpKSlaIRkSEkJMTIzDMXl5eURFRZGQkIDZbEYpRVFRkdYONJlMHD16lJycHGbPnk1NTY3DP2D1ej3Pnj1jzZo1TJ8+nfT0dLZv387WrVuHlfOv9Ho9N2/eZOrUqaxevRqTyURaWhrfvn1j4sSJAOTm5tLV1UVkZCTJyclkZGTg7+/vEOfgwYOUlpZiNBqZM2fOsPPR6XQUFRURHR3Npk2bmD59OuvXr+fly5cEBAQ4HWek1lgIMTrolLNPTwshhBBCiFFD7sQJIYQQQrggKeKEEEIIIVyQFHFCCCGEEC5IijghhBBCCBckRZwQQgghhAuSIk4IIYQQwgVJESeEEEII4YKkiBNCCCGEcEFSxAkhhBBCuCAp4oQQQgghXJAUcUIIIYQQLkiKOCGEEEIIF/Q/uznPqupiheMAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "# Get a colorblind-friendly palette\n", "palette = sns.color_palette(\"colorblind\")\n", @@ -150,9 +176,32 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "================== DoubleMLData Object ==================\n", + "\n", + "------------------ Data summary ------------------\n", + "Outcome variable: y\n", + "Treatment variable(s): ['d']\n", + "Covariates: ['x0', 'x1', 'x2', 'x3', 'x4']\n", + "Instrument variable(s): None\n", + "No. Observations: 3000\n", + "\n", + "------------------ DataFrame info ------------------\n", + "\n", + "RangeIndex: 3000 entries, 0 to 2999\n", + "Columns: 7 entries, y to x4\n", + "dtypes: float64(7)\n", + "memory usage: 164.2 KB\n", + "\n" + ] + } + ], "source": [ "y = data_apo['y']\n", "x = data_apo['x']\n", @@ -186,7 +235,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -198,14 +247,110 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Further, the [DoubleMLAPO](https://docs.doubleml.org/dev/api/generated/doubleml.DoubleMLAPO.html#doubleml.DoubleMLAPO) model requires a specification of the treatment level $a$ for which the APOs should be estimated. In this example, we will loop over all treatment levels." + "Further, the [DoubleMLAPO](https://docs.doubleml.org/dev/api/generated/doubleml.irm.DoubleMLAPO.html) model requires a specification of the treatment level $a$ for which the APOs should be estimated. In this example, we will loop over all treatment levels." ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
treatment_levelapothetaci_lowerci_upper
00.0210.036240210.077702208.768798211.386831
11.0211.785815211.881937210.545492213.218383
22.0217.063017217.069443215.750701218.388185
33.0219.468907219.404300218.096418220.712095
44.0220.439699220.503700219.186589221.820963
55.0220.525064220.417834219.095104221.740505
\n", + "
" + ], + "text/plain": [ + " treatment_level apo theta ci_lower ci_upper\n", + "0 0.0 210.036240 210.077702 208.768798 211.386831\n", + "1 1.0 211.785815 211.881937 210.545492 213.218383\n", + "2 2.0 217.063017 217.069443 215.750701 218.388185\n", + "3 3.0 219.468907 219.404300 218.096418 220.712095\n", + "4 4.0 220.439699 220.503700 219.186589 221.820963\n", + "5 5.0 220.525064 220.417834 219.095104 221.740505" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "np.random.seed(42)\n", "\n", @@ -250,9 +395,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1IAAAIjCAYAAAAJLyrXAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAeJhJREFUeJzt3XmcjeX/x/H3mX2fsczYd9myL/naBxkkpQWhLImWUU1S32gxqEQKSZYWKmSLSKiJhpQkIlRElsJky4yZYczMuX5/+M35OmaGuTHOGV7Px2Mejzn3fd33/TnnXOee8577vq/bZowxAgAAAADkmYerCwAAAACAgoYgBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIIXrTmRkpCIjI11dxlW1b98+2Ww2zZw509WluJTNZtOgQYNcXYbbmzlzpmw2m/bt2+fqUq4bK1euVN26deXn5yebzaaTJ0+qb9++Kl++/CWX5fNb8OT1Pcv6rP3000/XpjDcsNivuyeCFK6ZrJ1Abj8//PBDntf166+/KjY21u12KO+8847bfFlavny5bDabSpYsKbvdnmOb8uXLO70HERERatGihRYvXpytrTFGH3/8sVq2bKmwsDAFBASoVq1aGjlypFJSUi6rxsjIyIv2iayf2NjYy1r/pRw6dEixsbHasmVLvqy/IIiLi1Pz5s0VEBCgQoUK6d57783xc3VhX8n6eeSRR5za/frrr2rRooWCg4PVsGFDrV+/Ptu63nzzTd18883KyMiwVGt8fLzuvvtuFS9eXD4+PoqIiFDnzp21aNEiS+ux6vjx4+rWrZv8/f01efJkffzxxwoMDMzXbbqjrHAxbty4y1renfaPBUV8fHye9pE2m+2a1JOamqrY2FjFx8dfk+1dqeXLl1v6+xEZGamaNWvmX0G47ni5ugDceEaOHKkKFSpkm165cuU8r+PXX3/ViBEjFBkZme0/wl999dWVlnjZ3nnnHRUtWlR9+/Z1WQ1ZZs+erfLly2vfvn1avXq1br311hzb1a1bV08//bSkc8Fi2rRpuvvuuzVlyhTHl+TMzEz17NlT8+fPV4sWLRQbG6uAgAB9++23GjFihBYsWKCvv/5axYoVs1Tj888/r4ceesjxeOPGjXrrrbc0bNgwVa9e3TG9du3aVp9+nhw6dEgjRoxQ+fLlVbdu3XzZhjtbtmyZ7rzzTtWvX1+vvfaakpKSNHHiRDVv3lw///yzwsPDndqf31eyVKlSxfF7Zmam7r77bhUuXFivv/66li5dqjvvvFO7d+9WSEiIJOnIkSMaOXKk5s+fLy+vvP8JGj58uEaOHKmbbrpJDz/8sMqVK6fjx49r+fLluueeezR79mz17NnzCl6N3G3cuFGnTp3SqFGjnD5H7777bq7/pEB27rR/LCiqV6+ujz/+2Gna0KFDFRQUpOeff/6a15OamqoRI0ZIUoE482P58uWaPHlyvv0zDiBI4Zrr2LGjGjZsmG/r9/Hxybd1FxQpKSlasmSJRo8erRkzZmj27Nm5BqlSpUrp/vvvdzzu3bu3KleurPHjxzuC1NixYzV//nwNGTJEr7/+uqPtwIED1a1bN3Xp0kV9+/bVihUrLNXZrl07p8d+fn5666231K5duwLxR7qg++9//6uKFSvqu+++c3xuOnfu7AhWb7zxhlP7C/vKhf744w/t3LlT+/fvV9myZdW7d28VLVpU69evV/v27SVJw4YNU8uWLRUVFZXnOhcuXKiRI0fq3nvv1Zw5c+Tt7e2Y98wzz+jLL79Uenq6laduyZEjRyRJYWFhTtPPrwOukZGRIbvdft3u94sVK5btM/faa6+paNGiF/0s2u12nT17Vn5+fvldInBD49Q+uKW5c+eqQYMGCg4OVkhIiGrVqqWJEydKOneKYNeuXSVJrVu3dpzWkHWqwYXXSGWdGjF//nyNGDFCpUqVUnBwsO69914lJiYqLS1NMTExioiIUFBQkPr166e0tDSnembMmKE2bdooIiJCvr6+qlGjhqZMmeLUpnz58tqxY4fWrFnjqOn8Ok6ePKmYmBiVKVNGvr6+qly5ssaMGZPtP9pZ116EhoYqLCxMffr00cmTJy29fosXL9bp06fVtWtX3XfffVq0aJHOnDmTp2WLFy+u6tWra+/evZKk06dP6/XXX1eVKlU0evTobO07d+6sPn36aOXKlZZOz7wSn332mWrWrClfX1/dfPPNWrlyZbY2Bw8e1IMPPqhixYo52n3wwQeO+fHx8WrUqJEkqV+/fo73LOvUo2+//VZdu3ZV2bJl5evrqzJlyuipp57S6dOnL7vuvPQj6Vxfuv3227Vu3Trdcsst8vPzU8WKFfXRRx9la7tjxw61adNG/v7+Kl26tF5++eU8HSU5ceKEfv31V911111OX0Lr1Kmj6tWra+7cuTkud/bs2VxP5cx6bQoVKiRJCggIkL+/v1JTUyVJmzdv1uzZs/Xmm29esr7zvfjiiypcuLA++OCDHMNL+/btdfvttzseHzlyRP3791exYsXk5+enOnXq6MMPP3Ra5vzT1KZPn65KlSrJ19dXjRo10saNGx3tIiMj1adPH0lSo0aNZLPZHEdUcrpGysrn9/fff9e9996rwoULy8/PTw0bNtTSpUud2mSdEv3dd99p8ODBCg8PV2BgoO666y4dPXo02zpXrFihVq1aOfadjRo10pw5c5zabNiwQR06dFBoaKgCAgLUqlUrfffddznWeCl5re9q7B/Pf88mTJjgeM9+/vlneXl5OY6UnG/nz
p2y2Wx6++23JZ3r90OGDFGtWrUUFBSkkJAQdezYUVu3br2s558lNTVVDz/8sIoUKaKQkBD17t1b//77r2N+nz59VLRo0RwDf1RUlKpWrXpF28+6fnT27Nm6+eab5evr69gvXmpfKJ37XL/00ktq0KCBQkNDFRgYqBYtWuibb75xtNm3b5/jKPWIESOynXrdt29fBQUF6cCBA7r99tsVFBSkUqVKafLkyZKkbdu2qU2bNgoMDFS5cuWy9UvJej+42Ge3b9++jm1f7VMgV6xYoRYtWigwMFDBwcHq1KmTduzY4Zg/btw42Ww27d+/P9uyQ4cOlY+Pj1P/uJqfSVxjBrhGZsyYYSSZr7/+2hw9etTp59ixY452X331lZFk2rZtayZPnmwmT55sBg0aZLp27WqMMWbPnj3miSeeMJLMsGHDzMcff2w+/vhjk5CQYIwxplWrVqZVq1aO9X3zzTdGkqlbt65p0qSJeeutt8wTTzxhbDabue+++0zPnj1Nx44dzeTJk80DDzxgJJkRI0Y41d6oUSPTt29fM378eDNp0iQTFRVlJJm3337b0Wbx4sWmdOnSplq1ao6avvrqK2OMMSkpKaZ27dqmSJEiZtiwYWbq1Kmmd+/exmazmSeffNKxDrvdblq2bGk8PDzMY489ZiZNmmTatGljateubSSZGTNm5Om17tChg2nbtq0xxpj9+/cbm81m5s+fn61duXLlTKdOnZymnT171hQrVswUL17c6f2IjY3NdXtZr/Hzzz+fp/pys2DBAiPJfPPNNznOl2Tq1KljSpQoYUaNGmUmTJhgKlasaAICApz6UEJCgildurQpU6aMGTlypJkyZYq54447jCQzfvx4R5uRI0caSWbgwIGO92zPnj3GGGMef/xxc9ttt5lXX33VTJs2zfTv3994enqae++997KfX176kTHn3peqVauaYsWKmWHDhpm3337b1K9f39hsNrN9+3ZHu8OHD5vw8HBTqFAhExsba15//XVz0003OfrL3r17c63l0KFDRpJ56aWXcqxTkjl8+LBTTf7+/sbT09NIMuXKlTMTJkxwWi45OdmEhoaawYMHm3379pmxY8caLy8vs2/fPmOMMc2aNTNDhgyx9Jrt2rXLSDIPPvhgntqnpqaa6tWrG29vb/PUU0+Zt956y7Ro0cJIcqp37969RpKpV6+eqVy5shkzZowZO3asKVq0qCldurQ5e/asMeZc/x84cKCRZEaOHGk+/vhj8/333xtjjOnTp48pV66cY51WPr/bt283oaGhpkaNGmbMmDHm7bffNi1btjQ2m80sWrTI0S5rv1mvXj3Tpk0bM2nSJPP0008bT09P061bN6fnPmPGDGOz2UzNmjXNK6+8YiZPnmweeugh88ADDzjarFq1yvj4+JgmTZqYN954w4wfP97Url3b+Pj4mA0bNlz0tc16zV5//XXL9V2N/WPW9mvUqGEqVqxoXnvtNTN+/Hizf/9+06ZNG1OjRo1sNY8YMcJ4eno6/j5s3LjRVKpUyTz33HNm2rRpZuTIkaZUqVImNDTUHDx4MNu2LrXPzXr+tWrVMi1atDBvvfWWiY6ONh4eHqZly5bGbrcbY4yJi4szksznn3/utPzhw4eNp6enGTly5EW3c76bb77Z6W+cMef2jdWrVzfh4eFmxIgRZvLkyebnn3/O077QGGOOHj1qSpQoYQYPHmymTJlixo4da6pWrWq8vb3Nzz//bIw59/meMmWKkWTuuusux/u4detWY8y5z4Ofn5+pUaOGeeSRR8zkyZNN06ZNHa9jyZIlzTPPPGMmTZpkbr75ZuPp6Wn+/PNPRw1W+8GlPrvff/+9adeunZHkqPXjjz++6GvbqlUrc/PNN1+0zUcffWRsNpvp0KGDmTRpkhkzZowpX768CQsLc+xzs/7ujh07NtvyFStWdPq7m9fPZFZfu9h+HdceQQrXTNZOIKcfX19fR7snn3zShISEmIyMjFzXdbEv3LkFqZo1azp2sMYY06NHD2Oz2UzHjh2dlm/SpInTlyNjzn05u1D79u1NxYoVnabl9AfOGGNGjRplAgMDza5du5ymP/fcc8bT09McOHDAGGPMZ599ZiQ57XwzMjIcXwTzEqT++ecf4+XlZd59913HtKZNm5o777wzW9ty5cqZqKgoR6DdunWrue+++4wk8/jjjxtjjJkwYYKRZBYvXpzrNk+cOGEkmbvvvvuS9V1MXoKUj4+P2b17t2Pa1q1bjSQzadIkx7T+/fubEiVKOIUrY4y57777TGhoqOP93LhxY66va07v+ejRo43NZjP79++/jGeX935Urlw5I8msXbvWMe3IkSPG19fXPP30045pMTExRpLTH9sjR46Y0NDQS/7BzczMNGFhYY7AneXYsWMmMDDQSDI//fSTY3rnzp3NmDFjzGeffWbef/99R5989tlnnZafM2eO8ff3N5KMp6enGTdunDHGmNmzZ5tixYqZxMTEi7xC2S1ZsiTbl76Lyeqvs2bNckw7e/asadKkiQkKCjJJSUnGmP99GStSpIg5ceJEtu2d/4U3a9+1ceNGp21dGKSsfH7btm1ratWqZc6cOeOYZrfbTdOmTc1NN92Ubdu33nqr40u5McY89dRTxtPT05w8edIYY8zJkydNcHCwady4sTl9+rRTnVnL2e12c9NNN5n27ds7rSs1NdVUqFDBtGvX7iKv7MWD1KXqM+bK949Z2w8JCTFHjhxxajtt2jQjyWzbts1peo0aNUybNm0cj8+cOWMyMzOzPS9fX1+nMGM1SDVo0MDp78vYsWONJLNkyRJjzLnPW+nSpU337t2dln/zzTeNzWZzChSXkluQ8vDwMDt27HCantd9YUZGhklLS3Nq8++//5pixYo5/RPj6NGjRpIZPnx4trr69OljJJlXX33VaR3+/v7GZrOZuXPnOqb//vvv2dZjtR/k5bMbHR1trBwzuFSQOnXqlAkLCzMDBgxwmp6QkGBCQ0Odpjdp0sQ0aNDAqd2PP/5oJJmPPvrIGGPtM0mQck+c2odrbvLkyYqLi3P6Of/amrCwMKWkpCguLu6qbrd3795OpwU1btxYxhg9+OCDTu0aN26sv/76y2lEMX9/f8fviYmJOnbsmFq1aqU///xTiYmJl9z2ggUL1KJFCxUqVEjHjh1z/Nx6663KzMzU2rVrJZ27MNbLy0uPPvqoY1lPT089/vjjeX6ec+fOlYeHh+655x7HtB49emjFihVOpxJk+eqrrxQeHq7w8HDVqVNHCxYs0AMPPKAxY8ZIkk6dOiVJCg4OznWbWfOSkpLyXOfluvXWW1WpUiXH49q1ayskJER//vmnpHOjC3766afq3LmzjDFOr3f79u2VmJiozZs3X3I757/nKSkpOnbsmJo2bSpjjH7++efLqt1KP6pRo4ZatGjheBweHq6qVas6nqd0rr/85z//0S233OLUrlevXpesxcPDQw8//LBWrVqloUOH6o8//tCmTZvUrVs3nT17VpKcTmNcunSpnn32Wd1555168MEHtWbNGrVv315vvvmm/v77b0e7Hj16
6ODBg1q/fr0OHjyop59+Wqmpqfrvf/+rV155RUFBQRoxYoQqVqyo2rVr5zhC5Pmy+tTF+t/5li9fruLFi6tHjx6Oad7e3nriiSeUnJysNWvWOLXv3r2741RESY7X/PzXOa/y+vk9ceKEVq9erW7duunUqVOO/nn8+HG1b99ef/zxhw4ePOi0zMCBA51OS2rRooUyMzMdpw7FxcXp1KlTeu6557JdF5O13JYtW/THH3+oZ8+eOn78uGO7KSkpatu2rdauXXvZg2dcqr6Lyev+Mcs999yTbSCUu+++W15eXpo3b55j2vbt2/Xrr7+qe/fujmm+vr7y8Dj31SczM1PHjx9XUFCQqlatmqf9Qm4GDhzo9Pfl0UcflZeXl5YvXy7p3OetV69eWrp0qWOfKp0bFKhp06Y5DsBkVatWrVSjRg3HYyv7Qk9PT8cpvna7XSdOnFBGRoYaNmxo+XU5fwChsLAwVa1aVYGBgerWrZtjetWqVRUWFub0ObPaD67mZzev4uLidPLkSfXo0cOpRk9PTzVu3NjpVMju3btr06ZN2rNnj2PavHnz5OvrqzvvvFNS/n4mcW0w2ASuuVtuueWig0089thjmj9/vjp27KhSpUopKipK3bp1U4cOHa5ou2XLlnV6HBoaKkkqU6ZMtul2u12JiYkqUqSIJOm7777T8OHDtX79esf1HlkSExMd68rNH3/8oV9++SXbH/8sWRez79+/XyVKlFBQUJDTfCvnz8+aNUu33HKLjh8/ruPHj0uS6tWrp7Nnz2rBggUaOHCgU/vGjRvr5Zdfls1mU0BAgKpXr+50UX3WF9jz//hfKC9h62q58H2Uzl2TkxUSjx49qpMnT2r69OmaPn16juvIer0v5sCBA3rppZe0dOnSbAE0L+E5J1b60aWep3SuvzRu3Dhbu7z2l5EjR+rYsWMaO3asXnvtNUnnrtfo37+/pk6dmq0fns9ms+mpp57Sl19+qfj4eKcL3wsVKqT//Oc/jsejR49WRESE+vXrpw8++EBTp07V7NmztW/fPnXv3l2//vprrqN2Zo32d7H+d779+/frpptucnxZzpI1CuSFX+wvfJ2zvpjl9E+HvGw7L5/f3bt3yxijF198US+++GKO6zpy5IhKlSqV5zqzvqxdbOjmP/74Q5Ic13zlJDEx0enLaV5dyeuY1/1jlpxCR9GiRdW2bVvNnz9fo0aNknTuS6uXl5fuvvtuRzu73a6JEyfqnXfe0d69e5WZmemYl7W/vxw33XST0+OgoCCVKFHC6VYCvXv31pgxY7R48WL17t1bO3fu1KZNmzR16tTL3u75LnxdrO4LP/zwQ73xxhv6/fffna7lshLy/Pz8sr2PoaGhKl26dLbrk0JDQ536h9V+cDU/u3mV9Rlq06ZNjvOz9leS1LVrVw0ePFjz5s3TsGHDZIzRggUL1LFjR0e7/PxM4togSMHtREREaMuWLfryyy+1YsUKrVixQjNmzFDv3r2zXTBuhaenp6XpxhhJ576gtG3bVtWqVdObb76pMmXKyMfHR8uXL9f48ePz9N8iu92udu3a6dlnn81x/vlDSF+JP/74w3Gx7YV/2KVz//28MEgVLVo01xH9pP99Af3ll1/UpUuXHNv88ssvkuT039D8cqn3K+v9uP/++3P943Sp4dQzMzPVrl07nThxQv/9739VrVo1BQYG6uDBg+rbt+9l/YfQaj+61PO8Gnx8fPTee+/plVde0a5du1SsWDFVqVJFPXv2lIeHxyVvSZD1T4gTJ07k2mbfvn1644039NVXX8nDw0OffPKJHn74YccXkQ8//FBz587VCy+8kOPy1apVk3TuQvX8cC1e5wtlvddDhgxxjGZ4oQtf+6tRZ9Z2X3/99VyH+79YeL6YK6nP6v7x/CO757vvvvvUr18/bdmyRXXr1tX8+fPVtm1bFS1a1NHm1Vdf1YsvvqgHH3xQo0aNUuHCheXh4aGYmJh8/89/jRo11KBBA82aNUu9e/fWrFmz5OPj43Sk5kpc+LpY2RfOmjVLffv2VZcuXfTMM88oIiJCnp6eGj16tNMRlUu53L+zWfVa6Qeu/Ox+/PHHKl68eLb559/SoWTJkmrRooXmz5+vYcOG6YcfftCBAwccZ3ucv778+Ezi2iBIwS35+Pioc+fO6ty5s+x2ux577DFNmzZNL774oipXrnzNbj4oSZ9//rnS0tK0dOlSp/+AnX8IP0tudVWqVEnJyckXDSySVK5cOa1atUrJyclOO8+dO3fmqdbZs2fL29tbH3/8cbY/MuvWrdNbb72lAwcO5Hi0IzfNmzdXWFiY5syZo+effz7HP15Zo8mdP3Kaq4SHhys4OFiZmZmXfL1ze7+2bdumXbt26cMPP1Tv3r0d06/kdFMr/SivypUr5/iP5vny2l+yFCtWzHEPsMzMTMXHx6tx48aX/AOedQpNbv9Bls6FhTvuuEPNmzeXdO7eXSVLlnTML1myZLbT2M5XpUoVVa1aVUuWLNHEiRMvWVO5cuX0yy+/yG63Ox2V+v333x3z80teP78VK1aUdO6Uw0v10bzKOt11+/btuQbgrDYhISFXbbtWXOn+8VK6dOmihx9+2HF6365duzR06FCnNgsXLlTr1q31/vvvO00/efKkU+Cy6o8//lDr1q0dj5OTk3X48GHddtttTu169+6twYMH6/Dhw5ozZ446deqUb0cbrOwLFy5cqIoVK2rRokVO79Pw4cOd2uXn396r1Q/Od7XrzfoMRURE5KnO7t2767HHHtPOnTs1b948BQQEqHPnztnW56rPJK4c10jB7WSdjpbFw8PD8V+zrGHJAwMDJcnysOCXIys4nP9frsTERM2YMSNb28DAwBxr6tatm9avX68vv/wy27yTJ086rse67bbblJGR4TQkdmZmpiZNmpSnWmfPnq0WLVqoe/fuuvfee51+nnnmGUnSJ598kqd1ZQkICNCQIUO0c+fOHG8A+cUXX2jmzJlq37690+lcruLp6al77rlHn376qbZv355t/vlDMufWj3J6z40xjiH4L7euC9eZWz/Kq9tuu00//PCDfvzxR8e0o0ePavbs2Ze9znHjxunw4cNON949ceKE0ylQkpSenq7XXntNPj4+Tl8gz/fNN99o+fLlGjt2rGNasWLFHKFGkn777bcc/7N7vhEjRuj48eN66KGHnK5dzPLVV19p2bJlks69JgkJCU7XymRkZGjSpEkKCgpSq1atLrqtK5HXz29ERIQiIyM1bdo0HT58ONt6chrW/FKioqIUHBys0aNHZ7vVQVafa9CggSpVqqRx48YpOTn5qmzXiivdP15KWFiY2rdvr/nz52vu3Lny8fHJdhTd09Mz2xGLBQsWXDTM58X06dOdToebMmWKMjIy1LFjR6d2PXr0kM1m05NPPqk///zzoveCulJW9oU57Z82bNig9evXOy0TEBAgKX/+9l6tfnC+q/1doX379goJCdGrr76a41D2F36G7rnnHnl6euqTTz7RggULdPvttztqklz/mcSV44gUrrkVK1Y4fZHK0rRpU1WsWFEPPfSQTpw
4oTZt2qh06dLav3+/Jk2apLp16zpOM6tbt648PT01ZswYJSYmytfX13F/nqstKirKcYTs4YcfVnJyst59911FRERk+xLUoEEDTZkyRS+//LIqV66siIgItWnTRs8884yWLl2q22+/XX379lWDBg2UkpKibdu2aeHChdq3b5+KFi2qzp07q1mzZnruuee0b98+1ahRQ4sWLcrTNTkbNmzQ7t27NWjQoBznlypVSvXr19fs2bP13//+19Jr8Nxzz+nnn3/WmDFjtH79et1zzz3y9/fXunXrNGvWLFWvXj3baZczZ85Uv379NGPGDMd9d66V1157Td98840aN26sAQMGqEaNGjpx4oQ2b96sr7/+2nEqWqVKlRQWFqapU6cqODhYgYGBaty4sapVq6ZKlSppyJAhOnjwoEJCQvTpp5/meO79vn37VKFCBfXp08dxD6qcWOlHefXss8/q448/VocOHfTkk08qMDBQ06dPdxyVuZRZs2bp008/VcuWLRUUFKSvv/5a8+fP10MPPeQ0WMnSpUv18ssv695771WFChV04sQJzZkzR9u3b9err76aYxDKzMxUTEyMnnnmGacjcPfee6+effZZhYeHa//+/dq2bdslg1/37t21bds2vfLKK/r555/Vo0cPlStXTsePH9fKlSu1atUqxz1pBg4cqGnTpqlv377atGmTypcvr4ULF+q7777ThAkT8vU6Piuf38mTJ6t58+aqVauWBgwYoIoVK+qff/7R+vXr9ffff1u+r1FISIjGjx+vhx56SI0aNVLPnj1VqFAhbd26Vampqfrwww/l4eGh9957Tx07dtTNN9+sfv36qVSpUjp48KC++eYbhYSE6PPPP79aL0c2V7p/zIvu3bvr/vvv1zvvvKP27dtnu4ny7bffrpEjR6pfv35q2rSpo/9lHSW8XGfPnlXbtm3VrVs37dy5U++8846aN2+uO+64w6ldeHi4OnTooAULFigsLEydOnW6ou1eSl73hbfffrsWLVqku+66S506ddLevXs1depU1ahRw+kLvr+/v2rUqKF58+apSpUqKly4sGrWrHnRa/Py6mr2gywNGjSQJD3xxBNq3769PD09dd999110maNHj+rll1/ONr1ChQrq1auXpkyZogceeED169fXfffdp/DwcB04cEBffPGFmjVr5rhnmXTunyatW7fWm2++qVOnTjkNfCLJ5Z9JXAXXcIRA3OAuNvy5zhtmduHChSYqKspEREQYHx8fU7ZsWfPwww873dPGGGPeffddU7FiRcd9bbKGzM5t+PMFCxbkWM+FQxoPHz7cSDJHjx51TFu6dKmpXbu28fPzM+XLlzdjxowxH3zwQbahSBMSEkynTp1McHCwkeRUx6lTp8zQoUNN5cqVjY+PjylatKhp2rSpGTdunNOwucePHzcPPPCACQkJMaGhoeaBBx4wP//88yWH4n388ceNJMd9kHISGxtrJDnu+5HTfaRyk5mZaWbMmGGaNWtmQkJCjJ+fn7n55pvNiBEjTHJycrb2kyZNMpLMypUr87R+Y/I2/Hl0dHS26eXKlTN9+vRxmvbPP/+Y6OhoU6ZMGePt7W2KFy9u2rZta6ZPn+7UbsmSJaZGjRrGy8vL6TX+9ddfza233mqCgoJM0aJFzYABAxxDrZ//Pmzbts1IMs8999wln19e+1Fu78uFfdsYY3755RfTqlUr4+fnZ0qVKmVGjRpl3n///TwNk7thwwbTsmVLU6hQIePn52fq1Kljpk6d6jQMrzHG/PTTT6Zz586mVKlSxsfHxwQFBZnmzZvneG+yLJMnTzalS5c2KSkpTtPT09PN4MGDTdGiRU25cuXMhx9+eNEaz7dq1Spz5513moiICOPl5WXCw8NN586dHUNMZ/nnn39Mv379TNGiRY2Pj4+pVatWts9OTkN5Z9EFwzLndfhzY6x9fvfs2WN69+5tihcvbry9vU2pUqXM7bffbhYuXHjJbWft1y78rCxdutQ0bdrU+Pv7m5CQEHPLLbeYTz75xKnNzz//bO6++25TpEgR4+vra8qVK2e6detmVq1ale21uNRrZqW+K90/Xuw9y5KUlOQYfv/8IfCznDlzxjz99NOmRIkSxt/f3zRr1sysX78+22fL6vDna9asMQMHDjSFChUyQUFBplevXub48eM5LjN//nyj/79/3eXIbfjznPaNxuRtX2i3282rr75qypUrZ3x9fU29evXMsmXLcuzj33//vWnQoIHx8fFx+qz06dPHBAYGZtt+bkOK57Sfu9J+cOFnNyMjwzz++OMmPDzc2Gy2Sw6F3qpVq1y/o5x/q4hvvvnGtG/f3oSGhho/Pz9TqVIl07dvX6dbRmR59913jSQTHByc7dYEWfLymWT4c/dkMyYfr8oDcMPq1q2b9u3b53Ta2fXonXfe0bPPPqs9e/Y4rjMCgNwsWbJEXbp00dq1a51ucQCg4OHUPgBXnTFG8fHxmjVrlqtLyXfffPONnnjiCUIUgDx59913VbFiRccALAAKLoIUgKvOZrPl6V5N14MFCxa4ugQABcDcuXP1yy+/6IsvvtDEiROv6eizAPIHp/YBAADkM5vNpqCgIHXv3l1Tp051uucQgIKJTzEAAEA+4//WwPWH+0gBAAAAgEUEKQAAAACwiFP7JNntdh06dEjBwcFc/AkAAADcwIwxOnXqlEqWLCkPj9yPOxGkJB06dEhlypRxdRkAAAAA3MRff/2l0qVL5zqfICUpODhY0rkXKyQkxKW1pKen66uvvlJUVJS8vb1dWgsKBvoMrKLPwCr6DKyiz8Aqd+ozSUlJKlOmjCMj5IYgJTlO5wsJCXGLIBUQEKCQkBCXdyIUDPQZWEWfgVX0GVhFn4FV7thnLnXJD4NNAAAAAIBFBCkAAAAAsIggBQAAAAAWcY1UHmVmZio9PT3ft5Oeni4vLy+dOXNGmZmZ+b49FHzXus94enrKy8uLWwUAAIAbGkEqD5KTk/X333/LGJPv2zLGqHjx4vrrr7/4ooo8cUWfCQgIUIkSJeTj43NNtgcAAOBuCFKXkJmZqb///lsBAQEKDw/P9y+qdrtdycnJCgoKuugNwIAs17LPGGN09uxZHT16VHv37tVNN91EPwUAADckgtQlpKenyxij8PBw+fv75/v27Ha7zp49Kz8/P76gIk+udZ/x9/eXt7e39u/f79guAADAjYZv6nnEaXbA/xDyAQDAjc6l34ZGjx6tRo0aKTg4WBEREerSpYt27tzpmH/ixAk9/vjjqlq1qvz9/VW2bFk98cQTSkxMdLTZunWrevTooTJlysjf31/Vq1fXxIkTXfF0AAAAANwgXHpq35o1axQdHa1GjRopIyNDw4YNU1RUlH799VcFBgbq0KFDOnTokMaNG6caNWpo//79euSRR3To0CEtXLhQkrRp0yZFRERo1qxZKlOmjL7//nsNHDhQnp6eGjRokCufHgAAAIDrlEuD1MqVK50ez5w5UxEREdq0aZNatmypmjVr6tNPP3XMr1Spkl555RXdf//9ysjIkJeXlx588EGndVSsWFHr16/Xok
WL3DJIZdgz5eXh6eoyAAAAAFwBtxpsIuuUvcKFC1+0TUhIiLy8ci89MTHxoutIS0tTWlqa43FSUpKkcwNLXHivqKzBJux2u+x2e56eR27WJuzR3as/1OI2fdWieMUc22QNsZ61zSu1fv16tWzZUu3bt9eyZcsc0/ft26dKlSo5HhcuXFj169fXa6+9pnr16jmm79ixQyNHjlR8fLySkpJUrlw5de/eXf/9738VEBBwxfXhyl3tPpMXdrtdxhilp6fL05N/DBQ0Wfu5a3FvPFwf6DOwij4Dq9ypz+S1Bpu5FjdHygO73a477rhDJ0+e1Lp163Jsc+zYMTVo0ED333+/XnnllRzbfP/992rVqpW++OILRUVF5dgmNjZWI0aMyDZ9zpw52cKBl5eXihcvrjJlylzRPXPS7Zlqtmqa9iSfUOWgIlrXdqC8r8GRqSeeeEKBgYGaNWuWfvzxR5UoUUKSdODAAdWpU0efffaZqlWrpkOHDum///2v9u/frx9//FGhoaHauHGj7rrrLrVq1UqDBw9WeHi4Nm/erBdeeEGlSpXS559/zn2EblBnz57VX3/9pYSEBGVkZLi6HAAAgKsmNTVVPXv2dBzAyY3bBKlHH31UK1as0Lp161S6dOls85OSktSuXTsVLlxYS5culbe3d7Y227dvV+vWrfXkk0/qhRdeyHVbOR2RKlOmjI4dO5btxTpz5oz++usvlS9f/oqGeR6/Y63++9MyGUk2SWMb3q6Ym1tma2eM0alTpxQcHHzFIwUmJyerVKlS+vHHHxUbG6vatWtr6NChkv53RGrTpk2qW7eupHMhtEWLFlq+fLmioqJUu3ZtBQQEaP369U6jtG3dulUNGjTQq6++qmefffaKasSVu5p9Jq/OnDmjffv2qUyZMgx/XgClp6crLi5O7dq1y3FfClyIPgOr6DOwyp36TFJSkooWLXrJIOUWp/YNGjRIy5Yt09q1a3MMUadOnVKHDh0UHBysxYsX5/ji/vrrr2rbtq0GDhx40RAlSb6+vvL19c023dvbO9u6MzMzZbPZ5OHhcdlDPh9KTdTwn79UVmI1koZv+VL3VaqnkgGhTm2zTs3K2uaVWLhwoapVq6bq1avrgQceUExMjIYNG+a07vOfV2BgoCQpIyNDv/zyi3799VfNmTMn22mU9erV06233qq5c+fqueeeu6IaceWuZp/JKw8PD9lsthw/Myg4eP9gFX0GVtFnYJU79Jm8bt+lw58bYzRo0CAtXrxYq1evVoUKFbK1SUpKUlRUlHx8fLR06dIc//u9Y8cOtW7dWn369Mn1lD9XGvLj5zprz3SalpaZqWd+/Dxft/v+++/r/vvvlyR16NBBiYmJWrNmTY5tT548qVGjRikoKEi33HKLdu3aJUmqXr16ju2rV6/uaAMAAADcaFwapKKjozVr1izNmTNHwcHBSkhIUEJCgk6fPi3pfyEqJSVF77//vpKSkhxtMjPPBZOs0/mioqI0ePBgx/yjR4+68qk5fHN4t+bt3aJM4zwIQKaxa+7eLYo/vDtftrtz5079+OOP6tGjh6Rz13p1795d77//vlO7pk2bKigoSIUKFdLWrVs1b948FStWzDHfTc78BAAAANyKS0/tmzJliiQpMjLSafqMGTPUt29fbd68WRs2bJAkVa5c2anN3r17Vb58eS1cuFBHjx7VrFmzNGvWLMf8cuXKad++fflaf158vPsnecgmu7IHEg/Z9NHunxRZonIOS16Z999/XxkZGSpZsqRjmjFGvr6+evvttx3T5s2bpxo1aqhIkSIKCwtzTK9SpYok6bfffnMaxS/Lb7/95mgDAABuTImbJyhx88TsM4xRtTNndHimn5TD9buh9Z9UaP2Y/C8QyEcuDVKXOtoRGRl5yTaxsbGKjY29ilVdXb0rN9SHu3/KcZ5dRn1uanTVt5mRkaGPPvpIb7zxRraRC7t06aJPPvlEHTp0kCSVKVPGaRj0LHXr1lW1atU0fvx43XfffdkGm/j66681evToq147AAAoOOxpScpMPpjjPB9JmSm5LwcUdG4x2MT1LLJEZXWvUFcL9/3idHqfp81DXcvXVqvi2UPMlVq2bJn+/fdf9e/fX6GhzoNZ3HPPPXr//fcdQSo3NptN77//vtq1a6d77rlHQ4cOVfHixbVhwwY9/fTTatKkiWJiYq567QAAoODw8A2RZ1Ap54nGKDPl0Ln5gSVks2W/ksTDN/eR0HB9u56OYhKkroFxt3TW0gM7dDrzf0HK19NTr9/SOV+29/777+vWW2/NFqKkc0Fq7NixjpsQX0zTpk31ww8/aMSIEerYsaNOnTqlsmXLqk+fPho6dGiOIx8CAIAbR2j9mGxfbu3pKdo/uZAkqXivrfINCLv2hcFtXU9HMQlS10DJgFCNqt9Bz2z83HEfqVH1O2Yb+vxq+fzz3EcDvOWWWxynS+ZlIIlatWpp4cKFV602AAAA3Liup6OYBKlrZFCN5np35w/amXRUVULDFV29matLAgAAAK6p6+kopkuHP7+ReHt4akqzexXq7aepTe+Vt4enq0sCAAAAcJk4InUNtSpeSUd6jpAXIQoAAAAo0DgidY0RogAAAICCjyAFAAAAABYRpAAAAADAIq6Ryge53mjsEkLrP6nguk/kQ0UAAAAAriaCVD642I3GLrUcAAAAAPdHkMoHl7rRmGdgSclmy3E5AAAAAO6PIJUPcrrRWHpashaOb6Ej9kKq1fEjtbqplDw9socpu91+jaq0bubMmYqJidHJkyddXUq+6Nu3r06ePKnPPvss1zbx8fFq3bq1/v33X4WFhV2z2gAAAOBeGGziGli07bAqjf1evY6N1FMnntKt7/6sCq98rUXbDufbNvv27SubzZbtp0OHDnlavnz58powYYLTtO7du2vXrl35UK2zmTNnuiSkTJw4UTNnznQ8joyMVExMzFVZ9+bNm9WuXTuFhYWpSJEiGjhwoJKTk53a5PR+zZ071zH/559/Vr169RQUFKTOnTvrxIkTjnkZGRlq1KiRfvzxxzzV8/PPP6tr164qVqyY/Pz8dNNNN2nAgAGO93ffvn2y2WzasmXLlT95AACA6xBBKp8t2nZYXT/8SX8npjlNP5h4Rl0//Clfw1SHDh10+PBhp59PPvnkstfn7++viIiIq1ihewkNDc2XAHfo0CHdeuutqly5sjZs2KCVK1dqx44d6tu3b7a2M2bMcHq/unTp4pj30EMPqU2bNtq8ebMSExP16quvOua9/fbbatq0qW655ZZL1rNs2TL95z//UVpammbPnq3ffvtNs2bNUmhoqF588cWr8ZQBAACuewSpfJRpN4r5bLtMDvOypj21ZLsy7Tm1uHK+vr4qXry400+hQoXObd8YxcbGqmzZsvL19VXJkiX1xBPnRgyMjIzU/v379dRTTzmOjEjZjxTFxsaqbt26+uCDD1S2bFkFBQXpscceU2ZmpsaOHavixYsrIiJCr7zyilNdb775pmrVqqXAwECVKVNGjz32mOPoTHx8vPr166fExETHtmNjYyVJaWlpGjJkiEqVK
qXAwEA1btxY8fHxuT7/IUOG6Pbbb3c8njBhgmw2m1auXOmYVrlyZb333nuSzh3Fywouffv21Zo1azRx4kRHHfv27XMst2nTJjVs2FABAQFq2rSpdu7cmWsdy5Ytk7e3tyZPnqyqVauqUaNGmjp1qj799FPt3r3bqW1YWJjT++Xn5+eY99tvv2nAgAGqUqWKevTood9++02S9Oeff2rWrFl6+eWXc60hS2pqqvr166fbbrtNS5cu1a233qoKFSqocePGGjdunKZNm3bJdQAAAIAgla++/fO4/k48k+t8I+mvk2f07Z/Hr11R/+/TTz/V+PHjNW3aNP3xxx/67LPPVKtWLUnSokWLVLp0aY0cOdJxZCQ3e/bs0YoVK7Ry5Up98sknev/999WpUyf9/fffWrNmjcaMGaMXXnhBGzZscCzj4eGht956Szt27NCHH36o1atX69lnn5UkNW3aVBMmTFBISIhj20OGDJEkDRo0SOvXr9fcuXP1yy+/qGvXrurQoYP++OOPHGtr1aqV1q1bp8zMTEnSmjVrVLRoUUf4OnjwoPbs2aPIyMhsy06cOFFNmjTRgAEDHHWUKVPGMf/555/XG2+8oZ9++kleXl568MEHc32N0tLS5OPjIw+P/33c/P39JUnr1q1zahsdHa2iRYvqlltu0QcffCBj/hey69Spo7i4OGVkZGjVqlWqXbu2JOnRRx/ViBEjFBwcnGsNWb788ksdO3bM8XpfiOu+AAAA8oYglY8On0q7dCML7axatmyZgoKCnH6yTgc7cOCAihcvrltvvVVly5bVLbfcogEDBkiSChcuLE9PTwUHBzuOjOTGbrfrgw8+UI0aNdS5c2e1bt1aO3fu1IQJE1S1alX169dPVatW1TfffONYJiYmRq1bt1b58uXVpk0bvfzyy5o/f74kycfHR6GhobLZbI5tBwUF6cCBA5oxY4YWLFigFi1aqFKlShoyZIiaN2+uGTNm5FhbixYtdOrUKf38888yxmjt2rV6+umnHUEqPj5epUqVUuXKlbMtGxoaKh8fHwUEBDjq8PT0dMx/5ZVX1KpVK9WoUUPPPfecvv/+e505k3NobtOmjRISEvT666/r7Nmz+vfff/Xcc89JklNIHTlypObPn6+4uDjdc889euyxxzRp0iTH/Pfee08LFy5UpUqV5OPjo6FDh+rjjz9WQECA6tWrpw4dOqhy5cp64YUXcn2/skJntWrVcm0DAACAS2PUvnxUItj3qrazqnXr1poyZYrTtMKFC0uSunbtqgkTJqhixYrq0KGDbrvtNnXu3FleXta6RPny5Z2OhBQrVkyenp5OR1+KFSumI0eOOB5//fXXGj16tH7//XclJSUpIyNDZ86cUWpqqgICAnLczrZt25SZmakqVao4TU9LS1ORIkVyXCYsLEx16tRRfHy8fHx85OPjo4EDB2r48OFKTk7WmjVr1KpVK0vPN0vW0SBJKlGihCTpyJEjKlu2bLa2N998sz788EMNHjxYQ4cOlaenp5544gkVK1bM6XU6//qkevXqKSUlRa+//rrjlMubb75Za9ascbQ5fvy4hg8frvj4eEVHR6tp06ZavHixGjVqpMaNG6tz587Zajn/CBcAAAAuH0ek8lGLikVUOtRP2Qc5P8cmqUyYn1pUzDkIXKnAwEBVrlzZ6ScrSJUpU0Y7d+7UO++8I39/fz322GNq2bKl0tPTLW3D29vb6bHNZstxWtaw7vv27dPtt9+u2rVr69NPP9WmTZs0efJkSdLZs2dz3U5ycrI8PT21adMmbdmyxfHz22+/aeLEibkuFxkZqfj4eEdoKly4sKpXr65169ZdUZA6/zlmXUN2saHre/bsqYSEBB08eFDHjx9XbGysjh49qooVK+a6TOPGjfX3338rLS3nI5aDBw9WTEyMSpcurXXr1unee+9VYGCgOnXqlOu1Y1lB9Pfff7/UUwQAAMBFEKTykaeHTRO61JSkbGEq6/H4O2vmeD+pa8Hf31+dO3fWW2+9pfj4eK1fv17btm2TdO4Uu6xri66mTZs2yW6364033tB//vMfValSRYcOHXJqk9O269Wrp8zMTB05ciRbOLzYqYdZ10mtWrXKcS1UZGSkPvnkE+3atSvH66MuVseVKlasmIKCgjRv3jz5+fmpXbt2ubbdsmWLChUqJF/f7EcsV61apd9++02DBg2SJGVmZjpCcHp6eq51R0VFqWjRoho7dmyO86/Xe4QBAABcbQSpfHZ3rRJa0KehSoU6fxkuHeanBX0a6u5aJfJt22lpaUpISHD6OXbsmKRzI/C9//772r59u2PUN39/f5UrV07SuVP21q5dq4MHDzqWuRoqV66s9PR0TZo0SX/++ac+/vhjTZ061alN+fLllZycrFWrVunYsWNKTU1VlSpV1KtXL/Xu3VuLFi3S3r179eOPP2r06NH64osvct1ey5YtderUKS1btswpSM2ePVslSpTIdqrghXVs2LBB+/bt07Fjx67oZslvv/22Nm/erF27dmny5MkaNGiQRo8e7Rjc4fPPP9d7772n7du3a/fu3ZoyZYpeffVVPf7449nWdebMGQ0aNEjTp093nBrYuHFjvfPOO9q6das+/fRTNWvWLMc6AgMD9d577+mLL77QHXfcoa+//lr79u3TTz/9pGeffVaPPPLIZT9HAACAGwlB6hq4u1YJ7Xm2qWYXfUnjC4/X1wPq6c9ht+ZriJKklStXqkSJEk4/zZs3l3Tu+qF3331XzZo1U+3atfX111/r888/d1xvNHLkSO3bt0+VKlVSeHj4VaupTp06evPNNzVmzBjVrFlTs2fP1ujRo53aNG3aVI888oi6d++u8PBwx9GTGTNmqHfv3nr66adVtWpVdenSRRs3bszxuqQshQoVUq1atRQeHu4YYKFly5ay2+2XPK1vyJAh8vT0VI0aNRQeHq4DBw5c9vP+8ccf1a5dO9WqVUvTp0/XtGnTHNc+SXIMj96kSRPVrVtX06ZN05tvvqnhw4dnW9eIESPUqVMn1a1b1zFtzJgx2rp1q1q2bKnOnTvrnnvuybWWO++8U99//728vb3Vs2dPVatWTT169FBiYmKehlAHAACAZDNcfa6kpCSFhoYqMTFRISEhTvPOnDmjvXv3qkKFCk739LmYxM0TlLj5gut2jFFmyrlT2DwDS0q27KfzhdZ/UsF1n1BSUpJCQkKcBiIAcmO32695n7mczwXcR3p6upYvX67bbrst2zWNQE7oM7DCnp6i/ZPP3bey5MAj8g0Ic21BcHvu1mculg3Ox6h9+cCelqTM5IO5zs8KVDktBwAAAMD9EaTygYdviDyDSl3WcgAAAADcH0EqH4TWj1Fo/ZjLWvZKBjQAAAAAcG1wEQ4AAAAAWESQyiPG5AD+h88DAAC40RGkLsHT01OSdPbsWRdXAriP1NRUSWL0LgAAcMPiGqlL8PLyUkBAgI4ePSpvb+98H17abrfr7NmzOnPmDMOfI0+uZZ8xxig1NVVHjhxRWFiY4x8NAAAANxqC1CXYbDaVKFFCe/fu1f79+/N9e8YYnT59
Wv7+/rLlcK8p4EKu6DNhYWEqXrz4NdkWAACAOyJI5YGPj49uuumma3J6X3p6utauXauWLVty2hTy5Fr3GW9vb45EAQCAGx5BKo88PDzk5+eX79vx9PRURkaG/Pz8CFLIE/oMgKslcfMEJW6emH2GMap25owOz/STcjjyHVr/ycu+7QcAZMlQwTobiyAFAAAkSfa0JGUmH8xxno+kzJTclwOAK7HBs5QeCbxdnx7Zr7blw1xdTp4QpAAAgCTJwzdEnkGlnCcao8yUQ+fmB5aQzZZ9UBsP35BrUR6A61S6PVMv+LfRKZuvBv20XL+UrSlvD/e/jIAgBQAAJEmh9WOynaJnT0/R/smFJEnFe22Vb0DYtS8MBUqm3eiHMzfrsL2Qau09qTbVQ+XpUbBO2cK19fbvG7TXI0yS9Mep45r823eKubmla4vKA8bXBgAAwFWxaNthVRr7vXqlDtaQiEZqP+9bVXjlay3adtjVpcFNHUpN1PCtqxzXXxpJL25eoUOpia4tLA8IUgAAALhii7YdVtcPf9Lfiaelkrtk88yUSu7SwcRUdf3wJ8IUcjTkx8911p7pNC0tM1PP/Pi5iyrKO4IUAAAArkim3Sjms+0yklTkoORz+twMn9MyRc5dY/fUku3KtBuX1Qj3883h3Zq3d4syjXO/yDR2zd27RfGHd7uosrwhSAEAAOCKfPvncf2deEbySpMi9jlGybfZJEXslfFK018nz+jbP4+7tE64l493/yRbLkOe22TTR7t/usYVWUOQAgAAwBU5fCrt3C/F9kg2u/NMm/3c9PPbAZIqeFSUUc5HKY2MKnhWvMYVWUOQAgAAwBUpEewrBfwrW9jRbPdsttkkW9hRKeDkuXaAzp0O+u7qRJmT4brgzL5zj0+G673ViW59OihBCgAAAFekRcUiCow4plwOLkhGCow4qhYVi1zTuuC+HKeD/lNJMhdEEuMh808ltz8dlCAFAACAK+LpYdOz9Zorl8tdJJv0bP3m3E8KDo7TPDN8pSPlHUeljJF0pMK56XLv00EJUgAAALhiLza7RU0LVZXMBWHJ2NSscDW92PQW1xQGt+R0mufxUtJZ/3O/n/WXjpfMuZ2bIUgBAADgqpgX1U3+Xp5O0wK8vDS3XVcXVQR31aJiEZUO9fv/g5ge0qEqMpme0qEqkjxkk1QmzM+tTwclSAEAAOCqKBkQqhF12irrPC2bpFENOqpkQKhrC4Pb8fSwaUKXmpL+/4zQ1DDp92ZSapjjDNHxd9Z069NBCVIAAAC4agZVa6yK9n8lSTcFF1F09WYurgju6u5aJbSgT0OVCs06fe9caCod5qcFfRrq7lolXFdcHni5ugAAAABcP7w9PDXq9Dd6JPB2vd3wNnl7eF56Idyw7q5VQp2rBGvh+BY6Yi+kGnd+oDbVy7n1kagsBCkAAABcVY0zD2pj0nSVjRjl6lJQAHh62PQfvx2SpJIVwgpEiJI4tQ8AAAD5wCvXm0oB1weCFAAAAABY5NIgNXr0aDVq1EjBwcGKiIhQly5dtHPnTsf8EydO6PHHH1fVqlXl7++vsmXL6oknnlBiYqLTeg4cOKBOnTopICBAEREReuaZZ5SRkXGtnw4AAACAG4RLg9SaNWsUHR2tH374QXFxcUpPT1dUVJRSUlIkSYcOHdKhQ4c0btw4bd++XTNnztTKlSvVv39/xzoyMzPVqVMnnT17Vt9//70+/PBDzZw5Uy+99JKrnhYAAACA65xLB5tYuXKl0+OZM2cqIiJCmzZtUsuWLVWzZk19+umnjvmVKlXSK6+8ovvvv18ZGRny8vLSV199pV9//VVff/21ihUrprp162rUqFH673//q9jYWPn4+FzrpwUAAADgOudWo/ZlnbJXuHDhi7YJCQmRl9e50tevX69atWqpWLFijjbt27fXo48+qh07dqhevXrZ1pGWlqa0tDTH46SkJElSenq60tPTr8pzuVxZ23d1HSg46DOwij4DK+zn9ZOM9HR50G9wCfQZWOVufSavfx/dJkjZ7XbFxMSoWbNmqlmzZo5tjh07plGjRmngwIGOaQkJCU4hSpLjcUJCQo7rGT16tEaMGJFt+ldffaWAgIDLfQpXVVxcnKtLQAFDn4FV9Bnkhc1+RrX+//dVq1fLePi5tB64P/oMrHK3PpOampqndm4TpKKjo7V9+3atW7cux/lJSUnq1KmTatSoodjY2Cva1tChQzV48GCndZcpU0ZRUVEKCQm5onVfqfT0dMXFxaldu3by9vZ2aS0oGOgzsIo+Ayvs6Sk6NP3c723btJFPQJhL64H7o8/AKnfrM1lnq12KWwSpQYMGadmyZVq7dq1Kly6dbf6pU6fUoUMHBQcHa/HixU5/+IsXL64ff/zRqf0///zjmJcTX19f+fr6Zpvu7e3tNl8q3KkWFAz0GVhFn0Fe2PW/PuJFn0Ee0Gdglbv1mbxu36Wj9hljNGjQIC1evFirV69WhQoVsrVJSkpSVFSUfHx8tHTpUvn5OR/qa9KkibZt26YjR444psXFxSkkJEQ1atTI9+cAAAAA4Mbj0iNS0dHRmjNnjpYsWaLg4GDHNU2hoaHy9/d3hKjU1FTNmjVLSUlJjkNt4eHh8vT0VFRUlGrUqKEHHnhAY8eOVUJCgl544QVFR0fneNQJAAAAAK6US4PUlClTJEmRkZFO02fMmKG+fftq8+bN2rBhgySpcuXKTm327t2r8uXLy9PTU8uWLdOjjz6qJk2aKDAwUH369NHIkSOvyXMAAAAAcONxaZAyxlx0fmRk5CXbSFK5cuW0fPnyq1UWAAAAAFyUS6+RAgAAAICCiCAFAAAAABYRpAAAAADAIoIUAAC4pAzZXF0CALgVghQAALioDZ6l1ChkoNYd2e/qUgDAbRCkAABArtLtmXrBv41O2Xw16KflSrdnurokAHALBCkAAJCrt3/foL0eYZKkP04d1+TfvnNtQQDgJghSAAAgR4dSEzV86yrJdu76KCPpxU0rdCg10bWFAYAbIEgBAIAcdf9qvk5nOJ/Kl5qRofviFrioIgBwHwQpAACQzcjvNuj7f3dKNuM8w2b03YnfNeq7H11TGAC4CYIUAABwkmk3ev3n72RMLg2MNPbndcq059YAAK5/BCkAAODk2z+PK+VIeNalUdnZpJQj4fr2z+PXtC4AcCcEKQAA4OTwqTQpNUzmZHi2o1LGSOZkuJQadq4dANygCFIAAMBJiWDfc7/8U0kyF3xVMB7npp/fDgBuQAQpAADgpEXFIiod6idbhq90pLzjqJQxko5UkC3DV2XC/NSiYhGX1gkArkSQAgAATjw9bJrQpaYkyXa8lHTW/9yMs/6yHS8pSRp/Z015euR2ERUAXP8IUgAAIJu7a5XQgj4NVSrUXzpURSbTUzpURaXDArSgT0PdXauEq0sEAJfycnUBAADAPd1dq4Q6VwnWwvEtdPhIIdXq/qjaVC/HkSgAEEEKAABchKeHTf/x2yFJKlkhjBAFJ4mbJyhx80TniecN9Zgwu45stuwnQIXWf1Kh9WPyuTogfxGkAAAAcFnsaUnKTD6
Y+/yUw7kuBxR0BCkAAABcFg/fEHkGlco+wxidPnNG/n5+yunOzh6+IdegOiB/EaQAAABwWULrx+R4il56erqWL1+u2267Td7e3te+MLit6+l0UIIUAAAAgGviejodlCAFAAAA4Jq4nk4HJUgBAAAAuCaup9NBuSEvAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYJGXqwsAAOSPxM0TlLh5YvYZxqjamTM6PNNPstmyzQ6t/6RC68fkf4EAABRgBCkAuE7Z05KUmXwwx3k+kjJTcl8OAABcHEEKAK5THr4h8gwq5TzRGGWmHDo3P7CEbLbsZ3h7+IZci/IAACjQCFIAcJ0KrR+T7RQ9e3qK9k8uJEkq3murfAPCrn1hAABcBxhsAgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFjk0uHPR48erUWLFun333+Xv7+/mjZtqjFjxqhq1aqONtOnT9ecOXO0efNmnTp1Sv/++6/CwsKc1rNr1y4988wz+u6773T27FnVrl1bo0aNUuvWra/xMwIAoOBK3DxBiZsnOk80xvFrwuw6Od57LLT+k9mG2geA651Lj0itWbNG0dHR+uGHHxQXF6f09HRFRUUpJSXF0SY1NVUdOnTQsGHDcl3P7bffroyMDK1evVqbNm1SnTp1dPvttyshIeFaPA0AAK4L9rQkZSYfdP75/xs4S5I95XD2+ckHZU9LcmHVAOAaLj0itXLlSqfHM2fOVEREhDZt2qSWLVtKkmJiYiRJ8fHxOa7j2LFj+uOPP/T++++rdu3akqTXXntN77zzjrZv367ixYvnW/0AAFxPPHxD5BlUKvsMY3T6zBn5+/lJNluOywHAjcalQepCiYmJkqTChQvneZkiRYqoatWq+uijj1S/fn35+vpq2rRpioiIUIMGDXJcJi0tTWlpaY7HSUnn/pOWnp6u9PT0K3gGVy5r+66uAwUHfQZW2M/rJxnp6fKg3+A8AbWiFVArOtv09PR0xcXFqV27dvL29s5xWfZBOB9/m2CVO/WZvNZgM+a8k59dyG6364477tDJkye1bt26bPPj4+PVunXrHK+R+vvvv9WlSxdt3rxZHh4eioiI0BdffKF69erluK3Y2FiNGDEi2/Q5c+YoICDgqjwfAHBHNvsZ1dr/gCRpW7mPZTz8XFwRAADuJTU1VT179lRiYqJCQnI/4u42R6Sio6O1ffv2HEPUxRhjFB0drYiICH377bfy9/fXe++9p86dO2vjxo0qUaJEtmWGDh2qwYMHOx4nJSWpTJkyioqKuuiLdS3k5b9+wPnoM7DCnp6iQ9PP/d62TRv5BIS5tB4UDOxnYBV9Bla5U5/JOlvtUtwiSA0aNEjLli3T2rVrVbp0aUvLrl69WsuWLdO///7rCEHvvPOO4uLi9OGHH+q5557Ltoyvr698fX2zTff29nb5G5fFnWpBwUCfQV7Y9b8+4kWfgUXsZ2AVfQZWuUOfyev2XRqkjDF6/PHHtXjxYsXHx6tChQqW15GamipJ8vBwHoDQw8NDdrv9qtQJAAAAAOdzaZCKjo7WnDlztGTJEgUHBzuGKw8NDZW/v78kKSEhQQkJCdq9e7ckadu2bQoODlbZsmVVuHBhNWnSRIUKFVKfPn300ksvyd/fX++++6727t2rTp06uey5AQAAALh+ufQ+UlOmTFFiYqIiIyNVokQJx8+8efMcbaZOnap69eppwIABkqSWLVuqXr16Wrp0qSSpaNGiWrlypZKTk9WmTRs1bNhQ69at05IlS1SnTh2XPC8AAAAA1zeXn9p3KbGxsYqNjb1om4YNG+rLL7+8SlUBAAAAwMW59IgUAAAAABREBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBwA0oQzZXlwAAQIFGkAKAG8wGz1JqFDJQ647sd3UpAAAUWAQpALiBnMnI0NN+HXTK5qsH13+uMxkZri4JAIACiSAFADeIRdsOq9TUeTrsGShJ+uv0SZWcNEOLth12cWUAABQ8BCkAuAEs2nZYXWd/p8SQ3bL9/+VRNpuUGPKHus7+jjAFAIBFBCkAuM5l2o1iPtsuU2yPZLM7zTM2u1Rsj55asl2ZduOiCgEAKHgIUgBwnfv2z+P6O/2wbGFHHUejsthsksKO6q+zCfr2z+MuqQ8AgIKIIAUA17nDp9KksH9kcjngZIyksIRz7QAAQJ4QpADgOlci2Fc6WTzb0agsNpukk8XPtQMAAHlCkAKA61yLikVU2ru4dDI821EpYySdDFcZn+JqUbGIS+oDAKAgIkgBwHXO08OmCV1qSv9Uks047/ZtxkP6p5LG31lTnh65HLICAADZEKQA4AZwd60SWtCrmUKSKjuOShkjhSbdpAW9munuWiVcWyAAAAUMQQoAbhB31yqhg490V8nMFElSGf8wHXq8HyEKAIDLQJACgBuIn5eXxp1ZqWCTpg+adJafl5erSwIAoEAiSAHADaZx5kFtTJqu5hHlXF0KAAAFFkEKAG5AXsrlplIAACBPCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFrk0SI0ePVqNGjVScHCwIiIi1KVLF+3cudOpzfTp0xUZGamQkBDZbDadPHkyx3V98cUXaty4sfz9/VWoUCF16dIl/58AAAAAgBuSS4PUmjVrFB0drR9++EFxcXFKT09XVFSUUlJSHG1SU1PVoUMHDRs2LNf1fPrpp3rggQfUr18/bd26Vd9995169ux5LZ4CAAAAgBuQlys3vnLlSqfHM2fOVEREhDZt2qSWLVtKkmJiYiRJ8fHxOa4jIyNDTz75pF5//XX179/fMb1GjRr5UjMAAAAAuDRIXSgxMVGSVLhw4Twvs3nzZh08eFAeHh6qV6+eEhISVLduXb3++uuqWbNmjsukpaUpLS3N8TgpKUmSlJ6ervT09Ct4Blcua/uurgMFB30GVtjP6ycZ6enyoN8gD9jPwCr6DKxypz6T1xpsxhiTz7Xkid1u1x133KGTJ09q3bp12ebHx8erdevW+vfffxUWFuaYPnfuXPXo0UNly5bVm2++qfLly+uNN97QV199pV27duUYymJjYz
VixIhs0+fMmaOAgICr+rwAwJ3Y7GdUa/8DkqRt5T6W8fBzcUUAALiX1NRU9ezZU4mJiQoJCcm1ndsckYqOjtb27dtzDFEXY7fbJUnPP/+87rnnHknSjBkzVLp0aS1YsEAPP/xwtmWGDh2qwYMHOx4nJSWpTJkyioqKuuiLdS2kp6crLi5O7dq1k7e3t0trQcFAn4EV9vQUHZp+7ve2bdrIJyDMpfWgYGA/A6voM7DKnfpM1tlql+IWQWrQoEFatmyZ1q5dq9KlS1tatkSJEpKcr4ny9fVVxYoVdeDAgRyX8fX1la+vb7bp3t7eLn/jsrhTLSgY6DPIC7v+10e86DOwiP0MrKLPwCp36DN53b5LR+0zxmjQoEFavHixVq9erQoVKlheR4MGDeTr6+s0bHp6err27duncuXKXc1yAQAAAECSi49IRUdHa86cOVqyZImCg4OVkJAgSQoNDZW/v78kKSEhQQkJCdq9e7ckadu2bQoODlbZsmVVuHBhhYSE6JFHHtHw4cNVpkwZlStXTq+//rokqWvXrq55YgAAAACuay4NUlOmTJEkRUZGOk2fMWOG+vbtK0maOnWq08AQWcOin9/m9ddfl5eXlx544AGdPn1ajRs31urVq1WoUKF8fw4AAAAAbjyXFaQyMjIUHx+vPXv2qGfPngoODtahQ4cUEhKioKCgPK8nLwMGxsbGKjY29qJtvL29NW7cOI0bNy7P2wYAAACAy2U5SO3fv18dOnTQgQMHlJaWpnbt2ik4OFhjxoxRWlqapk6dmh91AgAsStw8QYmbJzpPPO8fWAmz68hmy36pbGj9JxVaPyafqwMAoGCzHKSefPJJNWzYUFu3blWRIkUc0++66y4NGDDgqhYHALh89rQkZSYfzH1+yuFclwMAABdnOUh9++23+v777+Xj4+M0vXz58jp4MPc/2ACAa8vDN0SeQaWyzzBGp8+ckb+fn2Sz5bgcAAC4OMtBym63KzMzM9v0v//+W8HBwVelKADAlQutH5PjKXrp6elavny5brvtNpffqwMAgILK8n2koqKiNGHCBMdjm82m5ORkDR8+XLfddtvVrA0AAAAA3JLlI1JvvPGG2rdvrxo1aujMmTPq2bOn/vjjDxUtWlSffPJJftQIAAAAAG7FcpAqXbq0tm7dqrlz5+qXX35RcnKy+vfvr169ejluogsAAAAA17PLuo+Ul5eX7r///qtdCwAAAAAUCJaD1EcffXTR+b17977sYgAAAACgILis+0idLz09XampqfLx8VFAQABBCgAAAMB1z/Koff/++6/TT3Jysnbu3KnmzZsz2AQAAACAG4LlIJWTm266Sa+99lq2o1UAAAAAcD26KkFKOjcAxaFDh67W6gAAAADAbVm+Rmrp0qVOj40xOnz4sN5++201a9bsqhUGAAAAAO7KcpDq0qWL02Obzabw8HC1adNGb7zxxtWqCwAAAADcluUgZbfb86MOAAAAACgwrto1UgAAAABwo8jTEanBgwfneYVvvvnmZRcDAAAAAAVBnoLUzz//nKeV2Wy2KyoGAAAAAAqCPAWpb775Jr/rAAAAAIACg2ukAAAAAMAiy6P2SdJPP/2k+fPn68CBAzp79qzTvEWLFl2VwgAAAADAXVk+IjV37lw1bdpUv/32mxYvXqz09HTt2LFDq1evVmhoaH7UCAAAAABuxXKQevXVVzV+/Hh9/vnn8vHx0cSJE/X777+rW7duKlu2bH7UCAAAAABuxXKQ2rNnjzp16iRJ8vHxUUpKimw2m5566ilNnz79qhcIAAAAAO7GcpAqVKiQTp06JUkqVaqUtm/fLkk6efKkUlNTr251AAAAAOCG8hyksgJTy5YtFRcXJ0nq2rWrnnzySQ0YMEA9evRQ27Zt86dKAAAAAHAjeR61r3bt2mrUqJG6dOmirl27SpKef/55eXt76/vvv9c999yjF154Id8KBQAAAAB3kecgtWbNGs2YMUOjR4/WK6+8onvuuUcPPfSQnnvuufysDwAAAADcTp5P7WvRooU++OADHT58WJMmTdK+ffvUqlUrValSRWPGjFFCQkJ+1gkAAAAAbsPyYBOBgYHq16+f1qxZo127dqlr166aPHmyypYtqzvuuCM/agQAAAAAt2I5SJ2vcuXKGjZsmF544QUFBwfriy++uFp1AQAAAIDbyvM1Uhdau3atPvjgA3366afy8PBQt27d1L9//6tZGwAAAAC4JUtB6tChQ5o5c6Zmzpyp3bt3q2nTpnrrrbfUrVs3BQYG5leNAAAAAOBW8hykOnbsqK+//lpFixZV79699eCDD6pq1ar5WRsAAAAAuKU8Bylvb28tXLhQt99+uzw9PfOzJgAAAABwa3kOUkuXLs3POgAAAACgwLiiUfsAAAAA4EZEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWOTSIDV69Gg1atRIwcHBioiIUJcuXbRz506nNtOnT1dkZKRCQkJks9l08uTJXNeXlpamunXrymazacuWLflbPAAAAIAblkuD1Jo1axQdHa0ffvhBcXFxSk9PV1RUlFJSUhxtUlNT1aFDBw0bNuyS63v22WdVsmTJ/CwZAAAAAOTlyo2vXLnS6fHMmTMVERGhTZs2qWXLlpKkmJgYSVJ8fPxF17VixQp99dVX+vTTT7VixYqLtk1LS1NaWprjcVJSkiQpPT1d6enpFp/F1ZW1fVfXgYKDPgOr6DOwij4Dq+gzsMqd+kxea3BpkLpQYmKiJKlw4cKWlvvnn380YMAAffbZZwoICLhk+9GjR2vEiBHZpn/11Vd5Wv5aiIuLc3UJKGDoM7CKPgOr6DOwij4Dq9yhz6SmpuapndsEKbvdrpiYGDVr1kw1a9bM83LGGPXt21ePPPKIGjZsqH379l1ymaFDh2rw4MGOx0lJSSpTpoyioqIUEhJyOeVfNenp6YqLi1O7du3k7e3t0lpQMNBnYBV9BlbRZ2AVfQZWuVOfyTpb7VLcJkhFR0dr+/btWrdunaXlJk2apFOnTmno0KF5XsbX11e+vr7Zpnt7e7v8jcviTrWgYKDPwCr6DKyiz8Aq+gyscoc+k9ftu8Xw54MGDdKyZcv0zTffqHTp0paWXb16tdavXy9fX195eXmpcuXKkqSGDRuqT58++VEuAAAAgBucS49IGWP0+OOPa/HixYqPj1eFChUsr+Ott97Syy+/7Hh86NAhtW/fXvPmzVPjxo2vZrkAAAAAIMnFQSo6Olpz5szRkiVLFBwcrISEBElSaGio/P39JUkJCQlKSEjQ7t27JUnbtm1TcHCwypYtq8KFC6ts2bJO6wwKCpIkVapUyfLRLQAAAADIC5ee2jdlyhQlJiYqMjJSJUqUcPzMmzfP0Wbq1KmqV6+eBgwYIElq2bKl6tWrp6VLl7qqbAAAAAA3OJef2ncpsbGxi
o2NzfM6y5cvn6f1AgAAAMDlcovBJgAAAACgICFIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAs8nJ1AQDyJnHzBCVunph9hjGqduaMDs/0k2y2bLND6z+p0Pox+V8gAADADYQgBRQQ9rQkZSYfzHGej6TMlNyXAwAAwNVFkAIKCA/fEHkGlXKeaIwyUw6dmx9YQjZb9rN1PXxDrkV5AAAANxSCFFBAhNaPyXaKnj09RfsnF5IkFe+1Vb4BYde+MAAAgBsQg00AAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALHJpkBo9erQaNWqk4OBgRUREqEuXLtq5c6dTm+nTpysyMlIhISGy2Ww6efKk0/x9+/apf//+qlChgvz9/VWpUiUNHz5cZ8+evYbPBAAAAMCNxKVBas2aNYqOjtYPP/yguLg4paenKyoqSikpKY42qamp6tChg4YNG5bjOn7//XfZ7XZNmzZNO3bs0Pjx4zV16tRc2wMAAADAlfJy5cZXrlzp9HjmzJmKiIjQpk2b1LJlS0lSTEyMJCk+Pj7HdXTo0EEdOnRwPK5YsaJ27typKVOmaNy4cflSNwAAAIAbm0uD1IUSExMlSYULF77i9VxsHWlpaUpLS3M8TkpKkiSlp6crPT39irZ9pbK27+o6UDCkp53VD2du1hF7IVX945giqwXI08Pm6rLg5tjPwCr6DKyiz8Aqd+ozea3BZowx+VxLntjtdt1xxx06efKk1q1bl21+fHy8WrdurX///VdhYWG5rmf37t1q0KCBxo0bpwEDBuTYJjY2ViNGjMg2fc6cOQoICLjs5wBcS+uPSe/vkY6f9ZBkJNlUxMeofyWjJkVdXR0AAEDBlJqaqp49eyoxMVEhISG5tnObIPXoo49qxYoVWrdunUqXLp1tfl6C1MGDB9WqVStFRkbqvffey3VbOR2RKlOmjI4dO3bRF+taSE9PV1xcnNq1aydvb2+X1gL39dn2BN03e4uMJAWclMpulw7UlC01TJI0t1dddalZ3IUVwp2xn4FV9BlYRZ+BVe7UZ5KSklS0aNFLBim3OLVv0KBBWrZsmdauXZtjiMqLQ4cOqXXr1mratKmmT59+0ba+vr7y9fXNNt3b29vlb1wWd6oF7iXTbvT0st/PhSjZpZK7ZPPMlCm5S2Z3Q9nkoSFf/K6765TmND9cFPsZWEWfgVX0GVjlDn0mr9t36ah9xhgNGjRIixcv1urVq1WhQoXLWs/BgwcVGRmpBg0aaMaMGfLw4PZYuH59++dx/Z145tyDIgcln9Pnfvc5LRU5JCPpr5Nn9O2fx11WIwAAwPXOpUekoqOjNWfOHC1ZskTBwcFKSEiQJIWGhsrf31+SlJCQoISEBO3evVuStG3bNgUHB6ts2bIqXLiwI0SVK1dO48aN09GjRx3rL16cU5tw/Tl86v9PS/VKkyL2yfb/B51sNslE7JUSw6UM3/+1AwAAwFXn0iA1ZcoUSVJkZKTT9BkzZqhv376SpKlTpzoNDJE1LHpWm7i4OO3evVu7d+/Odlqgm1z+BVxVJYL//7TUYnskm915ps1+bvrBGv9rBwAAgKvO5af25fSTFaKkcyPsXaxN3759c10PcD1qUbGIwiNSZAs76jgalcVmk2xhRxUekaoWFYu4pkAAAIAbABcTAQWMp4dNNaudlnL7X4GRalZLZaAJAACAfESQAgqgFxu1kHLLSTbppVtaXtN6AAAAbjQEKaAAiixRWd0r1JXnBef2edo8dF+FumpVvJKLKgMAALgxEKSAAmrcLZ3l4+HpNM3X01Ov39LZRRUBAADcOAhSQAFVMiBUI+q0lf5/YBWbpFH1O6pkQKhrCwMAALgBEKSAAmxQtcaqaP9XknRTcBFFV2/m4ooAAABuDAQpoADz9vDUqNPfKNik6e2Gt8n7glP9AAAAkD8IUkAB1zjzoDYmTVfziHKuLgUAAOCGQZACrgNeud5UCgAAAPmBIAUAAAAAFhGkAAAAAMAiL1cXcKNK3DxBiZsnZp9hjKqdOaPDM/2kC262Kkmh9Z9UaP2Y/C8QAAAAQK4IUi5iT0tSZvLBHOf5SMpMyX05AAAAAK5FkHIRD98QeQaVcp5ojDJTDp2bH1hCNlv2My89fEOuRXkAAAAALoIg5SKh9WOynaJnT0/R/smFJEnFe22Vb0DYtS8MAAAAwCUx2AQAAAAAWESQAgAAAACLCFIAAAAAYBFBCgAAAAAsIkgBAAAAgEUEKQAAAACwiCAFAAAAABYRpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaQAAAAAwCKCFAAAAABYRJACAAAAAIsIUgAAAABgEUEKAAAAACwiSAEAAACARQQpAAAAALCIIAUAAAAAFhGkAAAAAMAiL1cXACBvEjdPUOLmic4TjXH8mjC7jmy27P8bCa3/pELrx+RzdQAAADcWghRQQNjTkpSZfDD3+SmHc10OAAAAVxdBCiggPHxD5BlUKvsMY3T6zBn5+/lJNluOywEAAODqIkgBBURo/ZgcT9FLT0/X8uXLddttt8nb2/vaFwYAAHADYrAJAAAAALCIIAUAAAAAFhGkAAAAAMAighQAAAAAWESQAgAAAACLCFIAAAAAYBFByg1lKPu9gAAAAAC4D4KUG8m0G83IbKa6wY9q8tYdyrQbV5cEAAAAIAcEKTexaNthVRy7Ti8HNtcZDy8N2bpS5V/5Sou2HXZ1aQAAAAAuQJByA4u2HVbXD3/SQa8/JZ/T5yb6nNYhrz/V9cOfCFMAAACAm3FpkBo9erQaNWqk4OBgRUREqEuXLtq5c6dTm+nTpysyMlIhISGy2Ww6efJktvWcOHFCvXr1UkhIiMLCwtS/f38lJydfo2dxZTLtRjGfbZfxSpMi9sn2/5dH2WySidgreaXpqSXbOc0PAAAAcCMuDVJr1qxRdHS0fvjhB8XFxSk9PV1RUVFKSUlxtElNTVWHDh00bNiwXNfTq1cv7dixQ3FxcVq2bJnWrl2rgQMHXouncMW+/fO4/k48IxXbI9nszjNtdplie/TXyTP69s/jrikQAAAAQDZertz4ypUrnR7PnDlTERER2rRpk1q2bClJiomJ
[... base64-encoded PNG data for the matplotlib figure output omitted ...]
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "# Plotting\n", "plt.figure(figsize=(10, 6))\n", @@ -278,7 +434,7 @@ "source": [ "## Multiple Average Potential Outcome Models (APOS)\n", "\n", - "Instead of looping over different treatment levels, one can directly use the [DoubleMLAPOS](https://docs.doubleml.org/dev/api/generated/doubleml.DoubleMLAPOS.html#doubleml.DoubleMLAPOS) model which internally combines multiple [DoubleMLAPO](https://docs.doubleml.org/dev/api/generated/doubleml.DoubleMLAPO.html#doubleml.DoubleMLAPO) models. An advantage of this approach is that the model can be parallelized, create joint confidence intervals and allow for a comparison between the average potential outcome levels.\n", + "Instead of looping over different treatment levels, one can directly use the [DoubleMLAPOS](https://docs.doubleml.org/dev/api/generated/doubleml.irm.DoubleMLAPOS.html#doubleml.irm.DoubleMLAPOS) model which internally combines multiple [DoubleMLAPO](https://docs.doubleml.org/dev/api/generated/doubleml.irm.DoubleMLAPO.html) models. An advantage of this approach is that the model can be parallelized, create joint confidence intervals and allow for a comparison between the average potential outcome levels.\n", "\n", "### Average Potential Outcome (APOs)\n", "\n", @@ -287,9 +443,105 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
treatment_levelapothetaci_lowerci_upper
00.0210.036240210.077702208.766940211.384677
11.0211.785815211.881937210.553004213.225427
22.0217.063017217.069443215.756200218.393654
33.0219.468907219.404300218.108259220.723846
44.0220.439699220.503700219.192952221.828157
55.0220.525064220.417834219.095785221.741523
\n", + "
" + ], + "text/plain": [ + " treatment_level apo theta ci_lower ci_upper\n", + "0 0.0 210.036240 210.077702 208.766940 211.384677\n", + "1 1.0 211.785815 211.881937 210.553004 213.225427\n", + "2 2.0 217.063017 217.069443 215.756200 218.393654\n", + "3 3.0 219.468907 219.404300 218.108259 220.723846\n", + "4 4.0 220.439699 220.503700 219.192952 221.828157\n", + "5 5.0 220.525064 220.417834 219.095785 221.741523" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "dml_obj = dml.DoubleMLAPOS(\n", " dml_data,\n", @@ -323,9 +575,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1IAAAIjCAYAAAAJLyrXAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAeG9JREFUeJzt3XmcjeX/x/H3mX2fsczYd9myL/naBxkipQWhLImWUU1S32gxqEQKlSwtVMhWIqEmDClJRKiILIXJlhkzw5iZc/3+8JvzdZoZ5sbMOcPr+XjM4zHnvq/7vj/nnOvcc95z3/d124wxRgAAAACAPPNwdQEAAAAAUNgQpAAAAADAIoIUAAAAAFhEkAIAAAAAiwhSAAAAAGARQQoAAAAALCJIAQAAAIBFBCkAAAAAsIggBQAAAAAWEaRwzYmMjFRkZKSry7iq9u/fL5vNplmzZrm6FJey2WwaMmSIq8twe7NmzZLNZtP+/ftdXco1Y+XKlapfv778/Pxks9l06tQp9e/fXxUrVrzksnx+C5+8vmdZn7Uff/yxYArDdYv9unsiSKHAZO0Ecvv5/vvv87yuX375RbGxsW63Q3n77bfd5svS8uXLZbPZVLp0adnt9hzbVKxY0ek9iIiIUKtWrbR48eJsbY0x+uijj9S6dWuFhYUpICBAderU0ejRo5WSknJZNUZGRl60T2T9xMbGXtb6L+Xw4cOKjY3V1q1b82X9hUFcXJxatmypgIAAFSlSRHfffXeOn6t/95Wsn4ceesip3S+//KJWrVopODhYjRs31oYNG7Kt6/XXX9eNN96ojIwMS7XGx8frzjvvVMmSJeXj46OIiAh17dpVn376qaX1WHXixAn16NFD/v7+mjJlij766CMFBgbm6zbdUVa4mDBhwmUt7077x8IiPj4+T/tIm81WIPWkpqYqNjZW8fHxBbK9K7V8+XJLfz8iIyNVu3bt/CsI1xwvVxeA68/o0aNVqVKlbNOrVq2a53X88ssvGjVqlCIjI7P9R/irr7660hIv29tvv63ixYurf//+Lqshy5w5c1SxYkXt379fq1ev1s0335xju/r16+vJJ5+UdD5YTJ8+XXfeeaemTp3q+JKcmZmp3r17a8GCBWrVqpViY2MVEBCgb775RqNGjdLChQv19ddfq0SJEpZqfPbZZ/XAAw84Hm/atElvvPGGRowYoZo1azqm161b1+rTz5PDhw9r1KhRqlixourXr58v23Bny5Yt0+23366GDRvqlVdeUVJSkiZPnqyWLVvqp59+Unh4uFP7C/tKlmrVqjl+z8zM1J133qmiRYvq1Vdf1dKlS3X77bdrz549CgkJkSQdPXpUo0eP1oIFC+Tllfc/QSNHjtTo0aN1ww036MEHH1SFChV04sQJLV++XHfddZfmzJmj3r17X8GrkbtNmzbp9OnTGjNmjNPn6J133sn1nxTIzp32j4VFzZo19dFHHzlNGz58uIKCgvTss88WeD2pqakaNWqUJBWKMz+WL1+uKVOm5Ns/4wCCFArcLbfcosaNG+fb+n18fPJt3YVFSkqKlixZorFjx2rmzJmaM2dOrkGqTJkyuvfeex2P+/btq6pVq2rixImOIDV+/HgtWLBAw4YN06uvvupoO3jwYPXo0UPdunVT//79tWLFCkt1dujQwemxn5+f3njjDXXo0KFQ/JEu7P773/+qcuXK+vbbbx2fm65duzqC1WuvvebU/t995d9+//137dq1SwcOHFD58uXVt29fFS9eXBs2bFDHjh0lSSNGjFDr1q0VFRWV5zoXLVqk0aNH6+6779bcuXPl7e3tmPfUU0/pyy+/VHp6upWnbsnRo0clSWFhYU7TL6wDrpGRkSG73X7N7vdLlCiR7TP3yiuvqHjx4hf9LNrtdp07d05+fn75XSJwXePUPrilefPmqVGjRgoODlZISIjq1KmjyZMnSzp/imD37t0lSW3btnWc1pB1qsG/r5HKOjViwYIFGjVqlMqUKaPg4GDdfffdSkxMVFpammJiYhQREaGgoCANGDBAaWlpTvXMnDlT7dq1U0REhHx9fVWrVi1NnTrVqU3FihW1c+dOrV271lHThXWcOnVKMTExKleunHx9fVW1alWNGzcu23+0s669CA0NVVhYmPr166dTp05Zev0WL16sM2fOqHv37rrnnnv06aef6uzZs3latmTJkqpZs6b27dsnSTpz5oxeffVVVatWTWPHjs3WvmvXrurXr59Wrlxp6fTMK/HZZ5+pdu3a8vX11Y033qiVK1dma3Po0CHdf//9KlGihKPd+++/75gfHx+vJk2aSJIGDBjgeM+yTj365ptv1L17d5UvX16+vr4qV66cnnjiCZ05c+ay685LP5LO96Vbb71V69ev10033SQ/Pz9VrlxZH374Yba2O3fuVLt27eTv76+yZcvqxRdfzNNRkpMnT+qXX37RHXfc4fQltF69eqpZs6bmzZuX43Lnzp3L9VTOrNemSJEikqSAgAD5+/srNTVVkrRlyxbNmTNHr7/++iXru9Dzzz+vokWL6v33388xvHTs2FG33nqr4/HRo0c1cOBAlShRQn5+fqpXr54++OADp2UuPE1txowZqlKlinx9fdWkSRNt2rTJ0S4yMlL9+vWTJDVp0kQ2m81xRCWna6SsfH5/++033X333SpatKj8/PzUuHFjLV261KlN1inR3377rYYOHarw8HAFBgbqjjvu0LFjx7Ktc8WKFWrTpo1j39mkSRPNnTvXqc3GjRvVqVMnhYaGKiAgQG3atNG3336bY42Xktf6rsb+8cL3bNKkSY
[... base64-encoded PNG data for the matplotlib figure output omitted ...]
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "# Plotting\n", "plt.figure(figsize=(10, 6))\n", @@ -354,9 +617,84 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
2.5 %97.5 %
0.0208.642329211.518478
1.0210.419871213.355065
2.0215.622272218.521233
3.0217.975289220.850038
4.0219.058375221.962364
5.0218.968127221.878746
\n", + "
" + ], + "text/plain": [ + " 2.5 % 97.5 %\n", + "0.0 208.642329 211.518478\n", + "1.0 210.419871 213.355065\n", + "2.0 215.622272 218.521233\n", + "3.0 217.975289 220.850038\n", + "4.0 219.058375 221.962364\n", + "5.0 218.968127 221.878746" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "dml_obj.bootstrap(n_rep_boot=2000)\n", "ci_joint = dml_obj.confint(level=0.95, joint=True)\n", @@ -370,14 +708,44 @@ "source": [ "## Sensitivity Analysis\n", "\n", - "For [DoubleMLAPO](https://docs.doubleml.org/dev/api/generated/doubleml.DoubleMLAPO.html#doubleml.DoubleMLAPO) and [DoubleMLAPOS](https://docs.doubleml.org/dev/api/generated/doubleml.DoubleMLAPOS.html#doubleml.DoubleMLAPOS) model all methods for sensitivity analysis are available." + "For [DoubleMLAPO](https://docs.doubleml.org/dev/api/generated/doubleml.irm.DoubleMLAPO.html) and [DoubleMLAPOS](https://docs.doubleml.org/dev/api/generated/doubleml.irm.DoubleMLAPOS.html#doubleml.irm.DoubleMLAPOS) model all methods for sensitivity analysis are available." ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "================== Sensitivity Analysis ==================\n", + "\n", + "------------------ Scenario ------------------\n", + "Significance Level: level=0.95\n", + "Sensitivity parameters: cf_y=0.03; cf_d=0.03, rho=1.0\n", + "\n", + "------------------ Bounds with CI ------------------\n", + " CI lower theta lower theta theta upper CI upper\n", + "0 208.792396 209.890855 210.075809 210.260762 211.359229\n", + "1 210.543052 211.663177 211.889638 212.115901 213.238619\n", + "2 215.752696 216.858952 217.074927 217.290901 218.398166\n", + "3 218.114989 219.212811 219.416052 219.619294 220.716615\n", + "4 219.189739 220.296099 220.510555 220.725010 221.830273\n", + "5 219.072605 220.184224 220.418741 220.652324 221.761224\n", + "\n", + "------------------ Robustness Values ------------------\n", + " H_0 RV (%) RVa (%)\n", + "0 0.0 99.916359 99.909571\n", + "1 0.0 99.877903 99.861019\n", + "2 0.0 99.893461 99.882928\n", + "3 0.0 99.907879 99.899250\n", + "4 0.0 99.898183 99.888352\n", + "5 0.0 99.878895 99.864664\n" + ] + } + ], "source": [ "dml_obj.sensitivity_analysis()\n", "print(dml_obj.sensitivity_summary)" @@ -392,9 +760,98 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
cf_ycf_drhodelta_theta
0.00.00.0000000.00.000006
1.00.00.000000-1.0-0.004253
2.00.00.0000001.00.003220
3.00.00.000000-1.0-0.004526
4.00.00.0034151.00.003404
5.00.00.000000-1.0-0.006055
\n", + "
" + ], + "text/plain": [ + " cf_y cf_d rho delta_theta\n", + "0.0 0.0 0.000000 0.0 0.000006\n", + "1.0 0.0 0.000000 -1.0 -0.004253\n", + "2.0 0.0 0.000000 1.0 0.003220\n", + "3.0 0.0 0.000000 -1.0 -0.004526\n", + "4.0 0.0 0.003415 1.0 0.003404\n", + "5.0 0.0 0.000000 -1.0 -0.006055" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "dml_obj.sensitivity_benchmark(benchmarking_set=['x4'])" ] @@ -412,7 +869,7 @@ "source": [ "### Causal Contrasts\n", "\n", - "The [DoubleMLAPOS](https://docs.doubleml.org/dev/api/generated/doubleml.DoubleMLAPOS.html#doubleml.DoubleMLAPOS) model also allows for the estimation of causal contrasts. \n", + "The [DoubleMLAPOS](https://docs.doubleml.org/dev/api/generated/doubleml.irm.DoubleMLAPOS.html#doubleml.irm.DoubleMLAPOS) model also allows for the estimation of causal contrasts. \n", "The contrast is defined as the difference in the average potential outcomes between the treatment levels $d_i$ and $d_j$ where\n", "\n", "$$ \\theta_{0,ij} = \\mathbb{E}[Y(d_i)] - \\mathbb{E}[Y(d_{j})]$$\n", @@ -426,9 +883,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " coef std err t P>|t| 2.5 % 97.5 %\n", + "1.0 vs 0.0 1.810306 0.180143 10.049264 0.0 1.454406 2.165707\n", + "2.0 vs 0.0 6.994208 0.145027 48.226969 0.0 6.710059 7.278035\n", + "3.0 vs 0.0 9.335446 0.135344 68.975592 0.0 9.068934 9.600776\n", + "4.0 vs 0.0 10.431998 0.141460 73.745022 0.0 10.155160 10.708837\n", + "5.0 vs 0.0 10.342362 0.155174 66.650234 0.0 10.039141 10.645583\n" + ] + } + ], "source": [ "causal_contrast_model = dml_obj.causal_contrast(reference_levels=0)\n", "print(causal_contrast_model.summary)" @@ -443,9 +913,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA0kAAAIjCAYAAADWYVDIAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAZQdJREFUeJzt3Xd4FOXexvF703tCJwklAaT3Kr1IR4Qj0kQpIqKiEhQ5WGiiIKgIIiqgFD30IooCHlCKFJUiikiXohCqkBACIezO+0fe7JllE0hgk03i93NduWCfab/dfbbcOzPPWAzDMAQAAAAAkCR5uLsAAAAAAMhJCEkAAAAAYEJIAgAAAAATQhIAAAAAmBCSAAAAAMCEkAQAAAAAJoQkAAAAADAhJAEAAACACSEJAAAAAEwISciVmjVrpmbNmrm7DJc6duyYLBaL5syZ4+5SslWzZs1UuXJld5eR4/1T+0dWOnTokFq3bq3Q0FBZLBatWLFCc+bMkcVi0bFjx267fFRUlPr27ZvldcJ1MvKcpb7W3n777ewpCv9YvK/nbIQkuFTqF4z0/n744YcMr+v333/X6NGjM/RlJTt98MEHOeYNbdWqVbJYLIqIiJDNZrO3N2vW7JbPQ+rf6NGjJaV8cUhvnrZt22aqptGjR2do21kZcseNG6cVK1Zk2fpzusOHD+uhhx5Svnz5FBAQoEaNGmn9+vVO8/Xt2zfN56Z8+fIO8126dEm9evVSvnz5VKpUKX3yySdO69qxY4cCAgJ09OjRTNV65MgRDRw4UKVKlZKfn59CQkLUsGFDTZkyRVevXs3cHc+kPn36aM+ePXrjjTf02WefqXbt2lm6vZwqKipK999//x0tu2rVKvv7CDIuI++RFotFGzZsyJZ6ctLn2u1k9rtB6mfS+fPns7Yw5Dle7i4AedNrr72m6Ohop/YyZcpkeB2///67xowZo2bNmikqKsph2n//+9+7LfGOffDBBypYsGCO+AV53rx5ioqK0rFjx/Tdd9+pZcuWkqRXXnlFjz/+uH2+7du367333tPLL7+sChUq2NurVq1q/3/16tX1wgsvOG0jIiIiUzU9+OCDDs9zQkKCnnrqKf3rX//Sgw8+aG8vUqRIptabGePGjdNDDz2kzp07Z9k2cqo///xT9evXl6enp1588UUFBgZq9uzZat26tb799ls1adLEYX5fX199/PHHDm2hoaEOt4cOHaoNGzZozJgxOnz4sAYMGKAKFSqoQYMGkiTDMPTcc88pJiYmzdd9er7++mt17dpVvr6+6t27typXrqzr169r8+bNevHFF7V3717NmDHjDh+JW7t69aq2bdumV155Rc8884y9/dFHH1WPHj3k6+ubJdvNa1atWqVp06YRlDLps88+c7j96aefau3atU7t5vfrrJSTPtdu51bfDQBXIiQhS7Rr1y5Lf5X18fHJsnXnFleuXNEXX3yh8ePHa/bs2Zo3b549JLVq1cphXj8/P7333ntq1apVuntwIiMj9cgjj9x1XVWrVnUIX+fPn9dTTz2lqlWrumT9uLU333xTly5d0m+//aZy5cpJkgYMGKDy5ctryJAh2rlzp8P8Xl5et31evvrqK02cOFG9e/eWJP36669auXKlPSTNmzdPx48f18svv5zhOo8ePaoePXqoZMmS+u677xQeHm6fNmjQIB0+fFhff/11hteXWefOnZMkhYWFObR7enrK09Mzy7aL2zMMQ9euXZO/v7+7S8kyN7/mfvjhB61du/a2r8XExEQFBARkZWkA/h+H28FtFi5cqFq1aik4OFghISGqUqWKpkyZIinlsL2uXbtKkpo3b+506MHN5yRt2LBBFotFixcv1pgxYxQZGang4GA99NBDiouLU1JSkmJiYlS4cGEFBQWpX79+SkpKcqhn9uzZatGihQoXLixfX19VrFhRH374ocM8UVFR2rt3rzZu3JjmYWOXLl1STEyMihcvLl9fX5UpU0YTJkxwOBQudb6+ffsqNDRUYWFh6tOnjy5dupSpx+/zzz/X1atX1bVrV/Xo0UPLly/XtWvXMrWOnOT3339X8+bNFRAQoMjISE2cONFpnqSkJI0aNUplypSRr6+vihcvrmHDhjk8lxaLRVeuXNHcuXPtz1Hqr6PHjx/X008/rXLlysnf318FChRQ165d7+qQzu+//15du3ZViRIl7DUNGTLE6VCxvn37KigoSCdPnlTnzp0VFBSkQoUKaejQobJarQ7z3k3/+P7771WjRg17QJKkgIAAPfDAA9q1a5cOHTrktIzValV8fHy667x69ary5ctnv50/f34lJiZKSgnrw4cP1/jx4xUUFJShGiVp4sSJSkhI0CeffOIQkFKVKVNGgwcPtt++ceOGxo4dq9KlS8vX11dRUVF6+eWXnV7HqYeObd68WXXr1pWfn59KlSqlTz/91D7P6NGjVbJkSUnSiy++KIvFYv9FOq1zkgzD0Ouvv65ixYopICBAzZs31969e9O8Xxl5DzCf8zJjxgz7fapTp462b9/utM79+/erW7duKlSokPz9/VWuXDm98sorDvOcPHlSjz32mIoUKSJfX19VqlRJs2bNSufRv7WM1te3b19NmzZNkuPhY6lsNpsmT56sSpUqyc/PT0WKFNHAgQN18eJFh+2lPmfffPONateuLX9/f02fPl2VK1dW8+bNneqz2WyKjIzUQw89ZG97++231aBBAxUoUED+/v6qVauWli5dekf33+zdd99VyZIl5e/vr6ZNm+q3336zT5s9e7YsFot+/vlnp+XGjRsnT09PnTx58o63nXq+5s6dO9WkSRMFBATYf4jIyHthao1387mW+nrYvHmznnvuORUqVEhhYWEaOHCgrl+/rkuXLql3797Kly+f8uXLp2HDhskwDIf1Z7Yf3Oq1e7vvBndj//79euihh5Q/f375+fmpdu3a+vLLL+3Td+zYIYvForlz5zot+80338hiseirr76yt7nyNQn3YE8SskRcXJzT8b8Wi0UFChSQJK1du1Y9e/bUfffdpwkTJkiS9u3bpy1btmjw4MFq0qSJnnvuOadDxG536MH48ePl7++v4cOH6/Dhw5o6daq8vb3l4eGhixcvavTo0frhhx80Z84cRUdHa+TIkfZlP/zwQ1WqVEkPPPCAvLy8tHLlSj399NOy2WwaNGiQJGny5Ml69tlnFRQUZP+SknrYWGJiopo2baqTJ09q4MCBKlGihLZu3aqXXnpJsbGxmjx5sqSUL1ydOnXS5s2b9eSTT6pChQr6/PPP1adPn0w9xvPmzVPz5s1VtGhR9ejRQ8OHD9fKlSvtHyCZlZycnOYx24GBgVn+i+7FixfVtm1bPfjgg+rWrZuWLl2qf//736pSpYratWsnKeWD9oEHHtDmzZv1xBNPqEKFCtqzZ4/effddHTx40H4O0meffabHH39cdevW1RNPPCFJKl26tKSUww63bt2qHj16qFixYjp27Jg+/PBDNWvWTL///vsd/UK7ZMkSJSYm6qmnnlKBAgX0008/aerUqfrrr7+0ZMkSh3mtVqvatG
mjevXq6e2339a6dev0zjvvqHTp0nrqqack3X3/SEpKcgg0qVLv286dO3XPPffY2xMTExUSEqLExETly5dPPXv21IQJExwCT506dTRp0iSVL19ef/zxh9asWaOZM2dKSvkyGBkZqUcffTRTj9vKlStVqlQp+96o23n88cc1d+5cPfTQQ3rhhRf0448/avz48dq3b58+//xzh3lTz8nq37+/+vTpo1mzZqlv376qVauWKlWqpAcffFBhYWEaMmSIevbsqfbt298y4I0cOVKvv/662rdvr/bt22vXrl1q3bq1rl+/7jBfRt8DUs2fP1+XL1/WwIEDZbFYNHHiRD344IP6448/5O3tLSllr13jxo3l7e2tJ554QlFRUTpy5IhWrlypN954Q5J05swZ3XvvvbJYLHrmmWdUqFAhrV69Wv3791d8fLxiYmIy9Bjf7Hb1DRw4UKdOnUrzMDFJGjhwoObMmaN+/frpueee09GjR/X+++/r559/1pYtW+z3UZIOHDignj17auDAgRowYIDKlSun7t27a/To0Tp9+rSKFi1qn3fz5s06deqUevToYW+bMmWKHnjgAfXq1UvXr1/XwoUL1bVrV3311Vfq0KHDHd3/Tz/9VJcvX9agQYN07do1TZkyRS1atNCePXtUpEgRPfTQQxo0aJDmzZunGjVqOCw7b948NWvWTJGRkXe07VQXLlxQu3bt1KNHDz3yyCMqUqRIht8Lpbv/XEv17LPPqmjRohozZox++OEHzZgxQ2FhYdq6datKlCihcePGadWqVXrrrbdUuXJl+15nKXP94Hav3Tv9bnA7e/fuVcOGDRUZGanhw4crMDBQixcvVufOnbVs2TL961//Uu3atVWqVCktXrzY6f140aJFypcvn9q0aSMp616TyGYG4EKzZ882JKX55+vra59v8ODBRkhIiHHjxo1017VkyRJDkrF+/XqnaU2bNjWaNm1qv71+/XpDklG5cmXj+vXr9vaePXsaFovFaNeuncPy9evXN0qWLOnQlpiY6LSdNm3aGKVKlXJoq1SpksO2U40dO9YIDAw0Dh486NA+fPhww9PT0zhx4oRhGIaxYsUKQ5IxceJE+zw3btwwGjdubEgyZs+e7bTum505c8bw8vIyZs6caW9r0KCB0alTpzTnv9VjaRiGUbJkyXSft/Hjx9+2nls5d+6cIckYNWpUmtObNm1qSDI+/fRTe1tSUpJRtGhRo0uXLva2zz77zPDw8DC+//57h+U/+ugjQ5KxZcsWe1tgYKDRp08fp22l9Rxv27bNafuZkdY6x48fb1gsFuP48eP2tj59+hiSjNdee81h3ho1ahi1atWy377b/tGxY0cjLCzMiI+Pd2ivX7++Icl4++237W3Dhw83/v3vfxuLFi0yFixYYK+xYcOGRnJysn2+X3/91ShWrJi9T3Tp0sWwWq3GH3/8Yfj7+xvbtm279YN0k7i4OENSuv31Zrt37zYkGY8//rhD+9ChQw1JxnfffWdvS+3LmzZtsredPXvW8PX1NV544QV729GjRw1JxltvveWwztT3sKNHj9qX9fHxMTp06GDYbDb7fC+//LIhyaGfZfQ9IHXbBQoUMP7++2/7fF988YUhyVi5cqW9rUmTJkZwcLBDXzIMw6GW/v37G+Hh4cb58+cd5unRo4cRGhqaZh81K1mypNGhQwenxyYj9Q0aNMhI66vE999/b0gy5s2b59C+Zs0ap/bU52zNmjUO8x44cMCQZEydOtWh/emnnzaCgoIc7tfN9/H69etG5cqVjRYtWjjd17TeG8xS77+/v7/x119/2dt//PFHQ5IxZMgQe1vPnj2NiIgIw2q12tt27dqV4ffyVGk9jqnvjR999JFDe2beC+/2cy319dCmTRuHPle/fn3DYrEYTz75pL3txo0bRrFixRzWcyf94Hav3dt9nt1s1KhRhiTj3Llz6c5z3333GVWqVDGuXbtmb7PZbEaDBg2Me+65x9720ksvGd7e3g6vi6SkJCMsLMx47LHH7G0ZfU2m9rXM9BVkHw63Q5aYNm2a1q5d6/C3evVq+/SwsDBduXJFa9eudel2e/fu7fCrVL169WQYhh577DGH+erVq6c///xTN27csLeZ95ak7glr2rSp/vjjD8XFxd1220uWLFHjxo2VL18+nT9/3v7XsmVLWa1Wbdq0SVLKic5eXl72PQdSynkQzz77bIbv58KFC+Xh4aEuXbrY23r27KnVq1c7HcKQUfXq1XN6zlL3+GW1oKAgh2PxfXx8VLduXf3xxx/2tiVLlqhChQoqX768w+PbokULSUpz9LabmZ/j5ORkXbhwQWXKlFFYWJh27dp1R7Wb13nlyhWdP39eDRo0kGEYaR6G8+STTzrcbty4scP9vNv+8dRTT+nSpUvq3r27fv75Zx08eFAxMTHasWOHJDkcBjh+/Hi9+eab6tatm3r06KE5c+bojTfe0JYtWxwOVapSpYoOHTqk7du369ChQ1q6dKk8PDz0wgsvqEuXLrr33nu1fPlyVatWTdHR0XrttdecDrkxSz20Lzg4OEP3adWqVZKk559/3qE9daCRm89dqlixoho3bmy/XahQIZUrV87hcc6odevW6fr163r22WcdDiVL65fgjL4HpOrevbvDXr/UmlPrPHfunDZt2qTHHntMJUqUcFg2tRbDMLRs2TJ17NhRhmE4bLdNmzaKi4u74759u/puZcmSJQoNDVWrVq0caqpVq5aCgoKcXq/R0dH2X+FTlS1bVtWrV9eiRYvsbVarVUuXLlXHjh0dXnvm/1+8eFFxcXFq3LjxHd93SercubPDnqC6deuqXr169v4opXzmnDp1yuH+zJs3T/7+/g7vz3fK19dX/fr1c2jLzHvh3X6uperfv79D/0/9bO3fv7+9zdPTU7Vr13Z6385MP3Dlazej/v77b3333Xfq1q2bLl++bK/xwoULatOmjQ4dOmQ/bLJ79+5KTk7W8uXL7cv/97//tb/nSln7mkT24nA7ZIm6devecuCGp59+WosXL1a7du0UGRmp1q1bq1u3bpkebvpmN3+RSB2lq3jx4k7tNptNcXFx9kMAt2zZolGjRmnbtm328y1SxcXFOY34dbNDhw7p119/VaFChdKcfvbsWUkp58WEh4c7Hd5jPofkdv7zn/+obt26unDhgi5cuCBJqlGjhq5fv64lS5bYDzPLjIIFC9oHfshuxYoVc/gAlqR8+fLp119/td8+dOiQ9u3bd9vH91auXr1qH+ji5MmTDl/kM/OFwezEiRMaOXKkvvzyS6eAevM6/fz8nOrPly+fw3J32z/atWunqVOnavjw4apZs6aklPN73njjDQ0bNuy25w0NGTJEI0aM0Lp16xwOZ0o9Rj/Vd999p//+9786cOCADhw4oB49emj69OmKiopSz549Vbx4cacvd6lCQkIkSZcvX87QfTp+/Lg8PDycRscsWrSowsLCdPz4cYf2m98HJOfHOaNS120+RFFK+fJ282GNGX0PSK/O1PWl1pn6xfBW1xE7d+6cLl26pBkzZqQ7EmBGXhtpuV19t
3Lo0CHFxcWpcOHCGaopvVERu3fvrpdfflknT55UZGSkNmzYoLNnz9q/kKb66quv9Prrr2v37t1O5yjeqZufcykluC1evNh+u1WrVgoPD9e8efN03333yWazacGCBerUqVOGfwS4lcjISKeBijLzXni3n2upMvPZau4fme0HrnztZtThw4dlGIZGjBihESNGpDnP2bNnFRkZqWrVqql8+fJatGiRPSAuWrRIBQsWtIfUrHxNInsRkuAWhQsX1u7du/XNN99o9erVWr16tWbPnq3evXuneVJkRqU3KlV67alfko8cOaL77rtP5cuX16RJk1S8eHH5+Pho1apVevfdd50GXkiLzWZTq1atNGzYsDSnly1bNoP34tZSf9GX0v4Qnzdv3h2FJHe63fMjpTy+VapU0aRJk9Kc9+YP67Q8++yzmj17tmJiYlS/fn37RUR79OiRoef4ZlarVa1atdLff/+tf//73ypfvrwCAwN18uRJ9e3b12md2TVq2jPPPKN+/frp119/lY+Pj6pXr26/ttHt+mHqgBZ///13uvNYrVYNHjxYw4cPV2RkpMaOHasGDRrYQ9HAgQM1b968W4akiIgIh5PgMyKjX3gz0p+yQmbfA1xRZ2ofe+SRR9I9b8082mRm3E19NptNhQsX1rx589KcfvMX/PTOe+zevbteeuklLVmyRDExMVq8eLFCQ0MdflD7/vvv9cADD6hJkyb64IMPFB4eLm9vb82ePVvz58+/ba13w9PTUw8//LBmzpypDz74QFu2bNGpU6dcNpJnWo9LRt8LXfG5liozn603v29nph+447Wb+jgMHTrUaW9mKvMPNN27d9cbb7yh8+fPKzg4WF9++aV69uwpLy8vh/VlxWsS2YuQBLfx8fFRx44d1bFjR9lsNj399NOaPn26RowYoTJlytzVL4CZtXLlSiUlJenLL790+CUrrUO40qurdOnSSkhIuO3emJIlS+rbb79VQkKCw6/6Bw4cyFCt8+bNk7e3tz777DOnD5TNmzfrvffe04kTJ9L8RS43K126tH755Rfdd999t+0b6U1funSp+vTpo3feecfedu3atUyPLJhqz549OnjwoObOnetwovLdHEZ6t/0jVWBgoOrXr2+/vW7dOvn7+6thw4a3XC71cJP0fqWWUk4Gv3z5soYOHSpJOnXqlMP1tCIiIm47qtf999+vGTNmaNu2bQ51pqVkyZKy2Ww6dOiQwwnaZ86c0aVLl+wj1WWF1HUfOnRIpUqVsrefO3fO6dftjL4HZFTq9m4VJgsVKqTg4GBZrVa37Am+1fvhunXr1LBhw7sa+CU6Olp169bVokWL9Mwzz2j58uXq3Lmzw3Wsli1bJj8/P33zzTcO7bNnz77j7UpKcyTIgwcPOl2bp3fv3nrnnXe0cuVKrV69WoUKFUr3y7YrZPS90BWfa66o1RX9wMzVtaa+zry9vTP0GurevbvGjBmjZcuWqUiRIoqPj3fY6+7u1yRch3OS4Baph4il8vDwsP+yknqoRGBgoCTd8RfYzEgNGzcffpXWh2xgYGCaNXXr1k3btm3TN9984zTt0qVL9vOf2rdvrxs3bjgMw2q1WjV16tQM1Tpv3jw1btxY3bt310MPPeTw9+KLL0qSFixYkKF15SbdunXTyZMn7aOqmV29elVXrlyx307vOfL09HT6RXLq1KlOQ3BnVFr9xjAM+1D2d+Ju+0datm7dquXLl6t///72w2SuXbuW5uFuY8eOlWEY6R76+vfff2vUqFF666235OfnJyllJKz9+/fb59m3b5/DaGRpGTZsmAIDA/X444/rzJkzTtOPHDlifxzbt28vSU6jw6X+kn6no5dlRMuWLeXt7a2pU6c6PM831yJl/D0gowoVKqQmTZpo1qxZOnHihMO01Fo8PT3VpUsXLVu2LM0wlXo9qKyS3vt0t27dZLVaNXbsWKdlbty4kan39e7du+uHH37QrFmzdP78eadD7Tw9PWWxWBxex8eOHXMY5e1OrFixwiHs//TTT/rxxx/tI26mSr023Mcff6xly5apR48e9r0KWSGj74Wu+FxzRa2u6gepXP3doHDhwmrWrJmmT5+u2NhYp+k3v4YqVKigKlWqaNGiRVq0aJHCw8MdLtLt7tckXIc9ScgSq1evdvjSlKpBgwYqVaqUHn/8cf39999q0aKFihUrpuPHj2vq1KmqXr26/Zfi6tWry9PTUxMmTFBcXJx8fX3t13twtdatW9v3bA0cOFAJCQmaOXOmChcu7PSmWatWLX344Yd6/fXXVaZMGRUuXFgtWrTQiy++qC+//FL333+/fcjSK1euaM+ePVq6dKmOHTumggULqmPHjmrYsKGGDx+uY8eOqWLFilq+fHmGzon58ccfdfjwYT3zzDNpTo+MjFTNmjU1b948/fvf/87UY3Dy5En95z//cWoPCgpS586dJaV88YiOjlafPn00Z86cTK3/bj366KNavHixnnzySa1fv14NGzaU1WrV/v37tXjxYvs1VqSU52jdunWaNGmSIiIiFB0drXr16un+++/XZ599ptDQUFWsWFHbtm3TunXr7OelmaX+WnyrayiVL19epUuX1tChQ3Xy5EmFhIRo2bJld3X8/N30DynlHJpu3brpgQceUNGiRbV371599NFHqlq1qsaNG2ef7/Tp06pRo4Z69uyp8uXLS0q51seqVavUtm1bderUKc31jxgxQlWqVHEYar5Lly567bXX9NRTT6lkyZKaPn16uocCpSpdurTmz5+v7t27q0KFCurdu7cqV66s69eva+vWrVqyZIn9+lbVqlVTnz59NGPGDF26dElNmzbVTz/9pLlz56pz585pXkvHVVKvZTV+/Hjdf//9at++vX7++WetXr1aBQsWdJg3o+8BmfHee++pUaNGqlmzpp544glFR0fr2LFj+vrrr7V7925JKRcQXr9+verVq6cBAwaoYsWK+vvvv7Vr1y6tW7fulodO3q1atWpJkp577jm1adNGnp6e6tGjh5o2baqBAwdq/Pjx2r17t1q3bi1vb28dOnRIS5Ys0ZQpUxyuc3Qr3bp109ChQzV06FDlz5/f6df5Dh06aNKkSWrbtq0efvhhnT17VtOmTVOZMmUczmvMrDJlyqhRo0Z66qmnlJSUpMmTJ6tAgQJpHk7Zu3dv+57VrL5odkbfC13xuXa3XNkPUt3pd4NJkyY5XebBw8NDL7/8sqZNm6ZGjRqpSpUqGjBggEqVKqUzZ85o27Zt+uuvv/TLL784LNe9e3eNHDlSfn5+6t+/vzw8HPc5uPM1CRfKtnH08I9wqyHAZRrmcunSpUbr1q2NwoULGz4+PkaJEiWMgQMHGrGxsQ7rmzlzplGqVCnD09PTYcjP9IYAX7JkSZr1bN++3aE9rSFBv/zyS6Nq1aqGn5+fERUVZUyYMMGYNWuWw3DAhmEYp0+fNjp06GAEBwcbkhzquHz5svHSSy8ZZcqUMXx8fIyCBQsaDRo0MN5++22HockvXLhgPProo0ZISIgRGhpqPProo8bPP/9826FAn332WUOSceTIkXTn
GT16tCHJ+OWXX+xtdzMEuHmo9D179hiSjOHDh6e7/ZtlZAjwSpUqObX36dPHaZj269evGxMmTDAqVapk+Pr6Gvny5TNq1apljBkzxoiLi7PPt3//fqNJkyaGv7+/wzDNFy9eNPr162cULFjQCAoKMtq0aWPs378/zWGBCxYsaNx77723vX+///670bJlSyMoKMgoWLCgMWDAAOOXX35xei779OljBAYGOi2f2hfN7rR/GIZh/P3330anTp2MokWLGj4+PkZ0dLTx73//22lI8IsXLxqPPPKIUaZMGSMgIMDw9fU1KlWqZIwbN86hr5r9+uuvho+Pj/Hzzz87TZszZ44RFRVlFChQwHj++edvOby/2cGDB40BAwYYUVFRho+PjxEcHGw0bNjQmDp1qsNwvMnJycaYMWOM6Ohow9vb2yhevLjx0ksvOcxjGM7DWae6+T0jo0OAG4ZhWK1WY8yYMUZ4eLjh7+9vNGvWzPjtt9/S7DcZeQ9Ib9uGYaT5Wvntt9+Mf/3rX0ZYWJjh5+dnlCtXzhgxYoTDPGfOnDEGDRpkFC9e3PD29jaKFi1q3HfffcaMGTOctnGz9IYAz0h9N27cMJ599lmjUKFChsVicerLM2bMMGrVqmX4+/sbwcHBRpUqVYxhw4YZp06dSnf7aWnYsGGaw8Cn+uSTT4x77rnH8PX1NcqXL2/Mnj07zddWZoYAf+utt4x33nnHKF68uOHr62s0btzY4X3VLDY21vD09DTKli17y3WnJ70hwNN6bzSMjL8X3u3nWmY+Qw0j/fe5u+kHN792DSP97wZpSa01rT9PT0/7fEeOHDF69+5tFC1a1PD29jYiIyON+++/31i6dKnTOg8dOmRfx+bNm9PcbkZekwwBnrNZDCOLz2QFkGd88MEHGjZsmI4cOeJ0scG85Pfff1elSpXu6kKUAP45zp8/r/DwcI0cOTLdEdIA5C6ckwQgw9avX6/nnnsuTwckKeV+1q9fn4AEIEPmzJkjq9WqRx991N2lAHAR9iQBAADcge+++06///67RowYoebNmztcZBRA7kZIAgAAuAPNmjXT1q1b1bBhQ/3nP/9RZGSku0sC4CKEJAAAAAAw4ZwkAAAAADAhJAEAAACASZ6/mKzNZtOpU6cUHBwsi8Xi7nIAAAAAuIlhGLp8+bIiIiKcLgRsludD0qlTp1S8eHF3lwEAAAAgh/jzzz9VrFixdKfn+ZAUHBwsSTp69Kjy58/v5mqQFyQnJ+u///2vWrduLW9vb3eXgzyAPgVXo0/B1ehTcDV39an4+HgVL17cnhHSk+dDUuohdsHBwQoJCXFzNcgLkpOTFRAQoJCQED4o4BL0KbgafQquRp+Cq7m7T93uNBwGbgAAAAAAE0ISAAAAAJgQkgAAAADAJM+fk5QRhmHoxo0bslqt7i4FuUBycrK8vLx07dq1PNlnvL295enp6e4yAAAA3OYfH5KuX7+u2NhYJSYmursU5BKGYaho0aL6888/8+S1tywWi4oVK6agoCB3lwIAAOAW/+iQZLPZdPToUXl6eioiIkI+Pj558ksvXMtmsykhIUFBQUG3vAhZbmQYhs6dO6e//vpL99xzD3uUAADAP9I/OiRdv35dNptNxYsXV0BAgLvLQS5hs9l0/fp1+fn55bmQJEmFChXSsWPHlJycTEgCAAD/SHnvG94dyItfdIE7xd5UAADwT0c6AAAAAAATQhIAAAAAmBCSXOiGLe8NBw0AAAD80xCSXGTj6SMqPH+UNp0+km3b3LZtmzw9PdWhQwdJUt++fWWxWNL9i4qKkiQ1a9YszelPPvlkttUOAAAA5FSEJBdItln15Jaliku+pie3LlVyNu1R+uSTT/Tss89q06ZNOnXqlKZMmaLY2Fj7nyTNnj3bfnv79u32ZQcMGOAwb2xsrCZOnJgtdQMAAAA52T96CHBXef/3zToUf06SdDDunKbt26KYSk2ydJsJCQlatGiRduzYodOnT2vOnDl6+eWXFRoa6jBfWFiYihYt6rR8QEBAmu0AAADAPx17ku7SqcQ4jdi1Rsb/3zYkjdi1WqcS47J0u4sXL1b58uVVrlw5PfLII5o1a5YMw7j9ggAAAABuiZB0l4b+tFLXbzq8Lslq1Ys/rczS7X7yySd65JFHJElt27ZVXFycNm7cmOHlP/jgAwUFBTn8zZs3L6vKBQAAAHINDre7C+tjD2vR0d1O7VbDpoVHd2tAuXvVLLyMy7d74MAB/fTTT/r8888lSV5eXurevbs++eQTNWvWLEPr6NWrl1555RWHtiJFiri6VAAAACDXISTdhc8O75CHLLLJ+TA3D1n06eEdWRKSPvnkE924cUMRERH2NsMw5Ovrq/fff9/pvKS0hIaGqkwZ19cGAAByr7hdkxW3a4rzBMNQ+WvXFDvHT7JYnCaH1hys0JoxWV8gkE0ISXehd5namnt4R5rTbDLU5546Lt/mjRs39Omnn+qdd95R69atHaZ17txZCxYsYChvAABwR2xJ8bImnExzmo8k65X0lwPyEkLSXWgWXkbdo6tr6bFfZTVs9nZPi4e6RlVV06KlXb7Nr776ShcvXlT//v2d9hh16dJFn3zySYZCUmJiok6fPu3Q5uvrq3z58rm0XgAAkHt4+IbIMyjSsdEwZL1yKmV6YLgsFudT2j18Q7KjPCDbEJLu0tt1O+rLE3t11fq/kOTr6am36nbMku198sknatmyZZqH1HXp0kUTJ07Ur7/+qqpVq95yPTNnztTMmTMd2tq0aaM1a9a4tF4AAJB7hNaMcTpszpZ8RcenpfyIWrTXL/INCMv+wpBr5dZDOAlJdykiIFRja7bVi9tXypBkkTS2ZjtFBNz+vKA7sXJl+qPm1a1b12EY8PSGBN+wYYOrywIAAACc3OoQTg9ZZL2S9vdVdx/CSUhygWcqNtLMAz/oQPw5lQ0tpEEVGrq7JAAAAMDt0juEc+s1i54MvF8zrVtVz/J3msu5EyHJBbw9PPVhw4f0r3Wz9VGDh+Tt4enukgAAAAC3S+sQzqSkeL362RBdtvhqZL6H9OuDw3Lc92cuJusiTYuW1tmHx6hJFgzWAAAAAOQV7+//UUc9wiRJhy5f0LR9W9xbUBoISS7klcMSMAAAwN2y2gz9cK2SPk9spE1HL8lqS/scEiAjTiXGadQv39oHazAkjdi1WqcS49xb2E0ISQAAAEjT8j2xKj1xq3olPq+hheuozaLvFf3GOi3fE+vu0pBLDf1ppa7brA5tSVarXvwp/cHJ3IGQBAAAACfL98Sq69wd+ivuqhRxUBZPqxRxUCfjEtV17g6CEjJtfexhLTq6W9abRmC2GjYtPLpbG2IPu6kyZwzckEnpjvV+G+4e6x0AACCjrDZDMSt+kyFJBU5KPldTJvhclVHglCwXimnIF7+pU6Wi8vRwvsYNkJbPDu+QhyyyyfmQTQ9Z9OnhHWoWXsYNlTkjJGXSrcZ6v91yAAAAucH3f1zQX3HXJK8kqfAx+7U+LRbJKHxURlwh/XkpZb5mZQq6tVbkHr3L1NbcwzvSnGaToT731MnmitJ
HSMqk9MZ6t145JUnyDIxI86rB7h7rHQAAIKNiLyel/KfIEclic5xosaW0n6z4v/mADGgWXkYN8pXT1r8PShbT3iTDooYFyqlpDholmpCUSWmN9Z6clKCl7zbWWVs+VWn3qZreE5nrdj3PmTNHMTExunTpkrtLyRJ9+/bVpUuXtGLFinTn2bBhg5o3b66LFy8qLCws22oDACCnCQ/2lQIuyhJ2zmmaxSIp7JyMi5dS5gMyaPmeWG39IZ+Meyyy3BSStmwL0/JSsXqwSrj7CjRh4Ia7ZB/15fxrGvL3ELWc+XOWj/rSt29fWSwWp7+2bdtmaPmoqChNnjzZoa179+46ePBgFlTraM6cOW4JIFOmTNGcOXPst5s1a6aYmBiXrHvXrl1q1aqVwsLCVKBAAT3xxBNKSEhwmCet52vhwoX26T///LNq1KihoKAgdezYUX///b8rT9+4cUO1atXSTz/9lKF6fv75Z3Xt2lVFihSRn5+f7rnnHg0YMMD+/B47dkwWi0W7d++++zsPAMiTGpcqoMDC55XGqSMpDCmw8Dk1LlUgW+tC7mU/z+2Gr3Q2SqljNxiGZJyNlm74asgXv+WYIeYJSXfhf6O+OO5qPhl3LctHfWnbtq1iY2Md/hYsWHDH6/P391fhwoVdWGHOEhoamiXh7NSpU2rZsqXKlCmjH3/8UWvWrNHevXvVt29fp3lnz57t8Hx17tzZPu3xxx9XixYttGvXLsXFxWncuHH2ae+8844aNmyounXr3raer776Svfee6+SkpI0b9487du3T//5z38UGhqqESNGuOIuAwD+ATw9LBpWo5GU3oExFmlYzUa57sgZuI/9PDdJuhApXfdP+f91f+lChAxJf166pu//uOC2Gs0ISXfIYdSXm6S2ZWUa9vX1VdGiRR3+8uXLl7J9w9Do0aNVokQJ+fr6KiIiQs8995yklD0ox48f15AhQ+x7NCTnPTyjR49W9erVNWvWLJUoUUJBQUF6+umnZbVaNXHiRBUtWlSFCxfWG2+84VDXpEmTVKVKFQUGBqp48eJ6+umn7XtVNmzYoH79+ikuLs6+7dGjR0uSkpKSNHToUEVGRiowMFD16tXThg0b0r3/Q4cO1f3332+/PXnyZFksFq1Zs8beVqZMGX388ceSUva+pYaSvn37auPGjZoyZYq9jmPHjtmX27lzp2rXrq2AgAA1aNBABw4cSLeOr776St7e3po2bZrKlSunOnXq6KOPPtKyZct0+LDjMJZhYWEOz5efn5992r59+zRgwACVLVtWPXv21L59+yRJf/zxhz755BOnxzktiYmJ6tevn9q3b68vv/xSLVu2VHR0tOrVq6e3335b06dPv+06AABINaJhXTXIV04ybgpChkUN85fXiAa3//EOSOV4/pqHdKqsDKundKqszJEkp5znRki6Qw5pOA3uTMPLli3Tu+++q+nTp+vQoUNasWKFqlSpIklavny5ihUrptdee82+RyM9R44c0erVq7VmzRotWLBAn3zyiTp06KC//vpLGzdu1IQJE/Tqq6/qxx9/tC/j4eGh9957T3v37tXcuXP13XffadiwYZKkBg0aaPLkyQoJCbFve+jQoZKkZ555Rtu2bdPChQv166+/qmvXrmrbtq0OHTqUZm1NmzbV5s2bZbWmXIxs48aNKliwoD1YnTx5UkeOHFGzZs2clp0yZYrq16+vAQMG2OsoXry4fforr7yid955Rzt27JCXl5cee+yxdB+jpKQk+fj4yMPjfy8lf/+UX0Y2b97sMO+gQYNUsGBB1a1bV7NmzZJhukZAtWrVtHbtWt24cUPffvutqlatKkl68sknNXHiRAUHB6dbQ6pvvvlG58+ftz/eN+M8KwBAZi1q3U3+Xp4ObQFeXlrYqqubKkJu5XT+WmKYtL9hyr+3ms9NCEl3KKMpN6vS8FdffaWgoCCHv9RDtE6cOKGiRYuqZcuWKlGihOrWrasBAwZIkvLnzy9PT08FBwfb92ikx2azadasWapYsaI6duyo5s2b68CBA5o8ebLKlSunfv36qVy5clq/fr19mZiYGDVv3lxRUVFq0aKFXn/9dS1evFiS5OPjo9DQUFksFvu2g4KCdOLECc2ePVtLlixR48aNVbp0aQ0dOlSNGjXS7Nmz06ytcePGunz5sn7++WcZhqFNmzbphRdesIekDRs2KDIyUmXKOI+1HxoaKh8fHwUEBNjr8PT83wfAG2+8oaZNm6pixYoaPny4tm7dqmvX0g7ELVq00OnTp/XWW2/p+vXrunjxooYPHy5JDgH0tdde0+LFi7V27Vp16dJFTz/9tKZOnWqf/vHHH2vp0qUqXbq0fHx89NJLL+mzzz5TQECA6tSpozZt2qhMmTJ69dVX032+UgNl+fLl050HAIDMiAgI1Zhq9yn1BBKLpLG12ikiINS9hSHXaVyqgIqF+t10BKfF4X/Fw/xyzHlujG53hzKacrMqDTdv3lwffvihQ1v+/PklSV27dtXkyZNVqlQptW3bVu3bt1fHjh3l5ZW5pzsqKsphD0aRIkXk6enpsNekSJEiOnv2rP32unXrNH78eO3fv1/x8fG6ceOGrl27psTERAUEBKS5nT179shqtaps2bIO7UlJSSpQIO0XSlhYmKpVq6YNGzbIx8dHPj4+euKJJzRq1CglJCRo48aNatq0aabub6rUvTiSFB6eMsLK2bNnVaJECad5K1WqpLlz5+r555/XSy+9JE9PTz333HMqUqSIw+NkPh+oRo0aunLlit566y37YZCVKlXSxo0b7fNcuHBBo0aN0qZNm/Tss8+qQYMGWr58uerUqaN69eqpY8eOTrUYRs440REAkLc8U76ePtq+QH945tc9wQU0qEJDd5eEXMjTw6LJnSur69wdsshxTJDUqPRup8o55jw39iTdobTT8P9kdRoODAxUmTJlHP5SQ1Lx4sV14MABffDBB/L399fTTz+tJk2aKDk5OVPb8Pb2drhtsVjSbLPZUq6fcOzYMd1///2qWrWqli1bpp07d2ratGmSpOvXr6e7nYSEBHl6emrnzp3avXu3/W/fvn2aMmVKuss1a9ZMGzZssAei/Pnzq0KFCtq8efNdhSTzfUw9Zyv1Pqbl4Ycf1unTp3Xy5ElduHBBo0eP1rlz51SqVKl0l6lXr57++usvJSWlvafx+eefV0xMjIoVK6YNGzaoa9euCgwMVIcOHdI9Vys1ZO7fv/92dxEAgAzz9vDU2KvrFWwk6f3a7eXt4Xn7hYA0PFglXEv61FZkqONOhGJhflrSp3aOGf5bYk/SHcvpadjf318dO3ZUx44dNWjQIJUvX1579uxRzZo15ePjYz+Xx5V27twpm82md955x74XJfVQu1RpbbtGjRqyWq06e/asGjdunOHtNW3aVLNmzZKXl5d9+PNmzZppwYIFOnjwYJrnI92qjrtVpEgRSdKsWbPk5+enVq1apTvv7t27lS9fPvn6Ou9p/Pbbb7Vv3z77oYZWq9UecG8VdFu3bq2CBQtq4sSJ+vzzz52mX7p0ifOSAAB3pJ71pLbHz1CJwm
PdXQpyuQerhKtj2WD7NUYrdpqlFhVK5pg9SKkISXchNQ3HrNjjMAx4sTA/vdupcpam4aSkJJ0+fdqhzcvLSwULFtScOXNktVpVr149BQQE6D//+Y/8/f1VsmRJSSmH0W3atEk9evSQr6+vChYs6JKaypQpo+TkZE2dOlUdO3bUli1b9NFHHznMExUVpYSEBH377beqVq2aAgICVLZsWfXq1Uu9e/fWO++8oxo1aujcuXP2AQw6dOiQ5vaaNGmiy5cv66uvvtKbb74pKSUkPfTQQwoPD3c6fO/mOn788UcdO3ZMQUFB9r1wd+L9999XgwYNFBQUpLVr1+rFF1/Um2++aQ8kK1eu1JkzZ3TvvffKz89Pa9eu1bhx4+yDVphdu3ZNzzzzjBYsWGAPmg0bNtS0adM0aNAgLVu2TJMmTUqzjsDAQH388cfq2rWrHnjgAT333HMqU6aMzp8/r8WLF+vEiRMO12YCACAzvNK9aBKQOZ4eFt3rt1eSFBEdluMCksThdnftwSrhOjKsgeYVHKl387+rdQNq6I+XW2b57sI1a9YoPDzc4a9Ro0aSUs7XmTlzpho2bKiqVatq3bp1Wrlypf38ntdee03Hjh1T6dKlVahQIZfVVK1aNU2aNEkTJkxQ5cqVNW/ePI0fP95hngYNGujJJ59U9+7dVahQIU2cOFFSyjWEevfurRdeeEHlypVT586dtX379jTPA0qVL18+ValSRYUKFbIPVtCkSRPZbLbbHmo3dOhQeXp6qmLFiipUqJBOnDhxx/f7p59+UqtWrVSlShXNmDFD06dPt59rJMk+RHj9+vVVvXp1TZ8+XZMmTdKoUaOc1jVmzBh16NBB1atXt7e999572r17t5o0aaKOHTuqS5cu6dbSqVMnbd26Vd7e3nr44YdVvnx59ezZU3FxcXr99dfv+D4CAAD8k1iMPH62d3x8vEJDQ3X+/HmnQQCuXbumo0ePKjo62uGaNbcSt2uy4nbddJ6MYch65ZQkyTMwQrI4p+HQmoMVWjPmju4Dchabzab4+HiFhIQ4DM6QV9zJ6wJ3Jzk5WatWrVL79u2dzvsD7gR9Cnfqdt9zPALDZbE4f/bxPQeZYUu+ouPTUq7vGfHEWfkGhGXbtlOzQVxcnEJCQtKdj8PtMsmWFC9rwsl0p6e+iaS1HAAAQE52u+85titpX1+R7znIawhJmeThGyLPoMg7Wg4AACAnS/d7jmHo6rVr8vfzS/OIGb7nIK8hJGVSaM0YdicDAIA8Kb3vORzCiX8at55QsWnTJnXs2FERERGyWCxasWKFw3TDMDRy5EiFh4fL399fLVu21KFDh9xTLAAAAIB/BLfuSbpy5YqqVaumxx57TA8++KDT9IkTJ+q9997T3LlzFR0drREjRqhNmzb6/fffXXpCeR4fuwLIFF4PAADAVdIbDCTV6XnVcuRgIG4NSe3atVO7du3SnGYYhiZPnqxXX31VnTp1kiR9+umnKlKkiFasWKEePXrc9fZTdxcnJibK39//rtcH5AXXr1+XJHl6ckV1AABwd3LrYCA59pyko0eP6vTp02rZsqW9LTQ0VPXq1dO2bdvSDUlJSUlKSvrfhV3j41Me4OTkZCUnJzvNHxwcrDNnzshmsykgIECWNE5GBMwMw9D169d19erVPNdfbDabzp49Kz8/PxmGkeZrBq6X+jjzeMNV6FNwNfoU7pThFZhyiZyb25Vy2RE/Pz+l9W3K8ArMkv6W0XXm2JB0+vRpSVKRIkUc2osUKWKflpbx48drzJgxTu3r169XQEBAmssEBwfrypUrefKaN0BmJScn69y5c/r111/dXco/ztq1a91dAvIY+hRcjT6FzCsjFZ5y+9ludkrSqVUuryYxMTFD8+XYkHSnXnrpJT3//PP22/Hx8SpevLiaN2/udDFZM6vVqhs3bnA+Bm7rxo0b2rp1qxo0aCAvr7z1ErJYLPL29uYHg2yWnJystWvXqlWrVowaBZegT8HV6FNwNXf1qdSjzG4nx37DK1q0qCTpzJkzCg8Pt7efOXNG1atXT3c5X19f+fr6OrV7e3vf8gngBY+MSk5O1o0bNxQUFES/gUvd7n0KyCz6FFyNPgVXy+4+ldFt5difi6Ojo1W0aFF9++239rb4+Hj9+OOPql+/vhsrAwAAAJCXuXVPUkJCgg4fPmy/ffToUe3evVv58+dXiRIlFBMTo9dff1333HOPfQjwiIgIde7c2X1FAwAAAMjT3BqSduzYoebNm9tvp55L1KdPH82ZM0fDhg3TlStX9MQTT+jSpUtq1KiR1qxZ49JrJAEAAACAmVtDUrNmzW45UILFYtFrr72m1157LRurAgAAAPBPlmPPSQIAAAAAdyAkAQAAAIAJIQkAAAAATAhJAAAAAGBCSAIAAAAAE0ISAAAAAJgQkgAAAADAhJAEAAAAACaEJAAAAAAwISQBAAAAgImXuwsAAAB3Jm7XZMXtmuI8wTBU/to1xc7xkywWp8mhNQcrtGZM1hcIALkUIQkAgFzKlhQva8LJNKf5SLJeSX85AED6CEkAAORSHr4h8gyKdGw0DFmvnNINWeQTWFQWi/OR9R6+IdlUIQDkToQkAAByqdCaMU6HzdmSr2jxR5X1ZOD9Wtbicd0XVc09xQFALsbADQAA5CHJNqte9W+hyxZfPbNjlZJtVneXBAC5DiEJAIA85P39P+qoR5gk6dDlC5q2b4t7CwKAXIiQBABAHnEqMU6jfvnWPqKdIWnErtU6lRjn3sIAIJchJAEAkEcM/Wmlrt90eF2S1aoXf1rppooAIHciJAEAkAesjz2sRUd3y2oYDu1Ww6aFR3drQ+xhN1UGALkPIQkAgDzgs8M7ZJHzhWMlySKLPj28I5srAoDci5AEAEAeEO1RSoaMNKcZMhTtWSqbKwKA3IuQBABALme1GZr5XZyMS4V009F2KbcvFdLH38XJaks7RAEAHBGSAADI5b7/44L+irsmnSktGTd9tBseMs6U1p+Xrun7Py64p0AAyGUISQAA5HKxl5NS/nPDVzobZd+bZBiSzkantJvnAwDcEiEJAIBcLjzY9383LkRK1/1T/n/dX7oQkfZ8AIB0EZIAAMjlGpcqoGKhfv8/tp2HdKqsDKundKqsJA9ZJBUP81PjUgXcWicA5BaEJAAAcjlPD4smd64sSSlBKTFM2t9QSgyzDwr+bqfK8vRIe4hwAIAjQhIAAHnAg1XCtaRPbUWGph5SlxKIioX5aUmf2nqwSrj7igOAXMbL3QUAAADXeLBKuDqWDdbSdxvrrC2fKnaapRYVSrIHCQAyiZAEAEAe4ulh0b1+eyVJEdFhBCQAuAMcbgcAAAAAJoQkAAAAADAhJAEAAACACSEJAAAAAEwYuAEAgFwqbtdkxe2a4thoGPb/np5XTRaL8++hoTUHK7RmTBZXBwC5FyEJAIBcypYUL2vCyfSnX4lNdzkAQPoISQAA5FIeviHyDIp0nmAYunrtmvz9/CSL8xDgHr4h2VAdAORehCQAAHKp0JoxaR42l5ycrFWrVql9+/by9vbO/sIAIJdj4AYAA
AAAMCEkAQAAAIAJIQkAAAAATAhJAAAAAGBCSAIAAAAAE0ISAAAAAJgQkgAAAADAhJAEAAAAACaEJAAAAAAwISQBAAAAgAkhCQAAAABMCEkAAAAAYEJIAgAAAAATQhIAAAAAmBCSAAAAAMCEkAQAAAAAJoQkAAAAADAhJAEAAACACSEJAAAAAEwISQAAAABgQkgCAAAAABNCEgAAAACYEJIAAAAAwISQBAAAAAAmhCQAAAAAMCEkAQAAAIAJIQkAAAAATAhJAAAAAGBCSAIAAAAAE0ISAAAAAJgQkgAAAADAhJAEAAAAACaEJAAAAAAwISQBAAAAgAkhCQAAAABMCEkAAAAAYEJIAgAAAAATQhIAAAAAmBCSAAAAAMCEkAQAAAAAJoQkAAAAADAhJAEAAACASY4OSVarVSNGjFB0dLT8/f1VunRpjR07VoZhuLs0AAAAAHmUl7sLuJUJEyboww8/1Ny5c1WpUiXt2LFD/fr1U2hoqJ577jl3lwcAAAAgD8rRIWnr1q3q1KmTOnToIEmKiorSggUL9NNPP7m5MgAAAAB5VY4OSQ0aNNCMGTN08OBBlS1bVr/88os2b96sSZMmpbtMUlKSkpKS7Lfj4+MlScnJyUpOTs7ympH3pfYj+hNchT4FV6NPwdXoU3A1d/WpjG7PYuTgE3xsNptefvllTZw4UZ6enrJarXrjjTf00ksvpbvM6NGjNWbMGKf2+fPnKyAgICvLBQAAAJCDJSYm6uGHH1ZcXJxCQkLSnS9Hh6SFCxfqxRdf1FtvvaVKlSpp9+7diomJ0aRJk9SnT580l0lrT1Lx4sUVGxurAgUKZFfpyMOSk5O1du1atWrVSt7e3u4uB3kAfQquRp+Cq9Gn4Gru6lPx8fEqWLDgbUNSjj7c7sUXX9Tw4cPVo0cPSVKVKlV0/PhxjR8/Pt2Q5OvrK19fX6d2b29vXtRwKfoUXI0+BVejT8HV6FNwtezuUxndVo4eAjwxMVEeHo4lenp6ymazuakiAAAAAHldjt6T1LFjR73xxhsqUaKEKlWqpJ9//lmTJk3SY4895u7SAAAAAORROTokTZ06VSNGjNDTTz+ts2fPKiIiQgMHDtTIkSPdXRoAAACAPCpHh6Tg4GBNnjxZkydPdncpAAAAAP4hcvQ5SQAAAACQ3QhJAAAAAGBCSAIAAAAAE0ISAAAAAJgQkgAAAADAhJAEAAAAACaEJAAAAAAwISQBAAAAgAkhCQAAAABMCEkAAAAAYEJIAgAAAAATQhIAAAAAmBCSAAAAAMCEkAQAAAAAJoQkAAAAADAhJAEAAACACSEJAAAAAEwISQAAAABgQkgCAAAAABNCEgAAAACYEJIAAAAAwISQBAAAAAAmhCQAAAAAMCEkAQAAAICJl7sLAIB/irhdkxW3a4rzBMNQ+WvXFDvHT7JYnCaH1hys0JoxWV8gAACQREgCgGxjS4qXNeFkmtN8JFmvpL8cAADIPoQkAMgmHr4h8gyKdGw0DFmvnEqZHhgui8X5KGgP35DsKA8AAPw/QhIAZJPQmjFOh83Zkq/o+LR8kqSivX6Rb0BY9hcGAAAcMHADAAAAAJgQkgAAAADAhJAEADnADTmPagcAANyDkAQAbvajZ6TqhDyhzWePu7sUAAAgQhIAuFWyzapX/VvossVXz+xYpWSb1d0lAQDwj0dIAgA3mrrvBx31CJMkHbx8QVP3bnZvQQAAgJAEAO7y8c5DGr5znWT53/lIw376Wh/vPOTGqgAAACEJANxg+Z5YDdy0XDaLzaHdsNg0cNNyLd8T66bKAAAAIQkAspnVZuip1RulsHPmnUgpLJLCzump1ZtktRnuKA8AgH88QhIAZLPv/7igc15/ykgnAxmGdM7rhL7/40L2FgYAACQRkgAg28VeTpIuFXXei/T/LBZJl4qmzAcAALIdIQkAsll4sK+UGCbjUiGnvUmGIRmXCkmJYSnzAQCAbEdIAoBs1rhUARUL9ZPlTGnJuOlt2PCQ5UxpFQ/zU+NSBdxTIAAA/3CEJADIZp4eFk3uXFm64SvL2Sj73iTDkCxno6Ubvnq3U2V5eqRzPB4AAMhShCQAcIMHq4RrSZ/airhRSrrun9J43V+R1lJa0qe2HqwS7t4CAQD4ByMkAYCbPFglXH8Ma6RXr2yWn+2G3q7WVkdfbk1AAgDAzQhJAOBGnh4W9fPcot2XP9SgapU4xA4AgByAkAQAOYCXuHAsAAA5BSEJAAAAAEwISQAAAABgQkgCAAAAABNCEgAAAACYeLm7AAD4p4jbNVlxu6Y4Nhr/G7Dh9Lxqslicf7sKrTlYoTVjsrg6AACQipAEANnElhQva8LJ9KdfiU13OQAAkH0ISQCQTTx8Q+QZFOk8wTB09do1+fv5SRbn6yR5+IZkQ3UAACAVIQkAsklozZg0D5tLTk7WqlWr1L59e3l7e2d/YQAAwAEDNwAAAACACSEJAAAAAEwISQAAAABgQkgCAAAAABNCEgAAAACYEJIAAAAAwISQBAAAAAAmhCQAAAAAMCEkAQAAAIDJHYWkGzduaN26dZo+fbouX74sSTp16pQSEhJcWhwAAAAAZDevzC5w/PhxtW3bVidOnFBSUpJatWql4OBgTZgwQUlJSfroo4+yok4AAAAAyBaZ3pM0ePBg1a5dWxcvXpS/v7+9/V//+pe+/fZblxYHAAAAANkt03uSvv/+e23dulU+Pj4O7VFRUTp58qTLCgMAAAAAd8j0niSbzSar1erU/tdffyk4ONglRQEAAACAu2Q6JLVu3VqTJ0+237ZYLEpISNCoUaPUvn17V9YGAAAAANku04fbvfPOO2rTpo0qVqyoa9eu6eGHH9ahQ4dUsGBBLViwICtqBAAAAIBsk+mQVKxYMf3yyy9auHChfv31VyUkJKh///7q1auXw0AOAAAAAJAbZTokSZKXl5ceeeQRV9cCAAAAAG6X6ZD06aef3nJ6796977gYAAAAAHC3TIekwYMHO9xOTk5WYmKifHx8FBAQQEgCAAAAkKtlenS7ixcvOvwlJCTowIEDatSoEQM3AAAAAMj1Mh2S0nLPPffozTffdNrLBAAAAAC5jUtCkpQymMOpU6dctToAAAAAcItMn5P05ZdfOtw2DEOxsbF6//331bBhQ5cVBgAAAADukOmQ1LlzZ4fbFotFhQoVUosWLfTOO++4qi4AAAAAcItMhySbzZYVdQAAAABAjuCyc5KyysmTJ/XII4+oQIEC8vf3V5UqVbRjxw53lwUAAAAgj8rQnqTnn38+wyucNGnSHRdzs4sXL6phw4Zq3ry5Vq9erUKFCunQoUPKly+fy7YBAAAAAGYZCkk///xzhlZmsVjuqpibTZgwQcWLF9fs2bPtbdHR0bdcJikpSUlJSfbb8fHxklIuepucnOzS+vDPlNqP6E9wFfoUXI0+BVejT8HV3NWnMro9i2EYRhbXcscqVqyoNm3a6K+//tLGjRsVGRmpp59+WgMGDEh3mdGjR2vMmDFO7fPnz1dAQEBWlgsAAAAgB0tMTNTDDz+suLg4hYSEpDtfjg5Jfn5+klIO
9+vatau2b9+uwYMH66OPPlKfPn3SXCatPUnFixdXbGysChQokC11I29LTk7W2rVr1apVK3l7e7u7HOQB9Cm4Gn0Krkafgqu5q0/Fx8erYMGCtw1JmR7dTpJ27NihxYsX68SJE7p+/brDtOXLl9/JKtNks9lUu3ZtjRs3TpJUo0YN/fbbb7cMSb6+vvL19XVq9/b25kUNl6JPwdXoU3A1+hRcjT4FV8vuPpXRbWV6dLuFCxeqQYMG2rdvnz7//HMlJydr7969+u677xQaGprpQm8lPDxcFStWdGirUKGCTpw44dLtAAAAAECqTIekcePG6d1339XKlSvl4+OjKVOmaP/+/erWrZtKlCjh0uIaNmyoAwcOOLQdPHhQJUuWdOl2AAAAACBVpkPSkSNH1KFDB0mSj4+Prly5IovFoiFDhmjGjBkuLW7IkCH64YcfNG7cOB0+fFjz58/XjBkzNGjQIJduBwAAAABSZTok5cuXT5cvX5YkRUZG6rfffpMkXbp0SYmJiS4trk6dOvr888+1YMECVa5cWWPHjtXkyZPVq1cvl24HAAAAAFJleOCG3377TZUrV1aTJk20du1aValSRV27dtXgwYP13Xffae3atbrvvvtcXuD999+v+++/3+XrBQAAAIC0ZDgkVa1aVXXq1FHnzp3VtWtXSdIrr7wib29vbd26VV26dNGrr76aZYUCAAAAQHbIcEjauHGjZs+erfHjx+uNN95Qly5d9Pjjj2v48OFZWR8AAAAAZKsMn5PUuHFjzZo1S7GxsZo6daqOHTumpk2bqmzZspowYYJOnz6dlXUCAAAAQLbI9MANgYGB6tevnzZu3KiDBw+qa9eumjZtmkqUKKEHHnggK2oEAAAAgGyT6ZBkVqZMGb388st69dVXFRwcrK+//tpVdQEAAACAW2T4nKSbbdq0SbNmzdKyZcvk4eGhbt26qX///q6sDQAAAACyXaZC0qlTpzRnzhzNmTNHhw8fVoMGDfTee++pW7duCgwMzKoaAQAAACDbZDgktWvXTuvWrVPBggXVu3dvPfbYYypXrlxW1gYAAAAA2S7DIcnb21tLly7V/fffL09Pz6ysCQAAAADcJsMh6csvv8zKOgAAAAAgR7ir0e0AAAAAIK8hJAEAAACACSEJAAAAAEwISQAAAABgQkgCAAAAABNCEgAAAACYEJIAAAAAwISQBAAAAAAmhCQAAAAAMCEkAQAAAIAJIQkAAAAATAhJAAAAAGBCSAIAAAAAE0ISAAAAAJgQkgAAAADAhJAEAAAAACaEJAAAAAAwISQBAAAAgAkhCQAAAABMCEkAAAAAYEJIAgAAAAATQhIAAAAAmBCSAAAAAMCEkAQAAAAAJoQkAAAAADAhJAEAAACACSEJAAAAAEwISQAAAABgQkgCAAAAABNCEgAAAACYEJIAAAAAwISQBAAAAAAmhCQAAAAAMCEkAQAAAIAJIQkAAAAATAhJAAAAAGBCSAIAAAAAE0ISAAAAAJgQkgAAAADAhJAEAAAAACaEJAAAAAAwISQBAAAAgAkhCQAAAABMCEkAAAAAYEJIAgAAAAATQhIAAAAAmBCSAAAAAMCEkAQAAAAAJoQkAAAAADAhJAEAAACACSEJAAAAAEwISQAAAABgQkgCAAAAABNCEgAAAACYEJIAAAAAwISQBAAAAAAmhCQAAAAAMCEkAQAAAIAJIQkAAAAATAhJAAAAAGBCSAIAAAAAE0ISAAAAAJgQkgAAAADAhJAEAAAAACaEJAAAAAAwISQBAAAAgAkhCQAAAABMCEkAAAAAYEJIAgAAAACTXBWS3nzzTVksFsXExLi7FAAAAAB5VK4JSdu3b9f06dNVtWpVd5cCAAAAIA/LFSEpISFBvXr10syZM5UvXz53lwMAAAAgD/NydwEZMWjQIHXo0EEtW7bU66+/fst5k5KSlJSUZL8dHx8vSUpOTlZycnKW1ol/htR+RH+Cq9Cn4Gr0KbgafQqu5q4+ldHt5fiQtHDhQu3atUvbt2/P0Pzjx4/XmDFjnNrXr1+vgIAAV5eHf7C1a9e6uwTkMfQpuBp9Cq5Gn4KrZXefSkxMzNB8FsMwjCyu5Y79+eefql27ttauXWs/F6lZs2aqXr26Jk+enOYyae1JKl68uGJjY1WgQIHsKBt5XHJystauXatWrVrJ29vb3eUgD6BPwdXoU3A1+hRczV19Kj4+XgULFlRcXJxCQkLSnS9H70nauXOnzp49q5o1a9rbrFarNm3apPfff19JSUny9PR0WMbX11e+vr5O6/L29uZFDZeiT8HV6FNwNfoUXI0+BVfL7j6V0W3l6JB03333ac+ePQ5t/fr1U/ny5fXvf//bKSABAAAAwN3K0SEpODhYlStXdmgLDAxUgQIFnNoBAAAAwBVyxRDgAAAAAJBdcvSepLRs2LDB3SUAAAAAyMPYkwQAAAAAJoQkAAAAADAhJAEAAACACSEJAAAAAEwISQAAAABgQkgCAAAAABNCEgAAAACYEJIAAAAAwISQBAAAAAAmhCQAAAAAMCEkAQAAAIAJIQkAAAAATAhJAAAAAGBCSAIAAAAAE0ISAAAAAJgQkgAAAADAhJAEAAAAACaEJAAAAAAwISQBAAAAgAkhCQAAAABMCEkAAAAAYEJIAgAAAAATQhIAAAAAmBCSAAAAAMCEkAQAAAAAJoQkAAAAADAhJAEAAACACSEJAAAAAEwISQAAAABgQkgCAAAAABNCEgAAAACYEJIAAAAAwISQBAAAAAAmhCQAAAAAMCEkAQAAAIAJIQkAAAAATAhJAAAAAGBCSAIAAAAAE0ISAAAAAJgQkgAAAADAhJAEAAAAACaEJAAAAAAwISQBAAAAgAkhCQAAAABMCEkAAAAAYEJIAgAAAAATQhIAAAAAmBCSAAAAAMCEkAQAAAAAJoQkAAAAADAhJAEAAACACSEJAAAAAEwISQAAAABgQkgCAAAAABNCEgAAAACYEJIAAAAAwISQBAAAAAAmhCQAAAAAMCEkAQAAAIAJIQkAAAAATAhJAAAAAGBCSAIAAAAAE0ISAAAAAJgQkgAAAADAhJAEAAAAACaEJAAAAAAwISQBAAAAgAkhCQAAAABMCEkAAAAAYEJIAgAAAAATQhIAAAAAmBCSAAAAAMCEkAQAAAAAJoQkAAAAADAhJAEAAACAiZe7CwByqrhdkxW3a4rzBMNQ+WvXFDvHT7JYnCaH1hys0JoxWV8gAAAAsgQhCUiHLSle1oSTaU7zkWS9kv5yAAAAyL0ISUA6PHxD5BkU6dhoGLJeOZUyPTBcFovzEaseviHZUR4AAACyCCEJSEdozRinw+ZsyVd0fFo+SVLRXr/INyAs+wsDAABAlmLgBgAAAAAwydEhafz48apTp46Cg4NVuHBhde7cWQcOHHB3WQAAAADysBwdkjZu3KhBgwbphx9+0Nq1a5WcnKzWrVvrypV0zpgHAAAAgLuUo89JWrNmjcPtOXPmqHDhwtq5c6eaNGnipqoAAAAA5GU5OiTdLC4uTpKUP3/+dOdJSkpSUlKS/XZ8fMpwzMnJyUpOTs7
aApHn2Ux96EZysjzoU3CB1Pcm3qPgKvQpuBp9Cq7mrj6V0e1ZDMMwsrgWl7DZbHrggQd06dIlbd68Od35Ro8erTFjxji1z58/XwEBAVlZIv4BLLZrqnL8UUnSnpKfyfDwc3NFAAAAyKjExEQ9/PDDiouLU0hI+pdtyTUh6amnntLq1au1efNmFStWLN350tqTVLx4ccXGxqpAgQLZUSryMFvyFZ2aUViSVLjfSfkwBDhcIDk5WWvXrlWrVq3k7e3t7nKQB9Cn4Gr0Kbiau/pUfHy8ChYseNuQlCsOt3vmmWf01VdfadOmTbcMSJLk6+srX19fp3Zvb29e1LhrNv2vD3nRp+BivE/B1ehTcDX6FFwtu/tURreVo0OSYRh69tln9fnnn2vDhg2Kjo52d0kAAAAA8rgcHZIGDRqk+fPn64svvlBwcLBOnz4tSQoNDZW/v7+bq8M/kdVm6IdrlRRry6cqRy+pRYVQeXpY3F0WAAAAXChHh6QPP/xQktSsWTOH9tmzZ6tv377ZXxD+0ZbviVXMij36K/l5qcRv0qLvVcz7kCZ3rqwHq4S7uzwAAAC4SI4OSblkTAn8AyzfE6uuc3fIkE0qc1AWT6uMiIM6eThEXefu0JI+tQlKAAAAeYSHuwsAcjqrzVDMit9kSFKBk5LP1ZQJPldlFDglSRryxW+y2gj1AAAAeQEhCbiN7/+4oL/irkleSVLhY7L8/ylIFoukwkdleCXpz0vX9P0fF9xaJwAAAFyDkATcRuzl/7/uVpEjksXmONFiS2k3zwcAAIBcjZAE3EZ4sK8UcFGWsHP2vUipLBbJEnZOCriUMh8AAAByPUIScBuNSxVQYOHzUnqnHBlSYOFzalyqQLbWBQAAgKxBSAJuw9PDomE1GknpXQ7JIg2r2YjrJQEAAOQRhCQgA0Y0rKsG+cpJxk1ByLCoYf7yGtGgrnsKAwAAgMsRkoAMWtS6m/y9PB3aAry8tLBVVzdVBAAAgKxASAIyKCIgVGOq3Sf9/0WOLZLG1mqniIBQ9xYGAAAAlyIkAZnwTPl6KmW7KEm6J7iABlVo6OaKAAAA4GqEJCATvD08NfbqegUbSXq/dnt5e3jefiEAAADkKoQkIJPqWU9qe/wMNSpc0t2lAAAAIAsQkoA74JXuRZMAAACQ2xGSAAAAAMCEkAQAAAAAJoQkAAAAADAhJAEAAACAiZe7CwByqrhdkxW3a4pjo/G/ARtOz6smi8X5d4bQmoMVWjMmi6sDAABAViEkAemwJcXLmnAy/elXYtNdDgAAALkXIQlIh4dviDyDIp0nGIauXrsmfz8/yWJJczkAAADkXoQkIB2hNWPSPGwuOTlZq1atUvv27eXt7Z39hQEAACBLMXADAAAAAJgQkgAAAADAhJAEAAAAACaEJAAAAAAwISQBAAAAgAkhCQAAAABMCEkAAAAAYEJIAgAAAAATQhIAAAAAmBCSAAAAAMCEkAQAAAAAJoQkAAAAADAhJAEAAACACSEJAAAAAEwISQAAAABgQkgCAAAAABNCEgAAAACYeLm7gKxmGIYk6fLly/L29nZzNcgLkpOTlZiYqPj4ePoUXII+BVejT8HV6FNwNXf1qfj4eEn/ywjpyfMh6cKFC5Kk6OhoN1cCAAAAICe4fPmyQkND052e50NS/vz5JUknTpy45QMBZFR8fLyKFy+uP//8UyEhIe4uB3kAfQquRp+Cq9Gn4Gru6lOGYejy5cuKiIi45Xx5PiR5eKScdhUaGsqLGi4VEhJCn4JL0afgavQpuBp9Cq7mjj6VkR0nDNwAAAAAACaEJAAAAAAwyfMhydfXV6NGjZKvr6+7S0EeQZ+Cq9Gn4Gr0KbgafQqultP7lMW43fh3AAAAAPAPkuf3JAEAAABAZhCSAAAAAMCEkAQAAAAAJoQkAAAAADDJ0yFp2rRpioqKkp+fn+rVq6effvrJ3SUhF9u0aZM6duyoiIgIWSwWrVixwt0lIZcbP3686tSpo+DgYBUuXFidO3fWgQMH3F0WcrEPP/xQVatWtV+csX79+lq9erW7y0Ie8eabb8pisSgmJsbdpSAXGz16tCwWi8Nf+fLl3V2WkzwbkhYtWqTnn39eo0aN0q5du1StWjW1adNGZ8+edXdpyKWuXLmiatWqadq0ae4uBXnExo0bNWjQIP3www9au3atkpOT1bp1a125csXdpSGXKlasmN58803t3LlTO3bsUIsWLdSpUyft3bvX3aUhl9u+fbumT5+uqlWrursU5AGVKlVSbGys/W/z5s3uLslJnh0CvF69eqpTp47ef/99SZLNZlPx4sX17LPPavjw4W6uDrmdxWLR559/rs6dO7u7FOQh586dU+HChbVx40Y1adLE3eUgj8ifP7/eeust9e/f392lIJdKSEhQzZo19cEHH+j1119X9erVNXnyZHeXhVxq9OjRWrFihXbv3u3uUm4pT+5Jun79unbu3KmWLVva2zw8PNSyZUtt27bNjZUBQPri4uIkpXypBe6W1WrVwoULdeXKFdWvX9/d5SAXGzRokDp06ODwvQq4G4cOHVJERIRKlSqlXr166cSJE+4uyYmXuwvICufPn5fValWRIkUc2osUKaL9+/e7qSoASJ/NZlNMTIwaNmyoypUru7sc5GJ79uxR/fr1de3aNQUFBenzzz9XxYoV3V0WcqmFCxdq165d2r59u7tLQR5Rr149zZkzR+XKlVNsbKzGjBmjxo0b67ffflNwcLC7y7PLkyEJAHKbQYMG6bfffsuRx2UjdylXrpx2796tuLg4LV26VH369NHGjRsJSsi0P//8U4MHD9batWvl5+fn7nKQR7Rr187+/6pVq6pevXoqWbKkFi9enKMOC86TIalgwYLy9PTUmTNnHNrPnDmjokWLuqkqAEjbM888o6+++kqbNm1SsWLF3F0OcjkfHx+VKVNGklSrVi1t375dU6ZM0fTp091cGXKbnTt36uzZs6pZs6a9zWq1atOmTXr//feVlJQkT09PN1aIvCAsLExly5bV4cOH3V2Kgzx5TpKPj49q1aqlb7/91t5ms9n07bffclw2gBzDMAw988wz+vzzz/Xdd98pOjra3SUhD7LZbEpKSnJ3GciF7rvvPu3Zs0e7d++2/9WuXVu9evXS7t27CUhwiYSEBB05ckTh4eHuLsVBntyTJEnPP/+8+vTpo9q1a6tu3bqaPHmyrly5on79+rm7NORSCQkJDr9yHD16VLt371b+/PlVokQJN1aG3GrQoEGaP3++vvjiCwUHB+v06dOSpNDQUPn7+7u5OuRGL730ktq1a6cSJUro8uXLmj9/vjZs2KBvvvnG3aUhFwoODnY6RzIwMFAFChTg3EncsaFDh6pjx44qWbKkTp06pVGjRsnT01M9e/Z0d2kO8mxI6t69u86dO6eRI0fq9OnTql69utasWeM0mAOQUTt27FDz5s3tt59//nlJUp8+fTRnzhw3VYXc7MMPP5QkNWvWzKF99uzZ6tu3b/YXhFzv7Nmz6t27t2JjYxUaGqqqVavqm2++UatWrdxdGgBIkv766y/17N
lTFy5cUKFChdSoUSP98MMPKlSokLtLc5Bnr5MEAAAAAHciT56TBAAAAAB3ipAEAAAAACaEJAAAAAAwISQBAAAAgAkhCQAAAABMCEkAAAAAYEJIAgAAAAATQhIAAAAAmBCSAADIpZo1a6aYmBh3lwEAeQ4hCQAgSbJYLLf8Gz16dJZst2/fvurcuXOWrPtOzJkzR2FhYS6bDwCQ+3i5uwAAQM4QGxtr//+iRYs0cuRIHThwwN4WFBRk/79hGLJarfLy4mMEAJD3sCcJACBJKlq0qP0vNDRUFovFfnv//v0KDg7W6tWrVatWLfn6+mrz5s2y2WwaP368oqOj5e/vr2rVqmnp0qX2dVqtVvXv398+vVy5cpoyZYp9+ujRozV37lx98cUX9j1WGzZs0LFjx2SxWLR48WI1btxY/v7+qlOnjg4ePKjt27erdu3aCgoKUrt27XTu3DmH+/Hxxx+rQoUK8vPzU/ny5fXBBx/Yp6Wud/ny5WrevLkCAgJUrVo1bdu2TZK0YcMG9evXT3FxcXe9B+3SpUt6/PHHVahQIYWEhKhFixb65ZdfJEkHDx6UxWLR/v37HZZ59913Vbp0afvt3377Te3atVNQUJCKFCmiRx99VOfPn7+jegAAGUdIAgBk2PDhw/Xmm29q3759qlq1qsaPH69PP/1UH330kfbu3ashQ4bokUce0caNGyVJNptNxYoV05IlS/T7779r5MiRevnll7V48WJJ0tChQ9WtWze1bdtWsbGxio2NVYMGDezbGzVqlF599VXt2rVLXl5eevjhhzVs2DBNmTJF33//vQ4fPqyRI0fa5583b55GjhypN954Q/v27dO4ceM0YsQIzZ071+F+vPLKKxo6dKh2796tsmXLqmfPnrpx44YaNGigyZMnKyQkxF7P0KFD7+ix6tq1q86ePavVq1dr586dqlmzpu677z79/fffKlu2rGrXrq158+Y5LDNv3jw9/PDDklJCVosWLVSjRg3t2LFDa9as0ZkzZ9StW7c7qgcAkAkGAAA3mT17thEaGmq/vX79ekOSsWLFCnvbtWvXjICAAGPr1q0Oy/bv39/o2bNnuuseNGiQ0aVLF/vtPn36GJ06dXKY5+jRo4Yk4+OPP7a3LViwwJBkfPvtt/a28ePHG+XKlbPfLl26tDF//nyHdY0dO9aoX79+uuvdu3evIcnYt29fmvc9Pbea7/vvvzdCQkKMa9euObSXLl3amD59umEYhvHuu+8apUuXtk87cOCAQx1jx441Wrdu7bD8n3/+aUgyDhw4YBiGYTRt2tQYPHjwbWsFAGQOB5MDADKsdu3a9v8fPnxYiYmJatWqlcM8169fV40aNey3p02bplmzZunEiRO6evWqrl+/rurVq2doe1WrVrX/v0iRIpKkKlWqOLSdPXtWknTlyhUdOXJE/fv314ABA+zz3LhxQ6GhoemuNzw8XJJ09uxZlS9fPkN13c4vv/yihIQEFShQwKH96tWrOnLkiCSpR48eGjp0qH744Qfde++9mjdvnmrWrGmv4ZdfftH69esdzgVLdeTIEZUtW9YltQIAnBGSAAAZFhgYaP9/QkKCJOnrr79WZGSkw3y+vr6SpIULF2ro0KF65513VL9+fQUHB+utt97Sjz/+mKHteXt72/9vsVjSbLPZbA71zJw5U/Xq1XNYj6en523Xm7oeV0hISFB4eLg2bNjgNC11RLyiRYuqRYsWmj9/vu69917Nnz9fTz31lMM6OnbsqAkTJjitIzXYAQCyBiEJAHBHKlasKF9fX504cUJNmzZNc54tW7aoQYMGevrpp+1tqXtSUvn4+Mhqtd51PUWKFFFERIT++OMP9erV647X44p6atasqdOnT8vLy0tRUVHpzterVy8NGzZMPXv21B9//KEePXo4rGPZsmWKiopiFEEAyGa86wIA7khwcLCGDh2qIUOGyGazqVGjRoqLi9OWLVsUEhKiPn366J577tGnn36qb775RtHR0frss8+0fft2RUdH29cTFRWlb775RgcOHFCBAgWcDo3LjDFjxui5555TaGio2rZtq6SkJO3YsUMXL17U888/n6F1REVFKSEhQd9++62qVaumgIAABQQEpDmv1WrV7t27Hdp8fX3VsmVL1a9fX507d9bEiRNVtmxZnTp1Sl9//bX+9a9/2Q9bfPDBB/XUU0/pqaeeUvPmzRUREWFfz6BBgzRz5kz17NlTw4YNU/78+XX48GEtXLhQH3/8sdPeMQCA6zC6HQDgjo0dO1YjRozQ+PHjVaFCBbVt21Zff/21PQQNHDhQDz74oLp376569erpwoULDnuVJGnAgAEqV66cateurUKFCmnLli13XM/jjz+ujz/+WLNnz1aVKlXUtGlTzZkzxyGU3U6DBg305JNPqnv37ipUqJAmTpyY7rwJCQmqUaOGw1/Hjh1lsVi0atUqNWnSRP369VPZsmXVo0cPHT9+3H5ulZQSNDt27KhffvnFae9XRESEtmzZIqvVqtatW6tKlSqKiYlRWFiYPDz4+AaArGQxDMNwdxEAAAAAkFPwUxQAAAAAmBCSAAAAAMCEkAQAAAAAJoQkAAAAADAhJAEAAACACSEJAAAAAEwISQAAAABgQkgCAAAAABNCEgAAAACYEJIAAAAAwISQBAAAAAAm/wfsNc5JEJEsXQAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "ates = causal_contrast_model.thetas\n", "ci_ates = causal_contrast_model.confint(level=0.95)\n", @@ -484,9 +965,37 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "================== Sensitivity Analysis ==================\n", + "\n", + "------------------ Scenario ------------------\n", + "Significance Level: level=0.95\n", + "Sensitivity parameters: cf_y=0.03; cf_d=0.03, rho=1.0\n", + "\n", + "------------------ Bounds with CI ------------------\n", + " CI lower theta lower theta theta upper CI upper\n", + "1.0 vs 0.0 1.219585 1.518854 1.810306 2.101998 2.400905\n", + "2.0 vs 0.0 6.469676 6.708821 6.994208 7.279595 7.517798\n", + "3.0 vs 0.0 8.835344 9.060016 9.335446 9.610318 9.833065\n", + "4.0 vs 0.0 9.914598 10.148005 10.431998 10.716098 10.947855\n", + "5.0 vs 0.0 9.786986 10.040784 10.342362 10.643939 10.899654\n", + "\n", + "------------------ Robustness Values ------------------\n", + " H_0 RV (%) RVa (%)\n", + "1.0 vs 0.0 0.0 17.204893 14.534139\n", + "2.0 vs 0.0 0.0 51.844663 50.155423\n", + "3.0 vs 0.0 0.0 62.924443 61.490896\n", + "4.0 vs 0.0 0.0 65.700314 64.285001\n", + "5.0 vs 0.0 0.0 63.327958 61.784872\n" + ] + } + ], "source": [ "causal_contrast_model.sensitivity_analysis()\n", "print(causal_contrast_model.sensitivity_summary)" @@ -501,9 +1010,11393 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + " \n", + " " + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.plotly.v1+json": { + "config": { + "plotlyServerURL": "https://plot.ly" + }, + "data": [ + { + "contours": { + "coloring": "heatmap", + "labelfont": { + "color": "white", + "size": 12 + }, + "showlabels": true + }, + "hovertemplate": "cf_d: %{x:.3f}
cf_y : %{y:.3f}
Bound: %{z:.3f}", + "name": "Contour", + "type": "contour", + "x": [ + 0, + 0.0015151515151515152, + 0.0030303030303030303, + 0.004545454545454545, + 0.006060606060606061, + 0.007575757575757576, + 0.00909090909090909, + 0.010606060606060607, + 0.012121212121212121, + 0.013636363636363636, + 0.015151515151515152, + 0.016666666666666666, + 0.01818181818181818, + 0.019696969696969695, + 0.021212121212121213, + 0.022727272727272728, + 0.024242424242424242, + 0.025757575757575757, + 0.02727272727272727, + 0.02878787878787879, + 0.030303030303030304, + 0.031818181818181815, + 0.03333333333333333, + 0.03484848484848485, + 0.03636363636363636, + 0.03787878787878788, + 0.03939393939393939, + 0.04090909090909091, + 0.04242424242424243, + 0.04393939393939394, + 0.045454545454545456, + 0.04696969696969697, + 0.048484848484848485, + 0.05, + 0.051515151515151514, + 0.05303030303030303, + 0.05454545454545454, + 0.05606060606060606, + 0.05757575757575758, + 0.05909090909090909, + 0.06060606060606061, + 0.06212121212121212, + 0.06363636363636363, + 0.06515151515151515, + 0.06666666666666667, + 0.06818181818181818, + 0.0696969696969697, + 0.07121212121212121, + 0.07272727272727272, + 0.07424242424242425, + 0.07575757575757576, + 0.07727272727272727, + 0.07878787878787878, + 0.0803030303030303, + 0.08181818181818182, + 0.08333333333333333, + 0.08484848484848485, + 0.08636363636363636, + 0.08787878787878788, + 0.0893939393939394, + 0.09090909090909091, + 0.09242424242424242, + 0.09393939393939393, + 0.09545454545454546, + 0.09696969696969697, + 0.09848484848484848, + 0.1, + 0.10151515151515152, + 0.10303030303030303, + 0.10454545454545454, + 0.10606060606060606, + 0.10757575757575757, + 0.10909090909090909, + 0.11060606060606061, + 0.11212121212121212, + 0.11363636363636363, + 0.11515151515151516, + 0.11666666666666667, + 0.11818181818181818, + 0.11969696969696969, + 0.12121212121212122, + 0.12272727272727273, + 0.12424242424242424, + 0.12575757575757576, + 0.12727272727272726, + 0.12878787878787878, + 0.1303030303030303, + 0.1318181818181818, + 0.13333333333333333, + 0.13484848484848486, + 0.13636363636363635, + 0.13787878787878788, + 0.1393939393939394, + 0.1409090909090909, + 0.14242424242424243, + 0.14393939393939395, + 0.14545454545454545, + 0.14696969696969697, + 0.1484848484848485, + 0.15 + ], + "y": [ + 0, + 0.0015151515151515152, + 0.0030303030303030303, + 0.004545454545454545, + 0.006060606060606061, + 0.007575757575757576, + 0.00909090909090909, + 0.010606060606060607, + 0.012121212121212121, + 0.013636363636363636, + 0.015151515151515152, + 0.016666666666666666, + 0.01818181818181818, + 0.019696969696969695, + 0.021212121212121213, + 0.022727272727272728, + 0.024242424242424242, + 0.025757575757575757, + 0.02727272727272727, + 0.02878787878787879, + 0.030303030303030304, + 0.031818181818181815, + 0.03333333333333333, + 0.03484848484848485, + 0.03636363636363636, + 0.03787878787878788, + 0.03939393939393939, + 0.04090909090909091, + 0.04242424242424243, + 0.04393939393939394, + 0.045454545454545456, + 0.04696969696969697, + 0.048484848484848485, + 0.05, + 0.051515151515151514, + 0.05303030303030303, + 0.05454545454545454, + 0.05606060606060606, + 0.05757575757575758, + 0.05909090909090909, + 0.06060606060606061, + 0.06212121212121212, + 0.06363636363636363, + 0.06515151515151515, + 0.06666666666666667, + 0.06818181818181818, + 0.0696969696969697, + 0.07121212121212121, + 0.07272727272727272, + 0.07424242424242425, + 0.07575757575757576, + 0.07727272727272727, + 0.07878787878787878, + 
0.0803030303030303, + 0.08181818181818182, + 0.08333333333333333, + 0.08484848484848485, + 0.08636363636363636, + 0.08787878787878788, + 0.0893939393939394, + 0.09090909090909091, + 0.09242424242424242, + 0.09393939393939393, + 0.09545454545454546, + 0.09696969696969697, + 0.09848484848484848, + 0.1, + 0.10151515151515152, + 0.10303030303030303, + 0.10454545454545454, + 0.10606060606060606, + 0.10757575757575757, + 0.10909090909090909, + 0.11060606060606061, + 0.11212121212121212, + 0.11363636363636363, + 0.11515151515151516, + 0.11666666666666667, + 0.11818181818181818, + 0.11969696969696969, + 0.12121212121212122, + 0.12272727272727273, + 0.12424242424242424, + 0.12575757575757576, + 0.12727272727272726, + 0.12878787878787878, + 0.1303030303030303, + 0.1318181818181818, + 0.13333333333333333, + 0.13484848484848486, + 0.13636363636363635, + 0.13787878787878788, + 0.1393939393939394, + 0.1409090909090909, + 0.14242424242424243, + 0.14393939393939395, + 0.14545454545454545, + 0.14696969696969697, + 0.1484848484848485, + 0.15 + ], + "z": [ + [ + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103, + 1.8103060423412103 + ], + [ + 1.8103060423412103, + 1.7957977316885265, + 1.7897881926490626, + 1.7851769111587692, + 1.7812894210358425, + 1.7778644734831248, + 1.7747680842123492, + 1.7719206604105397, + 1.7692703429569152, + 1.7667811103831588, + 1.7644267356774452, + 1.7621874195643417, + 1.7600477799763277, + 
1.757995584362598, + 1.7560209146179848, + 1.754115596801762, + 1.7522727997304748, + 1.750486745070921, + 1.7487524932647678, + 1.7470657823646667, + 1.7454229046250394, + 1.7438206105693523, + 1.7422560334074477, + 1.7407266287630554, + 1.7392301260834881, + 1.737764489077791, + 1.7363278832139093, + 1.7349186487938866, + 1.7335352784798688, + 1.7321763984034027, + 1.7308407521835358, + 1.7295271873243299, + 1.7282346435726201, + 1.7269621429014328, + 1.725708780849939, + 1.7244737190019386, + 1.723256178425107, + 1.7220554339251384, + 1.7208708089943818, + 1.7197016713550575, + 1.7185474290136802, + 1.717407526756805, + 1.7162814430292177, + 1.71516868714476, + 1.714068796787473, + 1.7129813357669539, + 1.711905891997017, + 1.7108420756710907, + 1.7097895176114455, + 1.7087478677724233, + 1.7077167938804725, + 1.7066959801960027, + 1.705685126383986, + 1.7046839464818526, + 1.7036921679546273, + 1.7027095308284623, + 1.701735786894759, + 1.7007706989779723, + 1.6998140402609816, + 1.6988655936625907, + 1.697925151262314, + 1.6969925137681332, + 1.696067490023368, + 1.6951498965491982, + 1.6942395571197393, + 1.6933363023668875, + 1.6924399694124177, + 1.6915504015250777, + 1.6906674478006314, + 1.6897909628629981, + 1.6889208065848194, + 1.688056843825926, + 1.6871989441883248, + 1.686346981786449, + 1.6855008350315241, + 1.684660386429004, + 1.683825522388123, + 1.6829961330426877, + 1.6821721120823097, + 1.6813533565933452, + 1.6805397669088684, + 1.6797312464670555, + 1.678927701677416, + 1.6781290417943424, + 1.6773351787974944, + 1.6765460272785755, + 1.6757615043340839, + 1.6749815294636616, + 1.6742060244736852, + 1.6734349133857749, + 1.672668122349915, + 1.6719055795619082, + 1.6711472151849, + 1.6703929612747332, + 1.6696427517089016, + 1.6688965221188996, + 1.668154209825766, + 1.6674157537786432, + 1.6666810944961772, + 1.6659501740106044 + ], + [ + 1.8103060423412103, + 1.7897726074970304, + 1.7812673803024666, + 1.7747410899371856, + 1.7692391726528505, + 1.764391886218061, + 1.7600096043062847, + 1.755979680181562, + 1.7522287182637233, + 1.7487057378086706, + 1.7453736200469372, + 1.7422043433058558, + 1.7391761375331607, + 1.7362716901491204, + 1.733476964180643, + 1.7307803911492663, + 1.7281723029644906, + 1.7256445216219185, + 1.7231900562249796, + 1.720802874891653, + 1.7184777300949121, + 1.716210022890312, + 1.713995695944761, + 1.7118311482336044, + 1.7097131662713594, + 1.7076388681203105, + 1.7056056573896508, + 1.703611185129136, + 1.7016533180219136, + 1.699730111648743, + 1.697839787869011, + 1.695980715569293, + 1.6941513941862363, + 1.6923504395302118, + 1.6905765715288568, + 1.6888286035819642, + 1.6871054332761308, + 1.6854060342527126, + 1.6837294490586965, + 1.6820747828390625, + 1.6804411977526639, + 1.6788279080126935, + 1.6772341754684272, + 1.6756593056577398, + 1.6741026442705012, + 1.6725635739717628, + 1.6710415115409796, + 1.669535905289673, + 1.668046232725111, + 1.6665719984319507, + 1.6651127321474926, + 1.6636679870093514, + 1.662237337957031, + 1.6608203802711987, + 1.659416728236434, + 1.6580260139149257, + 1.6566478860200757, + 1.6552820088802325, + 1.6539280614838958, + 1.6525857365987004, + 1.6512547399573223, + 1.6499347895042036, + 1.6486256146976288, + 1.647326955862265, + 1.646038563587771, + 1.6447601981695372, + 1.6434916290880022, + 1.6422326345233396, + 1.6409830009026263, + 1.6397425224768716, + 1.6385110009255373, + 1.637288244986392, + 1.636074070108749, + 1.6348682981283034, + 1.6336707569619477, + 1.6324812803210864, + 
1.6312997074420958, + 1.6301258828326948, + 1.6289596560330895, + 1.6278008813908569, + 1.6266494178486137, + 1.6255051287435909, + 1.6243678816183102, + 1.623237548041622, + 1.6221140034394135, + 1.6209971269343666, + 1.6198868011941672, + 1.6187829122876423, + 1.6176853495483121, + 1.6165940054449033, + 1.6155087754583908, + 1.6144295579651713, + 1.6133562541259985, + 1.6122887677803353, + 1.6112270053458069, + 1.610170875722454, + 1.6091202902015085, + 1.6080751623784368, + 1.6070354080700056, + 1.6060009452351467 + ], + [ + 1.8103060423412103, + 1.7851386919257601, + 1.7747140340546843, + 1.7667149127297608, + 1.7599713415103098, + 1.7540301359987058, + 1.748658875645535, + 1.7437194919835108, + 1.7391220257681583, + 1.7348039910948594, + 1.7307198923568026, + 1.7268353840457666, + 1.7231237831183113, + 1.7195638699507345, + 1.7161384397537125, + 1.7128333133140052, + 1.7096366406794092, + 1.7065383982613764, + 1.7035300174816326, + 1.7006041052035863, + 1.6977542296562014, + 1.6949747540209312, + 1.6922607053195837, + 1.6896076698605365, + 1.6870117089498595, + 1.684469290263959, + 1.681977231467402, + 1.6795326535068615, + 1.6771329416258114, + 1.6747757125951055, + 1.6724587869894192, + 1.670180165591211, + 1.6679380091951066, + 1.6657306212322813, + 1.6635564327480048, + 1.6614139893541813, + 1.6593019398485087, + 1.6572190262472242, + 1.6551640750225816, + 1.6531359893717323, + 1.6511337423723949, + 1.649156370904075, + 1.6472029702327098, + 1.6452726891723288, + 1.643364725750323, + 1.6414783233136974, + 1.6396127670226819, + 1.6377673806856237, + 1.6359415238954123, + 1.6341345894330583, + 1.6323460009085808, + 1.630575210613214, + 1.6288216975602587, + 1.6270849656947026, + 1.625364542254184, + 1.6236599762659418, + 1.621970837166215, + 1.6202967135301183, + 1.6186372119013712, + 1.6169919557124552, + 1.6153605842868, + 1.6137427519155108, + 1.6121381270019406, + 1.6105463912681117, + 1.6089672390176082, + 1.6074003764501017, + 1.6058455210231615, + 1.6043024008574194, + 1.6027707541815426, + 1.60125032881381, + 1.5997408816773804, + 1.5982421783466143, + 1.5967539926220549, + 1.5952761061318848, + 1.5938083079578669, + 1.5923503942839625, + 1.590902168065962, + 1.589463438720616, + 1.5880340218328768, + 1.5866137388799801, + 1.585202416971193, + 1.583799888602158, + 1.5824059914228457, + 1.581020568018201, + 1.5796434657006522, + 1.5782745363136979, + 1.5769136360458693, + 1.5755606252543934, + 1.5742153682979572, + 1.5728777333779957, + 1.5715475923879871, + 1.5702248207702598, + 1.5689092973798626, + 1.5676009043550772, + 1.5662995269941768, + 1.5650050536380742, + 1.5637173755585088, + 1.5624363868514666, + 1.5611619843355289, + 1.5598940674548796 + ], + [ + 1.8103060423412103, + 1.7812231477375466, + 1.769176618359642, + 1.7599329912564943, + 1.752140253133883, + 1.7452747130249566, + 1.7390677903190919, + 1.733359935814014, + 1.7280471943780737, + 1.7230573585302196, + 1.7183378544430132, + 1.7138489931234062, + 1.7095599401717783, + 1.7054461746087861, + 1.7014878149186443, + 1.6976684758817155, + 1.693974463926556, + 1.6903941959915993, + 1.686917770396505, + 1.6835366437781962, + 1.680243383708703, + 1.677031476391499, + 1.6738951751509028, + 1.670829379611779, + 1.667829538296974, + 1.6648915693228923, + 1.6620117952453672, + 1.659186889087062, + 1.6564138292868176, + 1.653689861831959, + 1.651012468221492, + 1.6483793381989906, + 1.6457883464149368, + 1.643237532347802, + 1.640725082944392, + 1.638249317542459, + 1.6358086747192286, + 1.633401700773433, + 
1.6310270395995154, + 1.6286834237536993, + 1.6263696665448166, + 1.6240846550097867, + 1.6218273436557413, + 1.6195967488689318, + 1.6173919439056024, + 1.6152120543924493, + 1.6130562542747104, + 1.6109237621586252, + 1.6088138380023458, + 1.606725780115565, + 1.6046589224333685, + 1.6026126320342917, + 1.6005863068763622, + 1.598579373728176, + 1.5965912862748555, + 1.5946215233811576, + 1.5926695874960783, + 1.590735003185121, + 1.5888173157779544, + 1.5869160901205688, + 1.5850309094222208, + 1.5831613741885193, + 1.5813071012329076, + 1.5794677227596214, + 1.5776428855119016, + 1.5758322499798734, + 1.5740354896630673, + 1.5722522903830383, + 1.5704823496419886, + 1.568725376023687, + 1.5669810886333213, + 1.5652492165732383, + 1.5635294984518002, + 1.5618216819228334, + 1.5601255232533773, + 1.5584407869176298, + 1.5567672452151822, + 1.5551046779117836, + 1.5534528719010368, + 1.55181162088555, + 1.5501807250761959, + 1.5485599909082377, + 1.546949230773178, + 1.545348262765279, + 1.5437569104417879, + 1.5421750025959704, + 1.5406023730421232, + 1.5390388604118086, + 1.5374843079605953, + 1.535938563384657, + 1.5344014786466196, + 1.5328729098100897, + 1.5313527168823478, + 1.529840763664712, + 1.528336917610126, + 1.5268410496875466, + 1.5253530342527375, + 1.523872748925104, + 1.5224000744702317, + 1.5209348946877985 + ], + [ + 1.8103060423412103, + 1.777765566031508, + 1.764286859417949, + 1.7539442840703146, + 1.745225089721806, + 1.7375433252924946, + 1.7305984793953157, + 1.7242120344821494, + 1.7182676764946878, + 1.7126846134121037, + 1.7074040210558006, + 1.7023814919224793, + 1.697582525799419, + 1.6929796864785578, + 1.6885507287878705, + 1.6842773195160765, + 1.6801441371024013, + 1.6761382214083989, + 1.6722484935714266, + 1.668465394532539, + 1.664780608243779, + 1.6611868465020827, + 1.6576776794260222, + 1.6542474002715297, + 1.6508909164494212, + 1.6476036607926994, + 1.6443815186564348, + 1.6412207675285235, + 1.6381180266230886, + 1.6350702145108078, + 1.6320745132733467, + 1.6291283379945356, + 1.6262293106481653, + 1.6233752376319392, + 1.6205640903439837, + 1.617793988312949, + 1.615063184482997, + 1.6123700523264968, + 1.6097130745144017, + 1.6070908329201896, + 1.6045019997703909, + 1.6019453297849466, + 1.5994196531753464, + 1.5969238693888324, + 1.5944569415037484, + 1.5920178911950633, + 1.5896057942007396, + 1.5872197762293592, + 1.5848590092576278, + 1.5825227081732947, + 1.580210127724904, + 1.5779205597447779, + 1.5756533306159053, + 1.573407798957049, + 1.5711833535035264, + 1.5689794111638244, + 1.566795415234531, + 1.5646308337581085, + 1.5624851580097787, + 1.5603579011013218, + 1.558248596690943, + 1.5561567977895105, + 1.5540820756545202, + 1.5520240187640277, + 1.5499822318635925, + 1.5479563350799883, + 1.545945963096043, + 1.5439507643815396, + 1.5419704004755874, + 1.5400045453163176, + 1.5380528846141412, + 1.5361151152651624, + 1.5341909448016429, + 1.5322800908766983, + 1.5303822807806569, + 1.528497250986732, + 1.5266247467238676, + 1.5247645215747965, + 1.5229163370975138, + 1.521079962468525, + 1.5192551741463478, + 1.5174417555538904, + 1.5156394967784204, + 1.5138481942879496, + 1.5120676506629551, + 1.5102976743424246, + 1.508538079383316, + 1.5067886852325612, + 1.5050493165108338, + 1.503319802807345, + 1.5015999784849812, + 1.4998896824951666, + 1.4981887582018492, + 1.4964970532140773, + 1.494814419226657, + 1.4931407118684148, + 1.4914757905576321, + 1.4898195183642384, + 1.4881717618783814, + 1.4865323910850174 + ], + [ + 
[diff body omitted: the change continues with a long series of raw JSON numeric arrays (roughly 100 floating-point values per array, each array starting at 1.8103060423412103 and decreasing monotonically); this machine-generated data is not reproduced here]
1.1593657395907984, + 1.1528885621205411, + 1.1464745811595414, + 1.140121982243176, + 1.1338290361064223, + 1.1275940931860213, + 1.121415578570537, + 1.1152919873545004, + 1.1092218803578109, + 1.1032038801758968, + 1.0972366675299248, + 1.091318977889663, + 1.0854495983445047, + 1.079627364700727, + 1.0738511587853057, + 1.068119905938598, + 1.0624325726799762, + 1.0567881645320445, + 1.051185723990473, + 1.0456243286277065, + 1.0401030893199195, + 1.0346211485875594, + 1.029177679040716, + 1.023771881921331, + 1.0184029857349735, + 1.013070244965555, + 1.0077729388669097, + 1.002510370325701, + 0.9972818647905765, + 0.9920867692629112, + 0.9869244513448628, + 0.9817942983408177, + 0.9766957164086028, + 0.9716281297571454, + 0.9665909798875048, + 0.9615837248744452, + 0.9566058386859387, + 0.9516568105381719, + 0.9467361442838242, + 0.9418433578315417, + 0.9369779825946819, + 0.9321395629675485, + 0.9273276558274588, + 0.9225418300611006, + 0.9177816661137486, + 0.9130467555600057, + 0.9083367006948262, + 0.9036511141436585, + 0.8989896184906337, + 0.8943518459237806 + ], + [ + 1.8103060423412103, + 1.7169707145886768, + 1.6783099559850394, + 1.6486445125327287, + 1.6236353868361433, + 1.6016019047843226, + 1.5816821143720734, + 1.5633639765713014, + 1.5463138696288685, + 1.5303000590836096, + 1.5151538204848798, + 1.5007477805012113, + 1.4869829827242471, + 1.4737807323172136, + 1.4610772238089762, + 1.448819872342837, + 1.4369647313310763, + 1.4254746274158712, + 1.4143177832726979, + 1.4034667808056693, + 1.392897767227435, + 1.3825898379017127, + 1.372524550102442, + 1.3626855352668854, + 1.3530581864029363, + 1.3436294035785425, + 1.3343873848234638, + 1.3253214529157655, + 1.3164219108013926, + 1.3076799200659024, + 1.2990873981192046, + 1.2906369306875942, + 1.2823216969165268, + 1.2741354049316218, + 1.2660722361265677, + 1.2581267967754701, + 1.250294075826009, + 1.2425694079349958, + 1.2349484409717935, + 1.22742710734678, + 1.2200015986285493, + 1.212668343000224, + 1.2054239851761295, + 1.1982653684583933, + 1.1911895186612123, + 1.1841936296705473, + 1.1772750504403782, + 1.1704312732546145, + 1.1636599231072842, + 1.1569587480734755, + 1.150325610560356, + 1.1437584793419031, + 1.137255422293217, + 1.1308145997507526, + 1.1244342584337994, + 1.1181127258702932, + 1.1118484052767423, + 1.105639770847851, + 1.0994853634164723, + 1.0933837864489144, + 1.0873337023444636, + 1.081333829011344, + 1.0753829366942882, + 1.0694798450314837, + 1.063623420320942, + 1.0578125729783623, + 1.0520462551703433, + 1.0463234586083863, + 1.040643212490532, + 1.0350045815787356, + 1.029406664401197, + 1.0238485915698587, + 1.0183295242041852, + 1.0128486524531246, + 1.007405194107886, + 1.0019983932988026, + 0.9966275192701284, + 0.991291865227157, + 0.9859907472505013, + 0.9807235032728211, + 0.9754894921136597, + 0.9702880925684085, + 0.9651187025477334, + 0.9599807382640899, + 0.9548736334622154, + 0.9497968386907247, + 0.944749820612162, + 0.93973206134905, + 0.9347430578636735, + 0.9297823213694898, + 0.9248493767722188, + 0.9199437621388031, + 0.9150650281925605, + 0.9102127378329635, + 0.9053864656785942, + 0.9005857976319254, + 0.8958103304646623, + 0.8910596714224739, + 0.8863334378480143, + 0.8816312568212131 + ], + [ + 1.8103060423412103, + 1.7157054829762406, + 1.676520648279189, + 1.6464530670966468, + 1.621104923611271, + 1.5987727608916336, + 1.5785829425151658, + 1.5600164883738978, + 1.5427352542171675, + 1.5265043642463012, + 1.5111528068219338, + 1.496551481969864, + 
1.4826000918520834, + 1.4692188748632367, + 1.4563431606003245, + 1.4439196513787764, + 1.431903804881331, + 1.4202579438369258, + 1.4089498601551464, + 1.3979517640668868, + 1.3872394794420568, + 1.376791818266073, + 1.3665900878076442, + 1.3566176976156719, + 1.3468598426891212, + 1.3373032455163616, + 1.3279359441424368, + 1.3187471166075202, + 1.3097269344065856, + 1.3008664393137366, + 1.292157439173205, + 1.2835924192051458, + 1.275164466093125, + 1.2668672026716017, + 1.2586947314586145, + 1.2506415856121977, + 1.2427026861513917, + 1.23487330449072, + 1.2271490295030927, + 1.2195257384596054, + 1.2119995713026575, + 1.2045669077966674, + 1.1972243471725021, + 1.1899686899408393, + 1.1827969215985177, + 1.17570619799248, + 1.1686938321397542, + 1.1617572823302433, + 1.1548941413629565, + 1.148102126786422, + 1.1413790720311034, + 1.1347229183361598, + 1.1281317073852632, + 1.1216035745768251, + 1.1151367428630767, + 1.1087295171003182, + 1.1023802788594386, + 1.0960874816516868, + 1.0898496465297915, + 1.083665358028981, + 1.0775332604163426, + 1.0714520542203672, + 1.0654204930155167, + 1.0594373804392732, + 1.0535015674214523, + 1.0476119496076062, + 1.0417674649601518, + 1.0359670915224708, + 1.0302098453326418, + 1.0244947784747547, + 1.0188209772568704, + 1.0131875605057137, + 1.0075936779690824, + 1.0020385088177752, + 0.996521260239565, + 0.9910411661183933, + 0.9855974857925633, + 0.9801895028862264, + 0.974816524208946, + 0.9694778787185543, + 0.9641729165429035, + 0.9589010080564825, + 0.9536615430081767, + 0.9484539296967562, + 0.9432775941909354, + 0.9381319795910966, + 0.9330165453299875, + 0.9279307665099066, + 0.9228741332740782, + 0.917846150210085, + 0.9128463357833811, + 0.9078742217990542, + 0.9029293528901338, + 0.8980112860308606, + 0.8931195900734472, + 0.8882538453069584, + 0.8834136430370321, + 0.8785985851852525, + 0.8738082839070609, + 0.8690423612271712 + ], + [ + 1.8103060423412103, + 1.7144529436072633, + 1.6747492901161753, + 1.6442836052710983, + 1.6185998448733163, + 1.595971997718106, + 1.5755148601784241, + 1.5567025806962667, + 1.53919253789114, + 1.5227467461393696, + 1.507191929556936, + 1.4923972788478181, + 1.478261168200986, + 1.4647027799438521, + 1.451656587418166, + 1.4390685872622586, + 1.4268936474054226, + 1.4150935917183887, + 1.403635785666105, + 1.392492071534709, + 1.3816379530950016, + 1.3710519617968226, + 1.3607151574115943, + 1.3506107298257413, + 1.340723678015638, + 1.3310405486714756, + 1.3215492214584792, + 1.3122387311308739, + 1.3030991190513228, + 1.2941213083843703, + 1.2852969985076843, + 1.276618575143513, + 1.2680790334410699, + 1.2596719117992572, + 1.2513912346517087, + 1.2432314627738554, + 1.2351874499375288, + 1.2272544049493823, + 1.2194278582777, + 1.2117036326074277, + 1.2040778167726618, + 1.1965467426048308, + 1.1891069643076135, + 1.1817552400295033, + 1.174488515354426, + 1.1673039084718972, + 1.1601986968224955, + 1.1531703050431314, + 1.1462162940607619, + 1.1393343512035816, + 1.1325222812160347, + 1.1257779980786768, + 1.1190995175464942, + 1.1124849503300211, + 1.1059324958528516, + 1.0994404365270818, + 1.0930071324951214, + 1.0866310167922506, + 1.0803105908894994, + 1.074044420580924, + 1.067831132183307, + 1.061669409019753, + 1.0555579881616808, + 1.049495657406379, + 1.0434812524696349, + 1.0375136543750207, + 1.0315917870232627, + 1.0257146149267342, + 1.019881141095567, + 1.0140904050631645, + 1.0083414810400362, + 1.0026334761859121, + 0.9969655289909997, + 0.9913368077580766, + 
0.985746509177847, + 0.9801938569906496, + 0.9746781007282079, + 0.9691985145296451, + 0.9637543960264785, + 0.9583450652917432, + 0.9529698638487929, + 0.9476281537356879, + 0.942319316621409, + 0.9370427529704262, + 0.9317978812524351, + 0.926584137194302, + 0.9214009730715041, + 0.9162478570365397, + 0.9111242724819786, + 0.9060297174359961, + 0.9009637039883873, + 0.8959257577452004, + 0.8909154173102718, + 0.8859322337920501, + 0.880975770334224, + 0.876045601668761, + 0.8711413136900653, + 0.8662625030490447, + 0.8614087767659646, + 0.8565797518610339 + ], + [ + 1.8103060423412103, + 1.7132125860715544, + 1.6729951596669839, + 1.6421352429996994, + 1.6161191298018984, + 1.5931984739518565, + 1.5724766171173212, + 1.5534209031199757, + 1.5356842769927574, + 1.5190256735322427, + 1.5032695746311422, + 1.4882834782959817, + 1.4739644436581885, + 1.4602306072489455, + 1.4470155944824397, + 1.434264703183115, + 1.4219322172625868, + 1.40997946658513, + 1.3983733943185312, + 1.3870854783826947, + 1.3760909055625028, + 1.3653679295006276, + 1.3548973648782896, + 1.3446621840558097, + 1.3346471918934322, + 1.324838760992931, + 1.3152246141800674, + 1.3057936443166773, + 1.2965357638987411, + 1.2874417786348065, + 1.2785032804908911, + 1.2697125566590253, + 1.261062511644305, + 1.2525466002312715, + 1.244158769528581, + 1.2358934086330327, + 1.2277453047232751, + 1.2197096046069988, + 1.2117817809158877, + 1.203957602279627, + 1.1962331069210739, + 1.1886045792048505, + 1.1810685287453675, + 1.1736216717409371, + 1.1662609142507532, + 1.1589833371731493, + 1.1517861827182614, + 1.1446668421973065, + 1.1376228449751666, + 1.1306518484536192, + 1.1237516289700786, + 1.1169200735116078, + 1.110155172156681, + 1.103455011168063, + 1.0968177666695431, + 1.0902416988472994, + 1.083725146623669, + 1.0772665227571077, + 1.0708643093273915, + 1.0645170535696764, + 1.0582233640250198, + 1.0519819069784737, + 1.0457914031589186, + 1.0396506246775068, + 1.0335583921839633, + 1.027513572222091, + 1.0215150747676838, + 1.015561850933706, + 1.0096528908290496, + 1.0037872215584942, + 0.9979639053526564, + 0.9921820378177408, + 0.9864407462958522, + 0.9807391883274467, + 0.9750765502082512, + 0.9694520456336557, + 0.9638649144241792, + 0.9583144213261656, + 0.9527998548823495, + 0.9473205263673798, + 0.9418757687837955, + 0.9364649359143075, + 0.9310874014265733, + 0.9257425580269589, + 0.9204298166600449, + 0.9151486057508951, + 0.9098983704873262, + 0.9046785721396272, + 0.8994886874153687, + 0.8943282078471138, + 0.8891966392110057, + 0.8840935009743438, + 0.879018325770409, + 0.8739706588989073, + 0.8689500578505224, + 0.863956091854172, + 0.858988341445654, + 0.8540463980564612, + 0.8491298636216259, + 0.8442383502055245 + ], + [ + 1.8103060423412103, + 1.7119839302487732, + 1.6712575779389178, + 1.6400071486896266, + 1.6136618181563356, + 1.5904511160111663, + 1.5694670372820072, + 1.5501701853660046, + 1.5322091135366254, + 1.5153397060638985, + 1.4993842237707247, + 1.4842084879353303, + 1.469708255038043, + 1.4558006256800098, + 1.442418385347327, + 1.4295061396432618, + 1.417017593971461, + 1.4049135888502717, + 1.393160649134333, + 1.3817298918147978, + 1.3705961896811225, + 1.3597375211896834, + 1.3491344582437166, + 1.338769757729613, + 1.3286280322228041, + 1.3186954818790237, + 1.3089596741642153, + 1.2994093613864592, + 1.290034328390799, + 1.2808252645378833, + 1.2717736553954166, + 1.2628716905547592, + 1.2541121847320402, + 1.24548850988627, + 1.236994536530621, + 1.2286245827594877, + 
1.2203733697865866, + 1.2122359830055514, + 1.2042078377571046, + 1.1962846491256367, + 1.188462405200239, + 1.1807373433265356, + 1.1731059289503323, + 1.1655648367155225, + 1.1581109335294504, + 1.1507412633510787, + 1.1434530334924693, + 1.1362436022535454, + 1.1291104677348756, + 1.1220512576941493, + 1.115063720329748, + 1.108145715889902, + 1.1012952090188093, + 1.0945102617621099, + 1.0877890271636013, + 1.0811297433932268, + 1.0745307283534433, + 1.0679903747171768, + 1.0615071453558897, + 1.0550795691209247, + 1.0487062369453133, + 1.0423857982367941, + 1.0361169575358873, + 1.0298984714155934, + 1.023729145601712, + 1.0176078322948792, + 1.0115334276773278, + 1.005504869589025, + 0.9995211353593335, + 0.993581239781665, + 0.987684233219761, + 0.9818291998352993, + 0.9760152559274553, + 0.970241548375896, + 0.9645072531794396, + 0.9588115740832919, + 0.953153741288385, + 0.9475330102368966, + 0.9419486604685227, + 0.9363999945425316, + 0.9308863370210348, + 0.9254070335092748, + 0.9199614497490725, + 0.9145489707618798, + 0.9091690000381567, + 0.9038209587700501, + 0.8985042851245819, + 0.8932184335547604, + 0.887962874146223, + 0.8827370919972024, + 0.8775405866297531, + 0.8723728714303398, + 0.8672334731180154, + 0.8621219312385427, + 0.8570377976829329, + 0.8519806362289695, + 0.8469500221043982, + 0.8419455415705382, + 0.8369667915251626, + 0.8320133791235704 + ], + [ + 1.8103060423412103, + 1.7107665238435086, + 1.669535905289673, + 1.637898538942249, + 1.6112270053458069, + 1.5877289125327516, + 1.5664850127795136, + 1.546949230773178, + 1.5287657682381357, + 1.5116874868481052, + 1.495534446692011, + 1.480170807671693, + 1.4654910355432875, + 1.4514112044627505, + 1.4378632676783636, + 1.4247911449101065, + 1.4121479683504037, + 1.3998940936520634, + 1.3879956311865986, + 1.3764233403210313, + 1.365151782724293, + 1.3541586641860355, + 1.3434243160543546, + 1.332931281714563, + 1.3226639832178169, + 1.312608449852702, + 1.3027520951477995, + 1.2930835321443264, + 1.2835924192051458, + 1.2742693304082735, + 1.265105645897271, + 1.2560934585564472, + 1.2472254941350611, + 1.2384950425249321, + 1.229895898345048, + 1.2214223093374872, + 1.2130689313550005, + 1.2048307889394643, + 1.1967032406651887, + 1.188681948561523, + 1.1807628510428119, + 1.1729421388661816, + 1.1652162337132332, + 1.1575817690539099, + 1.1500355730021758, + 1.1425746529158343, + 1.135196181528396, + 1.127897484430737, + 1.1206760287453648, + 1.1135294128572988, + 1.106455357083524, + 1.0994516951782567, + 1.0925163665842912, + 1.0856474093518713, + 1.0788429536561204, + 1.0721012158523244, + 1.0654204930155167, + 1.0587991579169926, + 1.052235654395771, + 1.0457284930877007, + 1.039276247479003, + 1.0328775502546315, + 1.0265310899149631, + 1.0202356076371135, + 1.013989894359597, + 1.007792788071208, + 1.0016431712869105, + 0.9955399686951989, + 0.9894821449629165, + 0.9834687026848266, + 0.9774986804664485, + 0.9715711511297174, + 0.9656852200319868, + 0.9598400234897436, + 0.9540347272991744, + 0.9482685253464035, + 0.942540638300852, + 0.9368503123857188, + 0.9311968182200919, + 0.9255794497276566, + 0.9199975231073756, + 0.9144503758618954, + 0.9089373658797681, + 0.903457870567894, + 0.8980112860308606, + 0.8925970262941221, + 0.8872145225681876, + 0.8818632225512012, + 0.876542589767499, + 0.8712521029398934, + 0.8659912553936129, + 0.8607595544899622, + 0.8555565210879159, + 0.8503816890319773, + 0.8452346046647534, + 0.8401148263628044, + 0.8350219240944238, + 0.8299554789980936, + 
0.8249150829804495, + 0.8199003383326586 + ], + [ + 1.8103060423412103, + 1.7095599401717783, + 1.667829538296974, + 1.6358086747192289, + 1.6088138380023458, + 1.5850309094222208, + 1.5635294984518002, + 1.5437569104417879, + 1.5253530342527375, + 1.5080677358329138, + 1.4917188941017738, + 1.4761690223543937, + 1.461311307097247, + 1.4470608051661888, + 1.4333486449702724, + 1.4201180664436888, + 1.4073216336634817, + 1.3949192217273731, + 1.3828765302085009, + 1.3711639640290316, + 1.3597557765032315, + 1.3486294031780326, + 1.3377649369849243, + 1.3271447097061635, + 1.3167529545623902, + 1.3065755314940495, + 1.2965997014608632, + 1.2868139394752653, + 1.2772077785423654, + 1.2677716784824071, + 1.2584969149520289, + 1.2493754849882137, + 1.2404000261642643, + 1.2315637470343863, + 1.2228603669980815, + 1.2142840640705441, + 1.2058294293246172, + 1.1974914269913923, + 1.1892653593834195, + 1.181146835946666, + 1.1731317458623374, + 1.165216233713233, + 1.157396677805815, + 1.1496696708021104, + 1.1420320023675772, + 1.1344806435842423, + 1.1270127329144688, + 1.1196255635308692, + 1.112316571853284, + 1.1050833271551852, + 1.097923522120028, + 1.0908349642435449, + 1.0838155679911672, + 1.0768633476310616, + 1.0699764106729797, + 1.0631529518514826, + 1.056391247599334, + 1.049689650963126, + 1.0430465869186314, + 1.0364605480481401, + 1.029930090546167, + 1.023453830523544, + 1.017030440583103, + 1.0106586466429432, + 1.004337224985753, + 0.9980649995148285, + 0.9918408391993652, + 0.9856636556933036, + 0.9795324011135356, + 0.9734460659646227, + 0.9674036771983923, + 0.9614042963978471, + 0.9554470180757915, + 0.9495309680794393, + 0.943655302093046, + 0.9378192042313023, + 0.9320218857168531, + 0.9262625836358749, + 0.9205405597661523, + 0.9148550994725574, + 0.9092055106652529, + 0.9035911228163209, + 0.8980112860308606, + 0.8924653701689134, + 0.886952764014855, + 0.8814728744911591, + 0.8760251259136643, + 0.8706089592857031, + 0.8652238316286383, + 0.8598692153465404, + 0.854544597622901, + 0.8492494798474288, + 0.8439833770711167, + 0.8387458174878918, + 0.8335363419412827, + 0.8283545034546427, + 0.8231998667835696, + 0.8180720079892507, + 0.8129705140315551, + 0.8078949823807604 + ], + [ + 1.8103060423412103, + 1.7083637761683153, + 1.6661379069404538, + 1.6337368578910456, + 1.60642150999542, + 1.5823562053982392, + 1.560599506994631, + 1.5405921579613773, + 1.5219697715396974, + 1.5044792438225247, + 1.4879362913957253, + 1.4722017951671773, + 1.4571676734408812, + 1.4427479745178384, + 1.4288730090909216, + 1.4154853431789434, + 1.4025369776496295, + 1.3899873111955332, + 1.3778016361389411, + 1.3659500060180352, + 1.3544063684552683, + 1.343147891088582, + 1.3321544304917476, + 1.3214081086713034, + 1.3108929716480517, + 1.3005947114767342, + 1.2905004378657479, + 1.2805984889907167, + 1.2708782735815443, + 1.2613301381873967, + 1.251945254880305, + 1.2427155256776319, + 1.2336335007381847, + 1.2246923079809593, + 1.215885592235543, + 1.2072074623923852, + 1.198652445303839, + 1.1902154454110356, + 1.1818917092506245, + 1.1736767941392832, + 1.16556654045024, + 1.1575570469907142, + 1.1496446490666015, + 1.1418258988844185, + 1.1340975479931443, + 1.1264565315122972, + 1.1188999539290463, + 1.1114250762776936, + 1.104029304540552, + 1.0967101791309437, + 1.0894653653374284, + 1.0822926446240144, + 1.0751899066944666, + 1.0681551422402489, + 1.0611864363014722, + 1.0542819621786816, + 1.047439975840633, + 1.0406588107795496, + 1.033936873270855, + 1.0272726379991863, + 
1.0206646440166767, + 1.0141114910031654, + 1.0076118358012254, + 1.0011643892017115, + 0.9947679129580487, + 0.9884212170096729, + 0.9821231568969881, + 0.9758726313519434, + 0.9696685800498559, + 0.963509981509493, + 0.9573958511296272, + 0.9513252393513854, + 0.945297229936672, + 0.9393109383538369, + 0.9333655102625289, + 0.9274601200903874, + 0.9215939696948603, + 0.9157662871040062, + 0.9099763253306574, + 0.9042233612547834, + 0.8985066945693263, + 0.8928256467851534, + 0.8871795602911263, + 0.8815677974655998, + 0.8759897398359534, + 0.8704447872830181, + 0.8649323572875032, + 0.8594518842157431, + 0.8540028186422851, + 0.8485846267070247, + 0.8431967895047547, + 0.8378388025051566, + 0.8325101750013968, + 0.8272104295856231, + 0.8219391016497741, + 0.8166957389102252, + 0.8114799009548929, + 0.8062911588115161, + 0.8011290945359155, + 0.7959933008191116 + ], + [ + 1.8103060423412103, + 1.7071776505883376, + 1.6644604720585723, + 1.6316824281223676, + 1.604049258835465, + 1.5797039479715584, + 1.5576941045528234, + 1.5374539646530647, + 1.518614901775934, + 1.5009208670825924, + 1.4841854329720081, + 1.4682678616641494, + 1.453058813903525, + 1.4384713379200904, + 1.4244349335529574, + 1.4108914985611802, + 1.3977924753297195, + 1.385096790144039, + 1.3727693314932958, + 1.360779804480565, + 1.3491018536019064, + 1.3377123808346116, + 1.326591008378292, + 1.31571965022424, + 1.3050821667644366, + 1.294664083576847, + 1.2844523603878715, + 1.2744351996846826, + 1.2646018869649192, + 1.2549426564572603, + 1.2454485775184294, + 1.2361114579435923, + 1.2269237612106578, + 1.2178785352801174, + 1.208969351037449, + 1.2001902488284633, + 1.1915356918239746, + 1.1830005251769207, + 1.1745799401161316, + 1.1662694422664748, + 1.158064823602806, + 1.1499621375409215, + 1.141957676747019, + 1.1340479533116146, + 1.1262296809870889, + 1.1184997592322545, + 1.1108552588442135, + 1.1032934089886677, + 1.0958115854658397, + 1.0884073000711016, + 1.0810781909280196, + 1.0738220136873415, + 1.0666366334989705, + 1.0595200176755266, + 1.0524702289760497, + 1.0454854194469474, + 1.0385638247647049, + 1.0317037590312803, + 1.0249036099786855, + 1.0181618345441046, + 1.01147695478115, + 1.0048475540765622, + 0.9982722736449164, + 0.9917498092767737, + 0.9852789083182291, + 0.9788583668620445, + 0.9724870271325304, + 0.9661637750480863, + 0.9598875379468679, + 0.9536572824624346, + 0.9474720125374606, + 0.9413307675646998, + 0.9352326206453814, + 0.9291766769560927, + 0.9231620722160034, + 0.9171879712469972, + 0.9112535666199197, + 0.9053580773807294, + 0.8995007478508622, + 0.8936808464965926, + 0.8878976648626026, + 0.8821505165653565, + 0.8764387363422339, + 0.8707616791526913, + 0.8651187193280132, + 0.8595092497664838, + 0.8539326811710439, + 0.8483884413267286, + 0.8428759744153734, + 0.837394740365268, + 0.831944214233604, + 0.8265238856197177, + 0.82113325810727, + 0.8157718487336404, + 0.8104391874849282, + 0.8051348168150678, + 0.7998582911876629, + 0.7946091766392454, + 0.7893870503627433, + 0.7841915003100279 + ], + [ + 1.8103060423412103, + 1.706001202381107, + 1.6627967230484773, + 1.629644760054971, + 1.6016963624210039, + 1.577073329808183, + 1.5548124067362965, + 1.5343413752663848, + 1.5152874037557438, + 1.4973915224609007, + 1.4804651770879378, + 1.4643660243754795, + 1.4489834777687316, + 1.4342295935859926, + 1.4200330674282162, + 1.4063351342469006, + 1.3930866825007973, + 1.3802461699225588, + 1.3677780844630107, + 1.355651785632937, + 1.3438406172751556, + 1.3323212178737824, 
+ 1.3210729771664815, + 1.310077602826459, + 1.2993187711313827, + 1.288781842540694, + 1.2784536280224714, + 1.2683221954824924, + 1.2583767081915593, + 1.2486072889742656, + 1.2390049053100622, + 1.2295612715406588, + 1.2202687651702775, + 1.211120354853274, + 1.2021095381343105, + 1.1932302873738025, + 1.1844770025805906, + 1.1758444701031419, + 1.167327826313712, + 1.1589225255671, + 1.1506243118346653, + 1.1424291935111188, + 1.1343334209708507, + 1.1263334665156743, + 1.1184260064097484, + 1.1106079047421282, + 1.1028761988947196, + 1.0952280864246338, + 1.087660913196253, + 1.0801721626204874, + 1.0727594458775445, + 1.0654204930155167, + 1.058153144830775, + 1.0509553454478409, + 1.0438251355264685, + 1.0367606460323375, + 1.0297600925152222, + 1.0228217698450184, + 1.015944047361617, + 1.0091253643995475, + 1.0023642261525907, + 0.9956591998473184, + 0.9890089111978155, + 0.9824120411167339, + 0.9758673226603842, + 0.9693735381878277, + 0.962929516715924, + 0.9565341314540667, + 0.9501862975039073, + 0.9438849697107659, + 0.9376291406546853, + 0.9314178387701899, + 0.9252501265848112, + 0.9191250990673424, + 0.9130418820775766, + 0.9069996309100137, + 0.9009975289246641, + 0.8950347862586695, + 0.8891106386129815, + 0.8832243461088247, + 0.8773751922091009, + 0.871562482700281, + 0.8657855447306896, + 0.8600437259014134, + 0.8543363934063547, + 0.8486629332182187, + 0.8430227493174773, + 0.8374152629615621, + 0.8318399119917526, + 0.8262961501754125, + 0.8207834465813926, + 0.8153012849865782, + 0.8098491633117075, + 0.8044265930847121, + 0.7990330989299577, + 0.7936682180818718, + 0.7883314999215548, + 0.7830225055350534, + 0.7777408072920781, + 0.7724859884440174 + ], + [ + 1.8103060423412103, + 1.7048340892156508, + 1.6611461757810648, + 1.6276232607542183, + 1.5993621360900914, + 1.574463585432788, + 1.5519535750088442, + 1.5312534840787184, + 1.5119863092209194, + 1.493890182964532, + 1.4767744411979273, + 1.4604951479177772, + 1.4449404791672262, + 1.4300215072236715, + 1.4156661298314859, + 1.4018149243939493, + 1.3884182298389725, + 1.375434039064334, + 1.362826442660774, + 1.3505644572890378, + 1.3386211285243657, + 1.3269728334485, + 1.3155987311817283, + 1.3044803247162908, + 1.2936011076764782, + 1.2829462767134132, + 1.2725024952172395, + 1.2622576975802344, + 1.2522009258162266, + 1.2423221922297076, + 1.2326123632312462, + 1.223063060450642, + 1.2136665761006284, + 1.2044158001587117, + 1.1953041574106948, + 1.1863255527710577, + 1.1774743235878535, + 1.1687451978716872, + 1.1601332575735244, + 1.151633906184922, + 1.1432428400546442, + 1.1349560229135593, + 1.1267696631798314, + 1.118680193682292, + 1.1106842534943442, + 1.1027786716159436, + 1.0949604522789458, + 1.0872267616826854, + 1.0795749159932424, + 1.0720023704622945, + 1.0645067095404828, + 1.0570856378764013, + 1.0497369721061327, + 1.0424586333500938, + 1.0352486403441123, + 1.0281051031404118, + 1.0210262173217615, + 1.0140102586785995, + 1.007055578304636, + 1.0001605980714223, + 0.9933238064466883, + 0.9865437546250679, + 0.9798190529431507, + 0.9731483675537346, + 0.9665304173367348, + 0.9599639710264875, + 0.9534478445372017, + 0.9469808984701112, + 0.9405620357874578, + 0.9341901996398618, + 0.9278643713348971, + 0.9215835684358102, + 0.9153468429803374, + 0.9091532798104754, + 0.9030019950048749, + 0.8968921344062506, + 0.8908228722368654, + 0.8847934097957351, + 0.8788029742317296, + 0.8728508173872416, + 0.8669362147075211, + 0.8610584642111755, + 0.8552168855176931, + 0.8494108189281802, + 
0.8436396245557898, + 0.8379026815026037, + 0.8321993870799664, + 0.826529156069502, + 0.8208914200222464, + 0.8152856265935242, + 0.8097112389113612, + 0.8041677349763935, + 0.7986546070913714, + 0.793171361318496, + 0.7877175169629429, + 0.7822926060810514, + 0.7768961730117464, + 0.7715277739298712, + 0.7661869764201918, + 0.7608733590709111 + ], + [ + 1.8103060423412103, + 1.7036759861416195, + 1.6595083707071439, + 1.6256173673895944, + 1.5970459299420288, + 1.5718739882343025, + 1.549116813407919, + 1.5281894313522522, + 1.5087106990730772, + 1.490415873742438, + 1.4731121977187458, + 1.4566541545526581, + 1.4409286924379785, + 1.4258459072079792, + 1.4113329049098913, + 1.3973296104750343, + 1.3837858175428475, + 1.3706590577647506, + 1.3579130274390105, + 1.3455164030231317, + 1.3334419341273946, + 1.3216657384491906, + 1.3101667462717919, + 1.2989262574866063, + 1.2879275844746276, + 1.2771557613432567, + 1.266597305044002, + 1.2562400174863626, + 1.246072820363294, + 1.2360856163123999, + 1.2262691714556162, + 1.2166150154265667, + 1.2071153558049441, + 1.1977630044987908, + 1.1885513140967459, + 1.1794741225880356, + 1.170525705143666, + 1.1617007318867465, + 1.1529942307670844, + 1.1444015548056574, + 1.135918353096281, + 1.127540545050784, + 1.1192642974550053, + 1.1110860039695267, + 1.1030022667641062, + 1.0950098800204873, + 1.0871058150763937, + 1.0792872070154642, + 1.0715513425347467, + 1.0638956489440752, + 1.0563176841708777, + 1.0488151276603312, + 1.0413857720747477, + 1.0340275157080323, + 1.0267383555413363, + 1.01951638087488, + 1.0123597674785718, + 1.005266772210685, + 0.9982357280596101, + 0.9912650395687332, + 0.9843531786088586, + 0.9774986804664485, + 0.970700140219311, + 0.9639562093743359, + 0.9572655927444844, + 0.9506270455445502, + 0.9440393706872445, + 0.9375014162629751, + 0.9310120731882907, + 0.9245702730093945, + 0.9181749858484148, + 0.9118252184812465, + 0.9055200125368111, + 0.8992584428084882, + 0.8930396156692957, + 0.8868626675831308, + 0.8807267637050531, + 0.8746310965641811, + 0.8685748848233259, + 0.8625573721099606, + 0.8565778259135792, + 0.8506355365448938, + 0.844729816152679, + 0.8388599977944152, + 0.8330254345571708, + 0.8272254987254446, + 0.8214595809929393, + 0.8157270897154647, + 0.8100274502023734, + 0.804360104044136, + 0.7987245084738166, + 0.7931201357603941, + 0.7875464726320021, + 0.7820030197273057, + 0.776489291073356, + 0.7710048135883774, + 0.765549126608045, + 0.7601217814339141, + 0.7547223409027446, + 0.749350378975554 + ], + [ + 1.8103060423412103, + 1.7025265843704516, + 1.6578828711337423, + 1.6236265451236218, + 1.5947471263996928, + 1.5693038477405121, + 1.5463013655591062, + 1.5251484001092444, + 1.5054596999262744, + 1.486967668428934, + 1.4694774701752231, + 1.452842020144324, + 1.4369470479060333, + 1.4217016801859237, + 1.4070322372824293, + 1.3928779965571996, + 1.3791882104581754, + 1.3659199528559527, + 1.3530365287188064, + 1.3405062768570837, + 1.3283016531398142, + 1.3163985178288968, + 1.3047755740899365, + 1.2934139202394872, + 1.282296688777002, + 1.2714087524874165, + 1.260736482983858, + 1.2502675506884446, + 1.2399907578772784, + 1.229895898345048, + 1.219973638678565, + 1.2102154172064838, + 1.2006133575113382, + 1.1911601940182743, + 1.1818492076612153, + 1.1726741700069698, + 1.163629294516658, + 1.1547091938608274, + 1.1459088423938588, + 1.1372235430453577, + 1.128648898009236, + 1.12018078271127, + 1.1118153226177747, + 1.1035488725153688, + 1.0953779979474376, + 1.0872994585391162, + 
1.0793101929811435, + 1.071407305475247, + 1.0635880534708564, + 1.055849836545899, + 1.0481901863038705, + 1.0406067571758948, + 1.033097318030637, + 1.0256597445069957, + 1.0182920119948977, + 1.0109921891984788, + 1.0037584322236484, + 0.9965889791387594, + 0.9894821449629165, + 0.9824363170415336, + 0.9754499507731886, + 0.968521565655697, + 0.961649741622731, + 0.9548331156453127, + 0.9480703785751403, + 0.9413602722090428, + 0.934701586555919, + 0.92809315728935, + 0.9215338633706952, + 0.9150226248289302, + 0.9085584006847774, + 0.9021401870078307, + 0.8957670150964023, + 0.8894379497707543, + 0.8831520877711936, + 0.8769085562532677, + 0.8707065113729571, + 0.8645451369553763, + 0.8584236432410365, + 0.8523412657042124, + 0.8462972639384181, + 0.8402909206043816, + 0.8343215404362954, + 0.8283884493024383, + 0.8224909933165834, + 0.8166285379968699, + 0.8108004674690803, + 0.805006183711489, + 0.7992451058386629, + 0.7935166694217847, + 0.787820325843249, + 0.7821555416834419, + 0.776521798137764, + 0.7709185904620947, + 0.7653454274450163, + 0.7598018309052436, + 0.7542873352127937, + 0.7488014868325478, + 0.7433438438889344, + 0.7379139757505513 + ], + [ + 1.8103060423412103, + 1.701385590163906, + 1.6562692616522565, + 1.6216502851867431, + 1.592465137986602, + 1.566752507132743, + 1.5435065119535978, + 1.5221296131913595, + 1.5022324809633028, + 1.4835446858092975, + 1.4658693296854826, + 1.4490577704732432, + 1.432994528032276, + 1.4175877670692163, + 1.402763027881242, + 1.3884589449971294, + 1.3746242336319932, + 1.3612155132241477, + 1.3481957002743488, + 1.3355327984155863, + 1.3231989719242758, + 1.311169825509896, + 1.2994238368816882, + 1.287941904255819, + 1.2767069815659853, + 1.2657037814546888, + 1.2549185312597861, + 1.2443387708778086, + 1.2339531840415086, + 1.2237514564988146, + 1.213724156029488, + 1.203862630325084, + 1.194158919585395, + 1.1846056813194363, + 1.175196125330551, + 1.165923957248992, + 1.1567833292773846, + 1.1477687970539647, + 1.1388752817297232, + 1.1300980365092967, + 1.121432617029755, + 1.112874855052568, + 1.1044208350267675, + 1.0960668731493617, + 1.087809498605276, + 1.0796454367158084, + 1.0715715937635175, + 1.0635850432941123, + 1.0556830137233417, + 1.0478628771000804, + 1.0401221388964412, + 1.0324584287124676, + 1.0248694917972225, + 1.0173531813003116, + 1.009907451178373, + 1.0025303496901108, + 0.9952200134212735, + 0.9879746617877375, + 0.9807925919707604, + 0.9736721742435835, + 0.9666118476530486, + 0.9596101160238133, + 0.9526655442561895, + 0.9457767548916578, + 0.938942424922776, + 0.9321612828265573, + 0.9254321058024759, + 0.918753717198112, + 0.9121249841070851, + 0.9055448151253878, + 0.8990121582535412, + 0.8925259989331515, + 0.8860853582074874, + 0.8796892909966436, + 0.8733368844786777, + 0.8670272565688746, + 0.8607595544899622, + 0.8545329534267185, + 0.8483466552589607, + 0.842199887367405, + 0.8360919015073411, + 0.8300219727454718, + 0.823989398455642, + 0.8179934973695167, + 0.8120336086785815, + 0.8061090911841097, + 0.800219322492006, + 0.7943636982496604, + 0.788541631422166, + 0.7827525516054481, + 0.7769959043740275, + 0.7712711506613077, + 0.7655777661704275, + 0.7599152408138532, + 0.7542830781800173, + 0.7486807950254257, + 0.7431079207907603, + 0.7375639971396076, + 0.7320485775185336, + 0.7265612267373088 + ], + [ + 1.8103060423412103, + 1.7002527238186347, + 1.654667146702418, + 1.6196881031185486, + 1.5901994052960595, + 1.564219340975295, + 1.5407315674609119, + 1.5191323305730973, + 
1.4990282510636255, + 1.480146086773484, + 1.462286891749875, + 1.4453004778683576, + 1.429070163895887, + 1.4135031593730938, + 1.3985242301522307, + 1.3840713725084155, + 1.3700927682509088, + 1.3565445856228873, + 1.3433893554248333, + 1.330594748500011, + 1.3181326396093795, + 1.3059783797304227, + 1.294110222722055, + 1.2825088681254675, + 1.2711570925806135, + 1.2600394497283331, + 1.249142023658956, + 1.2384522246732255, + 1.2279586188049842, + 1.2176507845250732, + 1.2075191915100536, + 1.1975550974600337, + 1.187750459786041, + 1.1780978596288718, + 1.1685904361679857, + 1.1592218295667793, + 1.1499861312057578, + 1.1408778400971133, + 1.1318918245674414, + 1.1230232884506255, + 1.11426774115854, + 1.1056209710993903, + 1.0970790219971123, + 1.0886381717339955, + 1.0802949133955049, + 1.0720459382434644, + 1.0638881203831114, + 1.0558185029225147, + 1.047834285450564, + 1.0399328126831822, + 1.0321115641472487, + 1.024368144788609, + 1.0167002764049773, + 1.00910578981687, + 1.0015826177003149, + 0.9941287880142294, + 0.9867424179632508, + 0.9794217084436501, + 0.9721649389259062, + 0.9649704627327006, + 0.9578367026756207, + 0.9507621470178138, + 0.9437453457333216, + 0.9367849070368715, + 0.929879494160607, + 0.9230278223566084, + 0.9162286561061734, + 0.9094808065186859, + 0.9027831289045642, + 0.8961345205082616, + 0.8895339183886, + 0.8829802974349059, + 0.8764726685084565, + 0.8700100766997028, + 0.8635915996925695, + 0.8572163462279021, + 0.8508834546588118, + 0.8445920915912892, + 0.8383414506040151, + 0.8321307510418012, + 0.8259592368775488, + 0.8198261756380315, + 0.8137308573891775, + 0.8076725937768761, + 0.8016507171196354, + 0.7956645795497115, + 0.789713552199575, + 0.783797024430829, + 0.7779144031028993, + 0.772065111879018, + 0.766248590567205, + 0.7604642944941068, + 0.7547116939097246, + 0.7489902734211759, + 0.7432995314537857, + 0.7376389797379126, + 0.7320081428200166, + 0.7264065575965916, + 0.7208337728696641, + 0.7152893489226521 + ], + [ + 1.8103060423412103, + 1.699127718736851, + 1.6530761492580206, + 1.6177395371581258, + 1.5879493951324917, + 1.5617037531373934, + 1.537975879052503, + 1.5161558469030132, + 1.4958462561748305, + 1.4767710715281321, + 1.458729313312174, + 1.441569258124837, + 1.425173031975041, + 1.4094468958655642, + 1.3943148465778155, + 1.3797142465622674, + 1.3655927479237728, + 1.3519060708413355, + 1.3386163630916408, + 1.3256909650375452, + 1.3131014639335765, + 1.300822958785938, + 1.288833481156574, + 1.2771135332903611, + 1.265645715763796, + 1.2544144243194135, + 1.2434056007940486, + 1.2326065267919566, + 1.222005651464816, + 1.211592446750803, + 1.2013572849040435, + 1.1912913342576745, + 1.181386470008451, + 1.1716351974588903, + 1.1620305856546471, + 1.152566209746553, + 1.143236100715054, + 1.134034701339243, + 1.1249568274878812, + 1.1159976339666966, + 1.1071525842831373, + 1.0984174237929842, + 1.0897881557776794, + 1.0812610200706696, + 1.0728324739084638, + 1.0644991747297594, + 1.05625796468576, + 1.048105856658108, + 1.0400400216088719, + 1.032057777110695, + 1.024156576925261, + 1.0163340015153017, + 1.008587749389918, + 1.000915629195484, + 0.9933155524750888, + 0.9857855270287283, + 0.9783236508144206, + 0.9709281063373432, + 0.9635971554800957, + 0.9563291347324252, + 0.9491224507833247, + 0.9419755764424174, + 0.9348870468610515, + 0.927855456026619, + 0.9208794535063354, + 0.9139577414191205, + 0.9070890716163501, + 0.9002722430541351, + 0.8935060993414605, + 0.886789526450011, + 0.8801214505728367, + 
0.8735008361202098, + 0.8669266838420713, + 0.8603980290674393, + 0.8539139400519881, + 0.8474735164257872, + 0.84107588773388, + 0.834720212063, + 0.8284056747482957, + 0.8221314871544375, + 0.8158968855259424, + 0.809701129901976, + 0.8035435030912621, + 0.7974233097030835, + 0.7913398752306657, + 0.785292545183524, + 0.7792806842656167, + 0.773303675596379, + 0.7673609199719376, + 0.7614518351640006, + 0.755575855254101, + 0.749732430001037, + 0.7439210242395117, + 0.738141117308109, + 0.7323922025048758, + 0.7266737865688994, + 0.7209853891863817, + 0.7153265425198034, + 0.7096967907588816, + 0.7040956896920908 + ], + [ + 1.8103060423412103, + 1.6980103205743422, + 1.6514959096220299, + 1.6158041467683768, + 1.5857145988074743, + 1.5592051748880906, + 1.5352388237148336, + 1.5131994892495748, + 1.4926857769028499, + 1.4734188770406065, + 1.4551957900653596, + 1.4378632676783636, + 1.4213022511955429, + 1.4054180594955303, + 1.3901339254890974, + 1.3753865820877889, + 1.3611231552737384, + 1.3472989201914414, + 1.3338756441836694, + 1.320820339367472, + 1.3081043074349712, + 1.2957023971248376, + 1.2835924192051458, + 1.2717546799585104, + 1.2601716050884568, + 1.2488274335068703, + 1.2377079657589645, + 1.2268003556227094, + 1.2160929361579393, + 1.2055750734905049, + 1.1952370431108355, + 1.1850699245893672, + 1.1750655114644892, + 1.165216233713233, + 1.155515090721671, + 1.145955593067663, + 1.1365317117400022, + 1.1272378336649116, + 1.1180687226080213, + 1.1090194846784163, + 1.100085537789509, + 1.0912625845357624, + 1.0825465880295835, + 1.0739337503128517, + 1.0654204930155167, + 1.0570034399818515, + 1.0486794016250887, + 1.040445360804826, + 1.0322984600498757, + 1.0242359899731346, + 1.016255378745309, + 1.0083541825115612, + 1.0005300766498502, + 0.9927808477823412, + 0.9851043864620801, + 0.9774986804664485, + 0.9699618086369846, + 0.9624919352121273, + 0.9550873046055208, + 0.947746236587796, + 0.9404671218343674, + 0.9332484178058258, + 0.9260886449310524, + 0.9189863830663041, + 0.9119402682062664, + 0.9049489894255008, + 0.8980112860308604, + 0.8911259449073601, + 0.8842917980416725, + 0.8775077202089361, + 0.870772626809901, + 0.8640854718466402, + 0.8574452460261286, + 0.8508509749819549, + 0.8443017176052932, + 0.8377965644770421, + 0.8313346363937337, + 0.8249150829804495, + 0.8185370813845455, + 0.8121998350445099, + 0.805902572528732, + 0.7996445464393985, + 0.7934250323770996, + 0.7872433279620908, + 0.781098751908465, + 0.7749906431477784, + 0.768918359998944, + 0.7628812793814364, + 0.7568787960690814, + 0.7509103219818998, + 0.7449752855136585, + 0.7390731308929547, + 0.7332033175758107, + 0.7273653196679007, + 0.7215586253746616, + 0.7157827364776606, + 0.7100371678357033, + 0.7043214469092679, + 0.6986351133069484, + 0.69297771835267 + ], + [ + 1.8103060423412103, + 1.6969002864581153, + 1.6499260843201649, + 1.6138815112809364, + 1.58349453057502, + 1.556723063146863, + 1.5325198065329957, + 1.5102626150312406, + 1.4895461262991192, + 1.470088774691925, + 1.45168555397759, + 1.4341817010103441, + 1.4174569802206625, + 1.4014157745719587, + 1.3859805581385451, + 1.371087438441919, + 1.35668301880883, + 1.3427221322821976, + 1.3291661682780735, + 1.3159818128309553, + 1.3031400839525156, + 1.2906155817632432, + 1.2783858976924523, + 1.2664311433519557, + 1.254733570724781, + 1.2432772629257347, + 1.2320478801395685, + 1.2210324491603886, + 1.2102191877212707, + 1.1995973568330798, + 1.1891571358602562, + 1.178889516195512, + 1.1687862102570281, + 
1.1588395731927716, + 1.1490425351883133, + 1.1393885426741008, + 1.1298715070426395, + 1.1204857597353723, + 1.1112260127581772, + 1.1020873238444238, + 1.093065065613969, + 1.084154898181771, + 1.075352744755929, + 1.066654769835814, + 1.0580573596794782, + 1.049557104758168, + 1.0411507839563094, + 1.0328353503093113, + 1.0246079181001146, + 1.0164657511595445, + 1.0084062522359827, + 1.000426953317284, + 0.9925255068027072, + 0.984699677435357, + 0.9769473349165665, + 0.9692664471330601, + 0.9616550739358798, + 0.9541113614171102, + 0.9466335366365667, + 0.9392199027559487, + 0.9318688345426278, + 0.9245787742093172, + 0.917348227559458, + 0.9101757604113005, + 0.9030599952764493, + 0.8959996082710726, + 0.8889933262401748, + 0.8820399240772272, + 0.8751382222231849, + 0.8682870843304306, + 0.8614854150785376, + 0.8547321581299747, + 0.848026294214937, + 0.8413668393354798, + 0.834752843079993, + 0.8281833870398408, + 0.8216575833207005, + 0.8151745731417658, + 0.8087335255165617, + 0.8023336360096297, + 0.7959741255638206, + 0.7896542393933541, + 0.7833732459381907, + 0.7771304358756193, + 0.7709251211852762, + 0.7647566342641103, + 0.7586243270880685, + 0.7525275704175252, + 0.7464657530436941, + 0.7404382810734736, + 0.7344445772503487, + 0.7284840803091617, + 0.7225562443627012, + 0.7166605383182187, + 0.7107964453221031, + 0.7049634622310728, + 0.6991610991083519, + 0.6933888787434042, + 0.6876463361938914, + 0.6819330183486121 + ], + [ + 1.8103060423412103, + 1.695797384266839, + 1.648366345083291, + 1.6119712286498673, + 1.581288726192468, + 1.5542568988746361, + 1.529818258928172, + 1.507344610112696, + 1.4864266478253723, + 1.466780068118097, + 1.448197871016767, + 1.430523788261421, + 1.4136364149585243, + 1.3974392041694916, + 1.3818538760076677, + 1.3668159166226115, + 1.3522714100437256, + 1.3381747500528416, + 1.324486950567453, + 1.3111743736345738, + 1.2982077554080622, + 1.2855614489875884, + 1.2732128278729475, + 1.2611418102559036, + 1.2493304755151338, + 1.2377627519693546, + 1.226424160344668, + 1.2153016012671813, + 1.2043831778841816, + 1.1936580467669087, + 1.1831162917714189, + 1.1727488166792384, + 1.162547253309534, + 1.1525038824619824, + 1.1426115655662632, + 1.1328636853175567, + 1.1232540938949833, + 1.113777067611695, + 1.1044272670463702, + 1.095199701867476, + 1.0860896996923235, + 1.0770928784292937, + 1.0682051216385635, + 1.0594225565182036, + 1.0507415341816315, + 1.0421586119414883, + 1.0336705373559636, + 1.0252742338278975, + 1.0169667875758384, + 1.0087454358206123, + 1.0006075560516148, + 0.9925506562546034, + 0.9845723659977729, + 0.9766704282857395, + 0.9688426921020954, + 0.9610871055707044, + 0.9534017096741255, + 0.945784632474679, + 0.9382340837898486, + 0.930748350279114, + 0.9233257909040125, + 0.9159648327273472, + 0.9086639670210858, + 0.9014217456556672, + 0.8942367777462411, + 0.8871077265338423, + 0.8800333064816914, + 0.8730122805687619, + 0.8660434577644727, + 0.8591256906699124, + 0.8522578733123629, + 0.8454389390811186, + 0.8386678587936957, + 0.8319436388824986, + 0.8252653196929027, + 0.8186319738844954, + 0.8120427049279372, + 0.8054966456905421, + 0.798992957104263, + 0.7925308269102851, + 0.7861094684749141, + 0.77972811967187, + 0.7733860418264908, + 0.7670825187177064, + 0.7608168556339663, + 0.7545883784795953, + 0.7483964329283286, + 0.7422403836210115, + 0.7361196134046847, + 0.7300335226104718, + 0.7239815283678801, + 0.7179630639532935, + 0.7119775781705968, + 0.706024534762019, + 0.7001034118474049, + 
0.6942137013902663, + 0.6883549086890572, + 0.6825265518922374, + 0.6767281615357768, + 0.6709592801018422 + ], + [ + 1.8103060423412103, + 1.6947013919680478, + 1.646816377910084, + 1.6100729143036565, + 1.5790967415948853, + 1.5518061855917225, + 1.5271336370341133, + 1.504444887051252, + 1.4833267134789576, + 1.4634920912217229, + 1.4447320390545824, + 1.426888793033211, + 1.4098397862661027, + 1.3934875477386859, + 1.377753048327048, + 1.3625711567018208, + 1.3478874408485604, + 1.3336558580400613, + 1.3198370490478315, + 1.3063970539612435, + 1.2933063288422346, + 1.2805389813172883, + 1.2680721683220515, + 1.255885615840052, + 1.2439612317270163, + 1.232282790475398, + 1.2208356742263833, + 1.209606658228549, + 1.1985837317612937, + 1.187755947610567, + 1.177113294722419, + 1.166646589816092, + 1.1563473846167052, + 1.1462078860414524, + 1.1362208871948902, + 1.1263797074362443, + 1.1166781401022354, + 1.1071104067231112, + 1.0976711167725548, + 1.0883552321552639, + 1.0791580357679544, + 1.0700751035768632, + 1.0611022797426448, + 1.052235654395771, + 1.0434715437252118, + 1.0348064720927472, + 1.0262371559265966, + 1.0177604891826846, + 1.0093735301909952, + 1.001073489729073, + 0.9928577201855789, + 0.984723705694551, + 0.9766690531361613, + 0.9686914839127332, + 0.9607888264199193, + 0.9529590091425396, + 0.9452000543128858, + 0.9375100720764726, + 0.9298872551164794, + 0.9223298736935561, + 0.9148362710624316, + 0.907404859230916, + 0.9000341150305464, + 0.8927225764713356, + 0.8854688393559105, + 0.8782715541308362, + 0.8711294229551253, + 0.8640411969678994, + 0.8570056737389122, + 0.850021694887198, + 0.8430881438544838, + 0.836203943821255, + 0.8293680557544527, + 0.8225794765767864, + 0.8158372374485259, + 0.8091404021534414, + 0.8024880655812765, + 0.7958793522997921, + 0.789313415210001, + 0.7827894342787469, + 0.7763066153432593, + 0.7698641889827479, + 0.7634614094525026, + 0.7570975536763143, + 0.7507719202933664, + 0.7444838287560379, + 0.7382326184753364, + 0.7320176480109185, + 0.7258382943028926, + 0.7196939519427951, + 0.713584032481327, + 0.707507963770612, + 0.7014651893388939, + 0.6954551677957374, + 0.6894773722659365, + 0.6835312898504528, + 0.6776164211128223, + 0.6717322795895779, + 0.6658783913233264, + 0.6600542944172123 + ], + [ + 1.8103060423412103, + 1.6936120970067206, + 1.6452758822023505, + 1.608186200086209, + 1.5769181516722308, + 1.549370448010647, + 1.524465420199477, + 1.5015628834791848, + 1.4802457220634908, + 1.4602242063377413, + 1.4412873859330435, + 1.423276010360464, + 1.4060663578312078, + 1.3895600389015161, + 1.3736772797886225, + 1.3583523354574931, + 1.3435302610032518, + 1.3291645798570562, + 1.3152155619246306, + 1.3016489273051102, + 1.2884348536800836, + 1.2755472047028724, + 1.262962922068346, + 1.2506615407263335, + 1.2386247980577436, + 1.2268363156687618, + 1.2152813379625178, + 1.2039465155762066, + 1.1928197246171588, + 1.1818899147202389, + 1.171146980501463, + 1.160581652149804, + 1.1501854017857709, + 1.1399503628955503, + 1.1298692606760938, + 1.1199353515377042, + 1.1101423703342723, + 1.1004844841479078, + 1.090956251659586, + 1.0815525873021103, + 1.0722687295248767, + 1.0631002126082758, + 1.0540428415542011, + 1.045092669652036, + 1.0362459783797178, + 1.0274992593495202, + 1.01884919804992, + 1.0102926591698682, + 1.0018266733212053, + 0.9934484249997826, + 0.985155241646911, + 0.9769445836906605, + 0.9688140354618218, + 0.960761296892432, + 0.9527841759160103, + 0.9448805814983441, + 0.9370485172360348, + 
0.9292860754672778, + 0.9215914318456508, + 0.9139628403331832, + 0.9063986285737761, + 0.8988971936122436, + 0.8914569979279349, + 0.8840765657551333, + 0.876754479665293, + 0.8694893773886937, + 0.8622799488553269, + 0.8551249334368132, + 0.848023117372902, + 0.8409733313676813, + 0.8339744483420094, + 0.827025381329946, + 0.8201250815080512, + 0.8132725363474513, + 0.8064667678794388, + 0.7997068310662041, + 0.79299181226901, + 0.7863208278067805, + 0.7796930225986635, + 0.7731075688846688, + 0.766563665018957, + 0.7600605343308033, + 0.7535974240486525, + 0.7471736042830465, + 0.7407883670645349, + 0.734441025432977, + 0.7281309125749225, + 0.7218573810060003, + 0.7156198017954821, + 0.7094175638303899, + 0.7032500731167102, + 0.6971167521154524, + 0.6910170391114567, + 0.6849503876129888, + 0.6789162657803147, + 0.6729141558815601, + 0.6669435537742769, + 0.6610039684112538, + 0.6550949213691912, + 0.6492159463989715 + ], + [ + 1.8103060423412103, + 1.6925292957404658, + 1.643744569966258, + 1.6063107332785556, + 1.574752549139721, + 1.546949230773178, + 1.5218131096043128, + 1.4986980606093683, + 1.4771830975913056, + 1.4569758025389765, + 1.4378632676783636, + 1.4196847648377746, + 1.4023154242159008, + 1.3856559434148965, + 1.3696258084323314, + 1.3541586641860355, + 1.3391990559382319, + 1.3247000758647345, + 1.3106216252163532, + 1.296929106009567, + 1.2835924192051458, + 1.2705851859376653, + 1.2578841339443472, + 1.2454686082801452, + 1.2333201768674154, + 1.2214223093374872, + 1.2097601131765408, + 1.1983201151532459, + 1.187090078877526, + 1.176058851448082, + 1.165216233713233, + 1.1545528698475225, + 1.144060152841401, + 1.133730143187798, + 1.1235554985808838, + 1.1135294128572988, + 1.1036455627367427, + 1.0938980611777727, + 1.0842814163714514, + 1.0747904955616847, + 1.0654204930155167, + 1.0561669015759956, + 1.0470254873196951, + 1.0379922669145327, + 1.029063487334339, + 1.0202356076371135, + 1.011505282556033, + 1.0028693476875474, + 0.9943248060905914, + 0.9858688161359979, + 0.9774986804664485, + 0.9692118359453652, + 0.9610058444885826, + 0.9528783846858437, + 0.9448272441305179, + 0.9368503123857188, + 0.9289455745234526, + 0.9211111051807513, + 0.913345063083111, + 0.9056456859911022, + 0.8980112860308604, + 0.8904402453734044, + 0.8829310122314533, + 0.8754820971456838, + 0.8680920695352534, + 0.860759554489962, + 0.8534832297836832, + 0.8462618230906862, + 0.8390941093882587, + 0.8319789085306097, + 0.8249150829804495, + 0.8179015356858965, + 0.810937208091496, + 0.8040210782731344, + 0.7971521591875481, + 0.7903294970279366, + 0.7835521696779236, + 0.7768192852567701, + 0.7701299807493428, + 0.7634834207148792, + 0.7568787960690813, + 0.7503153229345088, + 0.7437922415546511, + 0.7373088152674174, + 0.7308643295341202, + 0.7244580910203263, + 0.7180894267252342, + 0.7117576831564756, + 0.705462225547484, + 0.6992024371147738, + 0.69297771835267, + 0.6867874863632074, + 0.6806311742190798, + 0.674508230357665, + 0.6684181180042967, + 0.6623603146230742, + 0.6563343113936204, + 0.6503396127123038, + 0.6443757357165437, + 0.6384422098309034 + ], + [ + 1.8103060423412103, + 1.6914527929170369, + 1.642222165073432, + 1.6044461756938857, + 1.5725995434928637, + 1.544542097282021, + 1.5191762269802471, + 1.495849901852917, + 1.474138287805654, + 1.4537462940686905, + 1.4344590668487267, + 1.4161144088867035, + 1.3985863090465611, + 1.3817745572868427, + 1.3655979036911665, + 1.3499893866787507, + 1.334893044644517, + 1.3202615410174625, + 1.3060544105378757, + 
+            [ … large embedded JSON arrays of floating-point values (stored notebook/plot output data); each series begins at 1.8103060423412103 and decreases monotonically … ]
1.2596647087888502, + 1.238878525231748, + 1.2188223685145796, + 1.1994243307987782, + 1.1806235869793236, + 1.162368141239463, + 1.1446131305735538, + 1.1273195257310622, + 1.110453121389367, + 1.093983740539577, + 1.0778846000336044, + 1.0621317991027004, + 1.04670390291317, + 1.0315816004282552, + 1.0167474209866147, + 1.002185497731825, + 0.987881368761111, + 0.973821808893325, + 0.9599946864835696, + 0.946388840872214, + 0.9329939769462416, + 0.919800573980039, + 0.9067998064608417, + 0.8939834750275621, + 0.8813439459875223, + 0.8688740981437544, + 0.8565672758810413, + 0.8444172476331817, + 0.8324181689957579, + 0.8205645498646876, + 0.8088512250762385, + 0.7972733281030269, + 0.7858262674259883, + 0.7745057052569307, + 0.7633075383320257, + 0.7522278805350828, + 0.7412630471419541, + 0.7304095405049648, + 0.719664037019701, + 0.7090233752364898, + 0.6984845449960431, + 0.6880446774834456, + 0.6777010361073618, + 0.6674510081222856, + 0.6572920969211635, + 0.6472219149339683, + 0.6372381770749977, + 0.627338694687949, + 0.6175213699433147, + 0.6077841906474798, + 0.5981252254271322, + 0.588542619256346, + 0.5790345892969971, + 0.569599421026094, + 0.5602354646262021, + 0.550941131617437, + 0.5417148917115596, + 0.5325552698705284, + 0.5234608435534978, + 0.5144302401377158, + 0.5054621345000794, + 0.49655361656259633, + 0.4876864031862309, + 0.47887811012875015, + 0.4701275782389851, + 0.4614336858815452, + 0.45279534725863946, + 0.4442115108271977, + 0.43568115780476246, + 0.42720330075813884, + 0.4187769822692704, + 0.41040127367323687, + 0.40207527386366504, + 0.3937981081612031, + 0.38556892724103764, + 0.3773869061157281, + 0.36925124316991587, + 0.36116115924370507, + 0.35311589676175126, + 0.34511471890530643, + 0.33715690882464966, + 0.32924176888953205, + 0.3213686199754102, + 0.3135368007834016, + 0.30574566719204166, + 0.2979945916390333, + 0.2902829625313188 + ], + [ + 1.8103060423412103, + 1.6566478860200757, + 1.5930005937026168, + 1.5441623085956444, + 1.5029897296989412, + 1.4667159595098644, + 1.433921964537617, + 1.4037647737988006, + 1.375695145064023, + 1.3493315733778066, + 1.3243962873042259, + 1.3006795918462302, + 1.2780185748500785, + 1.2562836808320985, + 1.235369866704172, + 1.21519056190052, + 1.195673417056672, + 1.1767572335915024, + 1.1583896964254294, + 1.140525667086609, + 1.1231258766785186, + 1.106155909852254, + 1.0895854043071487, + 1.0733874124421994, + 1.0575378867340237, + 1.0420152607355375, + 1.0268001048370545, + 1.0118748411045124, + 0.9972235052563911, + 0.9828315465914641, + 0.968685658723806, + 0.9547736355182326, + 0.9410842477868357, + 0.9276071372029193, + 0.9143327245820552, + 0.9012521302213856, + 0.888357104414403, + 0.8756399665963065, + 0.8630935518448124, + 0.8507111636781367, + 0.8384865322672412, + 0.8264137773221081, + 0.8144873750285166, + 0.8027021285077834, + 0.79105314135125, + 0.7795357938471725, + 0.7681457215726298, + 0.7568787960690814, + 0.7457311073589467, + 0.7346989480932683, + 0.7237787991482422, + 0.7129673165119788, + 0.7022613193229867, + 0.6916577789391068, + 0.6811538089304305, + 0.6707466559025032, + 0.6604336910671339, + 0.6502124024876927, + 0.6400803879340814, + 0.6300353482897976, + 0.6200750814598299, + 0.610197476733655, + 0.6004005095624589, + 0.5906822367139815, + 0.5810407917721337, + 0.5714743809518732, + 0.5619812792027571, + 0.5525598265772014, + 0.5432084248417948, + 0.5339255343120779, + 0.5247096708930346, + 0.5155594033091849, + 0.5064733505096486, + 0.4974501792348498, + 
0.48846845580593556, + 0.4795467962874135, + 0.47068441886818235, + 0.4618801572796949, + 0.45313288299953625, + 0.44444150356293877, + 0.43580496097018573, + 0.4272222301833284, + 0.41869231770617055, + 0.4102142602419524, + 0.40178712342359846, + 0.3934100006117949, + 0.38508201175651535, + 0.3768023023179533, + 0.3685700422431103, + 0.3603844249945778, + 0.3522446666282908, + 0.34415000491726844, + 0.33609969851857424, + 0.3280930261809134, + 0.32012928599047263, + 0.3122077946527724, + 0.30432788680844713, + 0.29648891438102065, + 0.2886902459548609, + 0.2809312661816289 + ], + [ + 1.8103060423412103, + 1.6557125446834926, + 1.5916778212989726, + 1.542542249878263, + 1.501119047025775, + 1.4646244726990991, + 1.4316308555276556, + 1.4012900932312429, + 1.373049600256735, + 1.3465255493680572, + 1.3214384782909168, + 1.2975774155818742, + 1.2747784574153158, + 1.2529112596829872, + 1.2318701398829908, + 1.2115680004809144, + 1.1919320517103396, + 1.1729007224647638, + 1.1544213792144973, + 1.136448608722727, + 1.1189429030569877, + 1.1018696373767407, + 1.0851982645612248, + 1.0689016729751568, + 1.0529556687141008, + 1.0373385540526219, + 1.0220307811099474, + 1.0070146649523686, + 0.9922741441212757, + 0.9777945793430387, + 0.9635625832336698, + 0.9495658753573234, + 0.9357931581722596, + 0.922234010299021, + 0.9088787942431787, + 0.8957185762497288, + 0.8827450563949042, + 0.8699505073610996, + 0.8573277206120236, + 0.8448699589033559, + 0.8325709142406236, + 0.8204246705395598, + 0.808425670361619, + 0.7965686851939012, + 0.7848487888225385, + 0.7732613334148765, + 0.7618019279810657, + 0.7504664189319901, + 0.7392508724894213, + 0.7281515587371863, + 0.7171649371300219, + 0.7062876433005126, + 0.6955164770247638, + 0.6848483912247951, + 0.674280481900546, + 0.6638099788972175, + 0.6534342374247714, + 0.6431507302560253, + 0.632957040538129, + 0.6228508551594963, + 0.6128299586206184, + 0.6028922273627446, + 0.5930356245133147, + 0.5832581950113085, + 0.5735580610794688, + 0.5639334180137023, + 0.5543825302629123, + 0.5449037277751538, + 0.5354954025883174, + 0.5261560056456431, + 0.5168840438181939, + 0.5076780771180923, + 0.49853671608778427, + 0.48944069995129147, + 0.4804038767848481, + 0.4714279097232813, + 0.4625115856204568, + 0.45365373110856744, + 0.44485321079570495, + 0.43610892556709746, + 0.42741981098281734, + 0.4187848357653493, + 0.41020300037092916, + 0.4016733356390586, + 0.39319490151502345, + 0.38476678584065327, + 0.37638810320891936, + 0.36805799387829696, + 0.3597756227431246, + 0.3515401783564742, + 0.3433508720022894, + 0.3352069368137943, + 0.32710762693538176, + 0.3190522167253864, + 0.311039999997336, + 0.30307028929742963, + 0.2951424152161557, + 0.28725572573209335, + 0.2794095855860854, + 0.2716033756840702 + ], + [ + 1.8103060423412103, + 1.654779529862679, + 1.5903583390854834, + 1.5409262208043994, + 1.4992530173841483, + 1.4625381881357455, + 1.4293454452942085, + 1.3988215680458334, + 1.3704106358297563, + 1.343726504905617, + 1.318488026366453, + 1.2944829554973958, + 1.2715463992675886, + 1.2495472269057766, + 1.2283791180867252, + 1.2079544496181396, + 1.1881999924270863, + 1.1690538038082838, + 1.1504629325740292, + 1.1323816914059759, + 1.1147703339302808, + 1.0975940263358477, + 1.080822037141533, + 1.064427091085782, + 1.0483848482472067, + 1.0326734799485549, + 1.0172733203321487, + 1.0021665777307778, + 0.9873370937504564, + 0.9727701407654595, + 0.9584522505952087, + 0.9443710686880065, + 0.9305152293183023, + 0.9168742482107102, + 
0.9034384297058384, + 0.8901987861309826, + 0.877146967470024, + 0.864275199768843, + 0.8515762309856242, + 0.8390432832149004, + 0.8266700103916953, + 0.8144504607265257, + 0.8023790432401562, + 0.7904504978641549, + 0.7786598686535813, + 0.767002479724816, + 0.755473913587163, + 0.7440699915834386, + 0.7327867561939672, + 0.7216204549914929, + 0.7105675260625752, + 0.6996245847349043, + 0.6887884114703431, + 0.6780559408009464, + 0.667424251200205, + 0.6568905557946636, + 0.6464521938322397, + 0.6361066228332305, + 0.6258514113584059, + 0.6156842323359074, + 0.6056028568950687, + 0.5956051486608708, + 0.5856890584676606, + 0.5758526194550794, + 0.566093942512962, + 0.5564112120453223, + 0.54680268202653, + 0.5372666723254108, + 0.527801565275357, + 0.5184058024706184, + 0.5090778817708101, + 0.49981635449732575, + 0.4906045687700178, + 0.4814507200584197, + 0.4723593571469348, + 0.4633292176238708, + 0.45435908102141886, + 0.4454477668901702, + 0.43659413298581007, + 0.4277970735601029, + 0.4190555177489298, + 0.4103684280507278, + 0.40173479888920827, + 0.3931536552547191, + 0.38462405141905387, + 0.3761450697189139, + 0.3677158194035921, + 0.359335435542784, + 0.3510030779907337, + 0.34271793040320353, + 0.33447919930401726, + 0.32628611319814393, + 0.3181379217285297, + 0.3100338948740603, + 0.30197332218623074, + 0.29395551206226544, + 0.2859797910525793, + 0.2780455032006217, + 0.2701520094132681, + 0.26229868686005287 + ], + [ + 1.8103060423412103, + 1.6538487824010555, + 1.5890420634021105, + 1.5393141189118513, + 1.4973915224609007, + 1.4604569735416693, + 1.4270655889338393, + 1.3963590417289722, + 1.3677780844630107, + 1.3409342625207457, + 1.3155447444613015, + 1.2913960153926138, + 1.2683221954824924, + 1.246191369208384, + 1.2248965799717193, + 1.2043496801997455, + 1.1844770025805906, + 1.165216233713233, + 1.1465141055239108, + 1.1283246572788004, + 1.1106079047421282, + 1.0933288056400685, + 1.076456444579117, + 1.0599633830690833, + 1.0438251355264685, + 1.028019742640436, + 1.0125274208631019, + 0.9973302720531333, + 0.9824120411167339, + 0.9677579122907933, + 0.9533543367944882, + 0.9391888861403828, + 0.9252501265848112, + 0.9115275111093057, + 0.8980112860308606, + 0.8846924098900986, + 0.871562482700281, + 0.8586136839841074, + 0.8458387182999624, + 0.8332307671800452, + 0.8207834465813925, + 0.8084907690960731, + 0.7963471102856708, + 0.7843471786029065, + 0.7724859884440174, + 0.7607588359425873, + 0.749161277171474, + 0.7376891084663456, + 0.7263383486237742, + 0.715105222760126, + 0.7039861476457113, + 0.6929777183526699, + 0.6820766960755577, + 0.671279997001156, + 0.6605846821190978, + 0.6499879478779008, + 0.639487117602228, + 0.6290796335969222, + 0.6187630498718202, + 0.6085350254287162, + 0.5983933180582808, + 0.5883357786003724, + 0.578360345626118, + 0.5684650405044958, + 0.5586479628199713, + 0.5489072861111364, + 0.5392412539032806, + 0.5296481760104951, + 0.5201264250852558, + 0.5106744333955439, + 0.501290689811423, + 0.4919615906494659, + 0.4826887868203096, + 0.47348015695714674, + 0.4643343868399855, + 0.455250206502238, + 0.4462263881722136, + 0.4372617443361113, + 0.4283551259138625, + 0.4195054205398874, + 0.4107115509414875, + 0.40197247340817865, + 0.3932871763458067, + 0.38465467890978, + 0.37607402971218784, + 0.3675443055979809, + 0.3590646104857603, + 0.35063407426905113, + 0.3422518517742501, + 0.3339171217717134, + 0.3256290860367106, + 0.3173869684572037, + 0.30919001418563175, + 0.3010374888320734, + 0.29292867769634934, + 
0.2848628850367908, + 0.27683943337355854, + 0.26885766282453316, + 0.26091693047194175, + 0.25301660975799356 + ] + ] + }, + { + "hovertemplate": "cf_d: %{x:.3f}
cf_y : %{y:.3f}
Bound: 1.519", + "marker": { + "color": "red", + "line": { + "color": "white", + "width": 2 + }, + "size": 10 + }, + "mode": "markers+text", + "name": "Scenario", + "showlegend": false, + "text": [ + "Scenario" + ], + "textfont": { + "color": "white", + "size": 14 + }, + "textposition": "top right", + "type": "scatter", + "x": [ + 0.03 + ], + "y": [ + 0.03 + ] + }, + { + "hovertemplate": "cf_d: %{x:.3f}
cf_y : %{y:.3f}
Bound: 1.81", + "marker": { + "color": "red", + "line": { + "color": "white", + "width": 2 + }, + "size": 10 + }, + "mode": "markers+text", + "name": "Unadjusted", + "showlegend": false, + "text": [ + "Unadjusted" + ], + "textfont": { + "color": "white", + "size": 14 + }, + "textposition": "top right", + "type": "scatter", + "x": [ + 0 + ], + "y": [ + 0 + ] + } + ], + "layout": { + "template": { + "data": { + "bar": [ + { + "error_x": { + "color": "#2a3f5f" + }, + "error_y": { + "color": "#2a3f5f" + }, + "marker": { + "line": { + "color": "#E5ECF6", + "width": 0.5 + }, + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "bar" + } + ], + "barpolar": [ + { + "marker": { + "line": { + "color": "#E5ECF6", + "width": 0.5 + }, + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "barpolar" + } + ], + "carpet": [ + { + "aaxis": { + "endlinecolor": "#2a3f5f", + "gridcolor": "white", + "linecolor": "white", + "minorgridcolor": "white", + "startlinecolor": "#2a3f5f" + }, + "baxis": { + "endlinecolor": "#2a3f5f", + "gridcolor": "white", + "linecolor": "white", + "minorgridcolor": "white", + "startlinecolor": "#2a3f5f" + }, + "type": "carpet" + } + ], + "choropleth": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "choropleth" + } + ], + "contour": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "contour" + } + ], + "contourcarpet": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "contourcarpet" + } + ], + "heatmap": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "heatmap" + } + ], + "heatmapgl": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "heatmapgl" + } + ], + "histogram": [ + { + "marker": { + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "histogram" + } + ], + "histogram2d": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" 
+ ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "histogram2d" + } + ], + "histogram2dcontour": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "histogram2dcontour" + } + ], + "mesh3d": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "mesh3d" + } + ], + "parcoords": [ + { + "line": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "parcoords" + } + ], + "pie": [ + { + "automargin": true, + "type": "pie" + } + ], + "scatter": [ + { + "fillpattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + }, + "type": "scatter" + } + ], + "scatter3d": [ + { + "line": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatter3d" + } + ], + "scattercarpet": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattercarpet" + } + ], + "scattergeo": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattergeo" + } + ], + "scattergl": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattergl" + } + ], + "scattermapbox": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattermapbox" + } + ], + "scatterpolar": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterpolar" + } + ], + "scatterpolargl": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterpolargl" + } + ], + "scatterternary": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterternary" + } + ], + "surface": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "surface" + } + ], + "table": [ + { + "cells": { + "fill": { + "color": "#EBF0F8" + }, + "line": { + "color": "white" + } + }, + "header": { + "fill": { + "color": "#C8D4E3" + }, + "line": { + "color": "white" + } + }, + "type": "table" + } + ] + }, + "layout": { + "annotationdefaults": { + "arrowcolor": "#2a3f5f", + "arrowhead": 0, + "arrowwidth": 1 + }, + "autotypenumbers": "strict", + "coloraxis": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "colorscale": { + "diverging": [ + [ + 0, + "#8e0152" + ], + [ + 0.1, + "#c51b7d" + ], + [ + 0.2, + "#de77ae" + ], + [ + 0.3, + "#f1b6da" + ], + [ + 0.4, + "#fde0ef" + ], + [ + 0.5, + "#f7f7f7" + ], + [ + 0.6, + "#e6f5d0" + ], + [ + 0.7, + "#b8e186" + ], + [ + 0.8, + "#7fbc41" + ], + [ + 0.9, + "#4d9221" + ], + [ + 1, + "#276419" + ] + ], + 
"sequential": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "sequentialminus": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ] + }, + "colorway": [ + "#636efa", + "#EF553B", + "#00cc96", + "#ab63fa", + "#FFA15A", + "#19d3f3", + "#FF6692", + "#B6E880", + "#FF97FF", + "#FECB52" + ], + "font": { + "color": "#2a3f5f" + }, + "geo": { + "bgcolor": "white", + "lakecolor": "white", + "landcolor": "#E5ECF6", + "showlakes": true, + "showland": true, + "subunitcolor": "white" + }, + "hoverlabel": { + "align": "left" + }, + "hovermode": "closest", + "mapbox": { + "style": "light" + }, + "paper_bgcolor": "white", + "plot_bgcolor": "#E5ECF6", + "polar": { + "angularaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "bgcolor": "#E5ECF6", + "radialaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + } + }, + "scene": { + "xaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + }, + "yaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + }, + "zaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + } + }, + "shapedefaults": { + "line": { + "color": "#2a3f5f" + } + }, + "ternary": { + "aaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "baxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "bgcolor": "#E5ECF6", + "caxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + } + }, + "title": { + "x": 0.05 + }, + "xaxis": { + "automargin": true, + "gridcolor": "white", + "linecolor": "white", + "ticks": "", + "title": { + "standoff": 15 + }, + "zerolinecolor": "white", + "zerolinewidth": 2 + }, + "yaxis": { + "automargin": true, + "gridcolor": "white", + "linecolor": "white", + "ticks": "", + "title": { + "standoff": 15 + }, + "zerolinecolor": "white", + "zerolinewidth": 2 + } + } + }, + "title": {}, + "xaxis": { + "range": [ + 0, + 0.15 + ], + "title": { + "text": "cf_d" + } + }, + "yaxis": { + "range": [ + 0, + 0.15 + ], + "title": { + "text": "cf_y " + } + } + } + }, + "text/html": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "causal_contrast_model.sensitivity_plot(idx_treatment=0)" ] @@ -518,7 +12411,7 @@ ], "metadata": { "kernelspec": { - "display_name": "dml_dev", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -532,7 +12425,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.4" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/doc/examples/py_double_ml_basic_iv.ipynb b/doc/examples/py_double_ml_basic_iv.ipynb index 98aa2548..003dd0b1 100644 --- a/doc/examples/py_double_ml_basic_iv.ipynb +++ b/doc/examples/py_double_ml_basic_iv.ipynb @@ -45,16 +45,11 @@ ] }, { - "attachments": { - "basic_iv_example_nb.png": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAaMAAACbCAYAAADPy/SAAAAAAXNSR0IArs4c6QAAIABJREFUeF7tnQl0FUUWhi9CWAwEoyAgYAyi7OuAMg4CCsMSUTQuoCyOrIqAgiDg4OACAoqjw6LRIKNsIsIYEWWJIAKCOIgaQWBkF1FkH1EWWeb85XR4Sfq91+/1e73+dY5Hj+nuqvrqvv67qm7dW+jcuXNnROQCYSEBEiABEiABewjMLkQxsoc8ayUBEiABEsglQDGiMZAACZAACdhOgGJk+xCwASRAAiRAAhQj2gAJkMB5Alu2bJEff/xRzp07J0lJSdKwYUP1xx07dsiuXbt0UdWrV0+Sk5PV35YvX657TZkyZaR27drqb6jjhx9+0L2ucePGkpiYKIcOHZKcnBzdaxISEqRcuXJSoUIFdS2LJwhQjDwxjOwECZggMHjwYJk+fbr89NNPeZ5yzTXXyNq1a9X/GzFihIwePVq3luzsbGnVqpX8/PPPSsD0SocOHSQrK0v9qWfPnvLaa6/pXvf1118r0Vq4cKGkpaWF7dXVV18tbdq0ke7du0v9+vXDXs8LHEuAYuTYoWHDSCCOBBYvXqxe4igrV66UrVu3SpUqVdQ/lStXjmPNsXn0zp07Zc+ePbJs2TJ555135PHHH5f09PTYPJxPsYMAxcgO6qyTBOwisGLFCrn//vvlyJEjsm3bNilRooRdTYlLvZ988omMGjVKzfSwNMjiGgIUI9cMFRtKAiYJDBw4UF588UV54IEHZOzYsUGX1ExWY+vtu3fvVjOk77//Xt58801p0aKFre1h5YYJUIwMo+KFJOBiAr1791azhXfffVdat27t4p4YazoENyMjQ9566y256667jN3Eq+wkQDGykz7rJgErCMBBAQ4D2CeCU4JfymOPPSZjxoyRuXPnyu233+6Xbru1nxQjt44c200CRglgyWr//v2+9Dbr0qWLzJw5U3kKli1b1igyXmc9AYqR9cxZIwlYQwBOChdddJE1lTm4llWrVknTpk0d3EI2TUQoRjQDEvAiAZz5SU1NlVmzZvlij8jIGB49elRKly5t5FJeYz0BipH1zFkjCcSfAA6pYgN/+/btnvSai5QglipxmHbp0qW5USUifQavjysBilFc8fLhJGADAeyPpKSkqIgJgwYNsqEFzqyyZs2aat8Ms0UWxxGgGDluSNggEjBJYPz48TJs2DAVnsdrh1rNoMnMzJS+ffuqOHuVKlUy8yjeG3sCFKPYM+UTScBeAm3btpUzZ84IYsaxnCdw8uRJueyyy+SZZ56RPn36EI2zCFCMnDUebA0JmCNw+vRpKVmypDz77LMyYMAAcw/z4N2dOnWSiy++WF566SUP9s7VXaIYuXr42HgSyEfg+PHjKgwOZkeYBbDkJYCAqkhzgeCwLI4iQDFy1HCwMSRAAnElsG7dOiVGiErB4igCFCNHDQcbQwIkQAL+JEAx8ue4s9deJTBnzhzp2LGjytTKQgIuIkAxctFgsakkEJYAxSg0IkTxvvvuu+Xs2bNhWfICSwlQjCzFzcpIIM4EKEYUozibWLweTzGKF1k+lwTsIOAUMfrll19U7iTkUEL21YSEBGnZsqVK7Fe1alXZuHGj8vizunBmZDVxw/VRjAyj4oUk4AICThCj9evXqzNOl19+uQwfPlxq1KghRYoUkcOHD8s777wjTz31lApV1LlzZ8uJUowsR260QoqRUVK8jgTcQMBuMfr222+la9euUrduXXnhhRckMTGxALaPP/5YPv30Uxk6dKjlSClGliM3WiHFyCgpXkcCbiCAMzSYeSxbtszy5iIE0ZNPPilTpkyRrKysoFllEZYH0bPT0tIsb+Pbb7+t0pDT29By9OEqpBiFI8S/kwAJGCOwa9cuueeeeyQ5OVntFeHfTiucGTltRHLbQzFy7NCwYSTgMgJYemvdurUSJCzRMWK4ywbQ3uZSjOzlz9pJILYE4KU2d+5cGTlyZGwfbOBp8JpDem9ExKYYGQDGSwIJUIxoDyTgJQJ27ols3bpVEBUbSewmT54spUqVchza/fv3yzfffCPNmzd3XNt83iCKkc8NgN33GAE7velwtqhfv36ydu1agSjWqlXLcXS5Z+S4IdEaRDFy7NCwYSQQBQE7xQjNxVIdzg/ddNNNKqeSnms3RGvJkiXSrl07KV68eBS9jP4WilH07OJ8J8UozoD5eBKwlIDdYgSXaRxsHTRokNSpU0eGDBkiTZo0kaJFi8qpU6fU+SJkoMWh2LJly1rKBpVRjCxHbrRCipFRUryOBNxAwG4x0hjt3r1bpk2bJv/617/kiy++UNlVEQ4IQUpxvqhYsWK24KQY2YLdSKUUIyOUeA0JuIUAPOnuvPNOHuoMMmCzZs1Sy4g89Oo4i6YYOW5I2CASIIG4EZgwYYKKi7dv37641cEHR0WAYhQVNt5EAiTgWgKbN2+W6tWru7b9Hm04xcijA8tu+ZgAPNXgMNCiRQsfU2DXXUaAYuSyAWNzSSAsgf79+8vq1avl888/D3utny7Ys2ePbNmyRTlSsDiOAMXIcUPCBpGASQLwXmvYsKF8+eWXUq9ePZNP887tEOl58+bJzp071cyRxVEEKEaOGg42hgRiRKB+/fpSpUoV5VrNInLgwAGpWLGijB07VgYOHEgkziNAMXLemLBFJGCewOLFi1Va7w8//JDLUiLSpUsXef/992Xv3r2MJm7evOLxBIpRPKjymSTgBAJ33HGHOnAK77GEhAQnNMmWNmjBY19//X
W59957bWkDKw1LgGIUFhEvIAGXEvjxxx8lPT1dhecpV66cS3thrtlwWqhdu7aaJc6ePdvcw3h3PAlQjOJJl88mARKwl8ChQ4dUWgs4LjgxpYW9dBxVO8XIUcPBxpBAnAgsXLhQkG8IHmUsJOBAAhQjBw4Km0QCMScAh4a77rpLOnToIFOmTPG0azNctxGQdeLEidKoUaOYs+QD40KAYhQXrHwoCTiQwLZt21SeIZRx48YpYfJaycrKkq5du0rdunUFQVFTUlK81kWv9odi5NWRZb9IQI/AiRMnVKDQUaNGyXXXXSdvvPGGVK1a1dWwdu3apYQH6SG++uorldQPeZRYXEWAYuSq4WJjSSBGBDBL6t27t2RkZMhVV10lBw8elPnz50tqamqBGpAcT8vIumLFCjl79myBa5CvCLMRlE2bNgWNiq0969ixY7Ju3Trd3lSuXFmuvPJK9becnByBE0JggaDin1tvvVX979tuu01Fm+jRo4f07NlTypcvHyNKfIyFBChGFsJmVSTgOALr169X4oGXP5LeHT58uEAbd+zYIVdccYX6/4UKFdLtQ5s2bWTRokXqbzhgOnPmTN3rtm/frgRPC1mkdxGyxD7//PPqT3DJxn5X/tK8eXNZvny5+t+DBw+Wp59+modZHWddETWIYhQRLl5MAh4igBTgeNkjjt2yZctc2TN4CKL9mJVBDEuWLOnKfrDRQjGiEZCAHwkgqjdmM/A2++CDD1w9q8AS3Y033qj2vhD+KCkpyY9D6vY+U4zcPoJsPwlESgDLW1iSa9q0qbz33ntSrFixSB/huOs3btyo8jdVqlRJzfKSk5Md10Y2KCQBihENhAT8RCA7O1tuvvlmadWqlQoT5KWYdchVhL2kMmXKqP0k/JvFNQQoRq4ZKjaUBEwSQNRqeJ61b99e5syZI0WKFDH5ROfdDgeJZs2aqb2jjz/+2Lcx+Zw3MmFbRDEKi4gXkIAHCGAWhAgMiOQ9Y8YMKVy4sAd6pd8FnDvCDAl9hCs68hixOJ4AxcjxQ8QGkoBJAohWDXfrzp07C9IoBHPPNlmNo27//vvv1QzpzJkzaobESAyOGh69xlCMHD9EbCAJmCAwbdo0+ctf/qIOg77yyiu+ECIN1759+9QMCQdsMUNC5lsWxxKgGDl2aNgwEjBJIDMzU/r06SMPPvigChrqx4J04/Cyw78xQ6pWrZofMbihzxQjN4wS20gCkRKYNGmSSheB6ATPPfdcpLd76npElcA5JCTag5ddrVq1PNU/j3SGYuSRgWQ3SCCXwPjx41WgUITIGTFiBMmIyH//+1/lzo6IDTiHVL9+fXJxFgGKkbPGg60hAXME/va3vykRwmwIsyKW8wSwd4SoExs2bFCRGho3bkw8ziFAMXLOWLAlJGCOAGZDCDA6YcIE6devn7mHefTu48ePq+gTiBiOAKxIo8HiCAIUI0cMAxtBAiYJYH9o8uTJynW7W7duJp/m7dtPnjypolCsWrVKxeWDgwOL7QQoRrYPARtAAiYInDt3TnnMTZ06VR1m7dSpk4mn+efW3377TUWjwHId4vP9+c9/9k/nndlTipEzx4WtIoHwBCBEOEOELKcI74OXK4txAqdPn1ZRKRYsWKDi9Gkp2Y0/gVfGkADFKIYw+SgSsIwAIgsgqsK8efP4IjVBXeM4d+5cCroJjjG4lWIUA4h8BAlYSkD7osd+B5eYzKPXZpjITsulTvM8o3wCxShKcLyNBGwhoO114KwMN99jNwTa3tuUKVPoBBI7rJE8iWIUCS1eSwJ2EtC8wNasWUO35DgNhOaViDh+vXr1ilMtfKwOAYoRzYIE3EBAOx+zfv16HtiM84DhvBaiWCCeH89rxRn2+cdTjCxDzYpIIEoCWuSATZs2MZRNlAwjvQ1RLBDNgpEsIiUX9fUUo6jR8UYSsIDAkSNHpGXLloKEcR999JHUqVPHglpZBQhoMf5GjhwpTzzxBKHElwDFKL58+XQSiJ4Aok1DiJAoDtGma9SoEf3DeGdUBLBUN2DAAHn00Udl3LhxUT2DNxkiQDEyhIkXkYDFBALz8KxevZqJ4SzmH1idlqDQz3mhLMBPMbIAMqsggYgIQIiaNm3KDKURUYvvxYEZc1999dX4VubPp1OM/Dnu7LVTCQSmyv7kk08kJSXFqU31Xbtmz54t99xzjy9TuFsw2BQjCyCzChIwRAB7Q82aNROEqEGKbAqRIWyWXgRBQhimzp07q8OxhQoVsrR+D1dGMfLw4LJrLiIAIfrTn/4khQsXlhUrVkjFihVd1Hp/NRVBVRFgFbMkClLMxp5iFDOUfBAJREkAbtvNmzdXQgRnhXLlykX5JN5mFYH3339fbr31VrnjjjtUPDuMHYspAhQjU/h4MwmYJLB9+3a1NFeyZEm1NEchMgnUwtshSEjb0b59e3n77bcpSObYU4zM8ePdJBCaALKJwjNOr0CIkPa6TJky6hwR/s3iLgLZ2dkqDxIECTmlihQpUqADv/76q1x44YXu6pj1raUYWc+cNfqJAL6cGzRooELLBJYtW7aopTkI0MqVKyU5OdlPWDzVVwgS0pi3atVKsrKy8ggSoqzfeOON0qNHD5UIkSUoAYoRjYME4kUArtnarOiZZ56R4cOHq6ogRPj/lSpVUrHmKETxGgHrnouZbVpamhIeODgkJCSoypHOHA4pV1xxhRp3FooRbYAELCeQnp4u2Fc4deqUqhtBN9u1ayctWrSQ1NRUFX07KSnJ8naxwvgQgPMJZkf40EDSw9tvv12WLFkimB2h/POf/+TsKDh6zoziY5Z8qt8JBM6KAllg7wDBTrG0U6pUKb9j8lz/Me5t27ZVe0SHDh0SZOXVSrVq1WTz5s2e63OMOkQxihFIPoYE8hDIPysK/OPf//53GThwIIl5lACW5rD8evbs2QI9xLmke++916M9N9UtipEpfLyZBHQIYLkGB1hDlczMTBVWhsVbBLp27SqzZs3SFSL0lLOjoONNMfLWT4G9cQIB7BUsWLAgd68oWJv4leyE0YpdG+AxN3Xq1LAP5LjrIqIYhbUcXkACERAwMivC44oWLarEaubMmSqsDIu7Cdx///3yyiuvGOoEZ0cUI0OGwotIwAwBhIeBJ5XmQZf/WcWKFZOTJ0/K3XffLciPE245z0xbeK+1BN544w3BfmBOTo5ccMEFQZfq0Cpc261bN2sb6OzaODNy9viwdW4isGbNGhVRQa9oM6G+ffsqEapZs6abusa2RkBg/vz58uKLL6o08dq457+9evXqsmnTpgie6vlLKUaeH2J20DICd955p+BFFDgrwhdyiRIl5KGHHpJ+/fpJhQoVLGsPK7KXAA67Tp48WYUJ0mbEgS3i7CjP+FCM7DVX1u4VAnqzIgjPoEGD1EwIgsTiTwJff/21TJo0SZAhNnCmxNkRxcifvwj2Oq4EAmdFNWrUkEceeUTFI2MhAY3Anj17lCj94x//UFEZkESRs6Nc++DMiD8VEjBLQJsVIQzMww8/rMLAsJBAMALHjh1Ty3dwdihbtqxs2LCBsERiK0Z79+6V/fv3y
5EjR+TcuXOOBYww74iWDEO45JJLHNtOvzXMrfaDrJ9w7UWQTBYSiIQA3MEROgiHZZHt98CBA359f5oTo8OHD6ukUm+99ZbKx6IX/iKSgbHj2tKlS6uMjZ06dVIRdpmx0bpRgP1gcxf2g8RybrQfJMWDO3fHjh1pP9aZjidqQuw67f3pVvu/6KKLVIJB2D+CxJp4f0YvRk899ZSMHDlSihcvrl7k+DpMSUmRSy+91BVJwvAVsm/fPvniiy9UHnskQatSpYpkZGSolwpLfAk8+eST8sQTTxSwH2Q6dcNsNZj94EsXP0oWEghFQLN/zIpw5gxLu3h/utH+EY0cAWKvvPJK5aQR5QpB5GL06aefyn333SdYUnn66afVJm1iYqLrLQ9ZNx977DH1lY4T8dhkZObN2A8r9le6d+8uP/zwg+CDxqv2M2HCBFeIauxHmE8MRQD2jyR7+BCG/ffq1csTnpbbtm2Tv/71r7nvz4kTJ8rFF18ciTFEJkZIEIYKcXJ4/Pjxas/FawVi27t3bzl48KCaQgc7xOi1flvRn9GjR8uIESNoP1bAZh2OI4CPd2T8xQfY2LFjPfmxixkS9k+1LZw//vGPRsfBmBjBIQHr4kuXLvWNKyI2FGfMmCFjxoyRYcOGGQXK63QIwH7g+oxkctOnT5cuXbp4mtOJEyfUCwfRm/HSGTp0qKf7y86FJoAXM+wf70+8Uzp37uxpZMePH1dpMvAx/+yzz8qQIUOM9NeYGCEz5TfffCMffPCBNGrUyMiDPXENQOJF8vzzz6vDiyzREYD9IPQJsp76yX4gREg1jtAwiMDA4k8CiD+4detW9f78wx/+4BsI+JDH1ge2PAYMGBCu3+HFCCqOnO7Y4G/YsGG4B3ru71jXxWbju+++K+3bt/dc/+LdIc1+1q5dqzKc+q3ASQPLM7Qfv4387/31u/1jWRLL84sWLQrnGBZajHA6GM4KyM2SlpbmT2v6v0EhEjOcHOjUYNwM4KWI5Sqw87P9YIl7yZIltB/jpuOJK5HbCA4KtP/ft3i+/fbbUO/P4GKEdf6qVasqt0N4Rvi5IOT/VVddJThhj30AlvAENPvBlyGm6X4uv/76qyAOWbNmzdSeAYv3CcD+U1NTlefcCy+84P0Oh+gh7B9agiMP06ZNC3ZlcDGCRxmW5zAbKFWqlK9hovNYZsHhWOS2v+GGG3zPIxwAfBFmZWXRfv4PCr+l9PR02k84w/HI37EigAjufH/+PqDz5s1TTnDwtgvioawvRj/99JOUL19epdCFsrP8TgDKjigBECSW4AQ0+8EyHROIneekHYaFVyGLdwlo9o9ZgNc9RyMZRdg/UqpgyVqn6IsRvH+w6Yo4SYUKFYqkPk9fi2RZLVu2VAc2cVKaRZ8AliVGjRpF+8mHB+vmiO5B+/H2LwfetziHiXFmOU8gOztb2rVrp2Lw6bw/9cXommuukQYNGhjO6e4X4Aj5jlA1CIM0cOBAv3Q74n42btxYuXC//PLLEd/r5Rs0+4F3Jl29vTvScN9u0qSJiszNcp6AZv+Y6PTv3z8/moJidPr0aUlISFDx2nBwiSUvgbZt2wqCY86dO5dodAho9oMlChwcZslLoHXr1oLgkggQy+I9Apr9++FwazSjh6U6eCTPnj07vBhhww0B75Ay9/rrr4+mPk/fg9TRn332mfqHpSABxKiC5wzOpeGwH0teAn379pX169cLwk6xeI8A3JevvvpqWb16tUQQCsd7IIL0CKGCcnJyFJ98peDMCCLUvHlzta532WWX+QaS0Y5iP23cuHFcDw4CDKHwEXEB6+VwgmHJSwAJ1bCngN8Xi/cIYF8ZUasRCBUZDFjyEsBeGo56fPfdd+HFSHPBc3JyPDsHGPGWkC6DfPRHgfYT2joRFR4pV2g/dv6K41c3lu8Rh47jq8/4zTffVFkRdPgUnBlpMLHZBDc8lrwEND5w8aanYUHroFiH/sVgrwiJyPiy8uabBR8bCBTgxkSRVowI9opwEB76EnaZjmIUeki8LkbYz8CeT1JSUlS2STGiGEVlOA65CQGhK1SoIMnJyVG1iGIUGhvFKCqz0r/JD2IE11SclsZyA/4dyQyZYkQxiuHPzfJHQYxq1aqloq1gOR72D+9io4Vi5CExOnr0qPTs2TOk63SHDh3UGSg7Dp76RYyKFCkicFMtUaKEWlaCMBkJdmqXGG3ZskW186uvvirwa0DGSZx9gncfXi6IE2fXEiuX6Yy+1u25ThOjwoULq6WkokWL5tr/zTffHLZRdokRwuwgdmZgadOmjcycOTM34zD+OzAiRP6/h+1cDC5w1cwIGVYnTZqksq1iuqwVrLEDJjKFIpo4PP7sKH4Ro0C2+EGeOnVKpRHG5iNe6MH42yVGaC/W6TMzM1WmycBzHr/88os6qgAvHriUImMxroHgWl0oRlYTj6w+TYz07B/nw7AfhA+zYPEp7RIjtHfnzp0qfxCcBAYPHqyioBQrViwPgHXr1inbx8F9REOw+jfgKjHavXu3IOJt3bp180CEyzCCbyI/DAzCri9bP4pR4EDAuBHFvHLlymojEsIUmDDMTjFCO7UvRL1Dhz///LNKloi/ZWRk2GJHFKPIxMHqq/XESM/+K1asmGv/mHVrxU4xQhvgUt6nTx+1qjFlypQ8xyu0D3osu9v1DnWVGOkZH85k4DR/jRo1VBrbxMREq200tz5NjLx6gn7Hjh2G02RrwoRxgTDhixHLZHa6vocSIwyitpxXtmxZFc4+cPZthVFpYuRV+7GCYTzr2LNnj+Gszpr945Arlr/wYYYDnXZ70+HDHdFzbrrppjzvS2RbXr58ufqot3pGpI2Zq8UISyyPPvqoOiRo1z5RoPFrYhTPH4Tbnq0t4+HEOfI+4SVvl+tyODE6ceKEPPLII/LSSy8JfrTIMWRl0cTIyjpZV3wJaPaPmJ6wf+Q8s9O1G789LNUhfiaCFkMcEUkcy9RDhgyJ2lMwFhRdK0aAOmHCBAXUzn0iPTHy6jkjuHYHLrsFM0DtqxA/Pm1WVLNmTXHyMp3WFwRqRDpkO+KHcZkuFq+0+D0j3DKdVrNm/1WqVMmdFdWpU0fsXqbT2odlOkT6QLBiLNdt3rxZZRzAKoadxbVi5JR9IorR7wS0HyDCRMGRActy+BoMLBSj0D91ipGdr8LwdYcSI83+4cWr7Zfmjz/nFDFCT7X9I+Rfw4oOgvTaXVwpRgg4iH0ifKXbvU/kZzHCGYvffvtNSpcunetJhNhbwYrTxej48eNq+QJLKUjyhVD/VhaKkZW0I68rvxhp9o9s11juwr4QclIFK04SI7QRqwAI0YV2VatWLXIgMb7DdWLktH0iv4oR1sK1g38422WkOF2MNAcGLK+89tprlq+fU4yMWJF91wSKETb5Nfu/7bbbDDWKYhQak6vECAfNENkYiamC7RMdPnxYYDR2pCjwg2s3vqa06AsQpEiKk8Uo0LV7+vTpYlRgI+l/uGspRuEI2ft3vFeGDRuWa/849B1JoRh5SIzC7RNBiCBUOLBlZKM9EkMycq3XxejQoUPqcGu0xU4x
CnboFUtz8LJD+Pp///vfMnHiRBU5O5IwR9HyyH8fxShWJOPznP379wvc/qMtThIj2P3jjz8ur776qlqmQ2JQu85najxdMzPS9onWrl0b0hauu+46teafkpISrc1EfZ/XxShqMP+/0S4xChcOCDaDcC6IOWZnnhmKkVkLc/b9ThEjI+GB7CAZkRgtWLBA/WihqsWLF7ejvY6uU4vvZNc5GkfDERHNfshHf6SwPIhwLNgXZfEegffee09uueUW287ZOZ0otl4efPBBOXbsWP6mFsxn9Pnnn0ujRo1k69atKv04S14CyPKKw7dIz85SkABiXyE8CvikpqYSUT4CY8aMUY4T+H2xeI/AZ599Jtdee62KE2fHyo3TiY4ePVr5AvznP/8JL0ZIF41zJAgbYVcwUicD7devnwp5s3LlSic307a27d27VxC3C4FJr7/+etva4dSK+/btKxs3blTRH1i8RwDhhBC3cdWqVbY4WDmdKFYFsKSO9Oz5SsGZES7Ay2TAgAGGY5Q5HUAs2weniRYtWqjTzSz6BGA/Dz/8sAo9wpKXQIMGDaRVq1by3HPPEY1HCcD+Bw0apMJOseQlUK9ePeVIgRUmQ2KEr384EcDziOU8ge+++04uv/xyfvWHMQqsCWO5Lpwjit9sS7MffjV7e+QfeOAB+fLLL2XNmjXe7miEvcPSNMKHIY1L/sgVIqI/M/rwww/VKeNdu3aply/L7wQQCQL/wP3TbhdJJ49Jdna2Cj1C+8k7SmPHjlXu5bQfJ1uv+bYtXrxYff1jy6N8+fLmH+iRJyCPGM6QBrF/fTHCeQ1Mp+AOi816FlHehTi1j68eBNlkCU5Asx9knkSgRpbz9oNZIxJEsniXAOwfAUmxHIszkSwiOHCOVBv9+/dXCQB1ir4Y4UJsMCHKK5ZaApNH+RUs0ljArRteYvmzJ/qVSah+037y0kHmTYT1p/3449eyaNEiSUtLU8t1+ROF+oNA3l5iDw1nsELYf3AxwqMAE6HH4e6dnJzsR4aqz/B8QnDQqVOnqqRVLMYIIEoGPGfwg0xKSjJ2kwevwrIllm1ef/11FfyXxR8E8M5AHjbsvdP+26o8Z4h2HqSEFiOseWKDBoFxAAADF0lEQVS5DtMrvJALFy7sDysK6CWiQuDcFZKwzZ8/n3tFEViAZj+1a9cWvJD9aj/woLvhhhsEByJZ/EMADivwvvWz/SO7LFbW8DGG6DUhSmgxwo3wisJ5EaTVzczM9I8licjBgwcVSHzVwDMm0qCJvoIVpLOa/XTr1s13+48HDhxQ9oP0G7Qff/4aMCtCzq8+ffpIRkaGryDAUQF9v+SSS9S5qzARfcKLEehhrQ+BJTt27CiILeSHghlRmzZt5OTJk0qQK1So4Idux6WPmv0gHwxiCvqhYHkSX4PIBQX7oVeVH0Zdv494Z8L2kZAS+85+KNjewTJ9BPZvTIwAD+6K6enpgtS6iD9WpkwZzzLFkiTiS8GtHf1GRAoWcwRoP+b48W53E8A7Ex/zeH8uXLjQ03vwge/PpUuXGg1MbFyMYAo5OTnKqQGzheHDh6uAd17yLNu9e7dy254xY4aaFSEC9YUXXujuX4GDWh9oP3DvRGgcr9kPQvbDfjArov04yPgc0BTYP87fwfUb9o9jIl6zfxxbwOwvCvuPTIwwnsh3gzAX8AzCjKFHjx7qHzcHBUSOeHh6IIAf0gvjYCtiKLHEngD24eDmDPtB2JTu3bu73n7w9QfbQURu2A8Otvbu3Tv28PhE1xPIb/+wk/vuu0/Fs3NrQZAE2D8+wmD/CJXWq1evSLsTuRhpNWBDCmdvtJAXCO/QsGFDNQ2F952TIxScOHFCzfLwDwKeYkaUmJgoSC2MF0m5cuUiBcnrIyQA7kOHDi1gPziTgZAhbrEfBISF15RmP/gh2pkvKcJh4OU2EYD946MMUb5R8P6E5x3enz61/+jFSBvDo0ePqgjfEKUNGzaodOA7duywaYiNVQuvjmrVqknNmjWV6zrctnViJRl7GK8yRcCt9lO9enV1yp72Y2r4fX8z7B8HxBGvDdHc4Qrthvcn7D/w/dmkSROzY2lejMy2gPeTAAmQAAn4ngDFyPcmQAAkQAIkYD8BipH9Y8AWkAAJkIDvCVCMfG8CBEACJEAC9hNQYjTH/nawBSRAAiRAAj4mcOZ/DjaH9m+1TRoAAAAASUVORK5CYII=" - } - }, "cell_type": "markdown", "id": "14d56698", "metadata": {}, "source": [ - "![basic_iv_example_nb.png](attachment:basic_iv_example_nb.png)" + "![basic_iv_example_nb.png](../_static/basic_iv_example_nb.png)" ] }, { diff --git a/doc/examples/py_double_ml_cate.ipynb b/doc/examples/py_double_ml_cate.ipynb index ffaa14bf..a6d4d2f6 100644 --- a/doc/examples/py_double_ml_cate.ipynb +++ b/doc/examples/py_double_ml_cate.ipynb @@ -33,7 +33,7 @@ "\n", "We define a data generating process to create synthetic data to compare the estimates to the true effect. The data generating process is based on the Monte Carlo simulation from [Oprescu et al. (2019)](http://proceedings.mlr.press/v97/oprescu19a.html).\n", "\n", - "The documentation of the data generating process can be found [here](https://docs.doubleml.org/dev/api/datasets.html#dataset-generators)." + "The documentation of the data generating process can be found [here](https://docs.doubleml.org/stable/api/datasets.html#dataset-generators)." 
] }, { @@ -409,7 +409,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.2" + "version": "3.12.3" }, "vscode": { "interpreter": { diff --git a/doc/examples/py_double_ml_cate_plr.ipynb b/doc/examples/py_double_ml_cate_plr.ipynb index ef9dccd3..5293794b 100644 --- a/doc/examples/py_double_ml_cate_plr.ipynb +++ b/doc/examples/py_double_ml_cate_plr.ipynb @@ -30,7 +30,7 @@ "\n", "We define a data generating process to create synthetic data to compare the estimates to the true effect. The data generating process is based on the Monte Carlo simulation from [Oprescu et al. (2019)](http://proceedings.mlr.press/v97/oprescu19a.html).\n", "\n", - "The documentation of the data generating process can be found [here](https://docs.doubleml.org/dev/api/datasets.html#dataset-generators)." + "The documentation of the data generating process can be found [here](https://docs.doubleml.org/stable/api/datasets.html#dataset-generators)." ] }, { diff --git a/doc/examples/py_double_ml_gate.ipynb b/doc/examples/py_double_ml_gate.ipynb index adf83167..85853443 100644 --- a/doc/examples/py_double_ml_gate.ipynb +++ b/doc/examples/py_double_ml_gate.ipynb @@ -33,7 +33,7 @@ "\n", "We define a data generating process to create synthetic data to compare the estimates to the true effect. The data generating process is based on the Monte Carlo simulation from [Oprescu et al. (2019)](http://proceedings.mlr.press/v97/oprescu19a.html).\n", "\n", - "The documentation of the data generating process can be found [here](https://docs.doubleml.org/dev/api/datasets.html#dataset-generators). In this example the true effect depends only the first covariate $X_0$ and takes the following form\n", + "The documentation of the data generating process can be found [here](https://docs.doubleml.org/stable/api/datasets.html#dataset-generators). In this example the true effect depends only the first covariate $X_0$ and takes the following form\n", "\n", "$$\n", "\\theta_0(X) = \\exp(2X_0) + 3\\sin(4X_0).\n", diff --git a/doc/examples/py_double_ml_gate_plr.ipynb b/doc/examples/py_double_ml_gate_plr.ipynb index b8308fc8..de784fc6 100644 --- a/doc/examples/py_double_ml_gate_plr.ipynb +++ b/doc/examples/py_double_ml_gate_plr.ipynb @@ -30,7 +30,7 @@ "\n", "We define a data generating process to create synthetic data to compare the estimates to the true effect. The data generating process is based on the Monte Carlo simulation from [Oprescu et al. (2019)](http://proceedings.mlr.press/v97/oprescu19a.html).\n", "\n", - "The documentation of the data generating process can be found [here](https://docs.doubleml.org/dev/api/datasets.html#dataset-generators). In this example the true effect depends only the first covariate $X_0$ and takes the following form\n", + "The documentation of the data generating process can be found [here](https://docs.doubleml.org/stable/api/datasets.html#dataset-generators). 
In this example the true effect depends only the first covariate $X_0$ and takes the following form\n", + "\n", + "$$\n", + "\\theta_0(X) = \\exp(2X_0) + 3\\sin(4X_0).\n", diff --git a/doc/examples/py_double_ml_irm_vs_apo.ipynb b/doc/examples/py_double_ml_irm_vs_apo.ipynb new file mode 100644 index 00000000..4d97251c --- /dev/null +++ b/doc/examples/py_double_ml_irm_vs_apo.ipynb @@ -0,0 +1,527 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Python: IRM and APO Model Comparison\n", + "\n", + "In this simple example, we illustrate how the (binary) [DoubleMLIRM](https://docs.doubleml.org/stable/guide/models.html#binary-interactive-regression-model-irm) class relates to the [DoubleMLAPOS](https://docs.doubleml.org/stable/guide/models.html#average-potential-outcomes-apos-for-multiple-treatment-levels) class.\n", + "\n", + "More specifically, we focus on the `causal_contrast()` method of [DoubleMLAPOS](https://docs.doubleml.org/stable/guide/models.html#average-potential-outcomes-apos-for-multiple-treatment-levels) in a binary setting to highlight when both methods coincide." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import pandas as pd\n", + "import doubleml as dml\n", + "\n", + "from sklearn.linear_model import LinearRegression, LogisticRegression\n", + "from sklearn.preprocessing import PolynomialFeatures\n", + "\n", + "from matplotlib import pyplot as plt\n", + "\n", + "from doubleml.datasets import make_irm_data" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Data\n", + "\n", + "We rely on the [make_irm_data](https://docs.doubleml.org/stable/api/generated/doubleml.datasets.make_irm_data.html) to generate data with a binary treatment." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "n_obs = 2000\n", + "\n", + "np.random.seed(42)\n", + "df = make_irm_data(\n", + "    n_obs=n_obs,\n", + "    dim_x=10,\n", + "    theta=5.0,\n", + "    return_type='DataFrame'\n", + ")\n", + "\n", + "df.head()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First, define the ``DoubleMLData`` object." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_data = dml.DoubleMLData(\n", + "    df,\n", + "    y_col='y',\n", + "    d_cols='d'\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Learners and Hyperparameters\n", + "\n", + "To simplify the comparison and keep the variation in learners as small as possible, we will use linear models." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "n_folds = 5\n", + "n_rep = 1\n", + "\n", + "dml_kwargs = {\n", + "    \"obj_dml_data\": dml_data,\n", + "    \"ml_g\": LinearRegression(),\n", + "    \"ml_m\": LogisticRegression(random_state=42),\n", + "    \"n_folds\": n_folds,\n", + "    \"n_rep\": n_rep,\n", + "    \"normalize_ipw\": False,\n", + "    \"trimming_threshold\": 1e-2,\n", + "    \"draw_sample_splitting\": False,\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Remark:**\n", + "All results rely on the exact same predictions for the machine learning algorithms. If more than two treatment levels exist, the `DoubleMLAPOS` model fits multiple binary models, such that the combined model might differ."
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Further, to remove all uncertainty from sample splitting, we will rely on externally provided sample splits." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from doubleml.utils import DoubleMLResampling\n", + "\n", + "rskf = DoubleMLResampling(\n", + "    n_folds=n_folds,\n", + "    n_rep=n_rep,\n", + "    n_obs=n_obs,\n", + "    stratify=df['d'],\n", + ")\n", + "all_smpls = rskf.split_samples()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Average Treatment Effect\n", + "\n", + "Comparing the effect estimates of the `DoubleMLIRM` model and the `causal_contrast()` of the `DoubleMLAPOS` model, we obtain numerically equivalent results for the ATE." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_irm = dml.DoubleMLIRM(**dml_kwargs)\n", + "dml_irm.set_sample_splitting(all_smpls)\n", + "print(\"Training IRM Model\")\n", + "dml_irm.fit()\n", + "\n", + "print(dml_irm.summary)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_apos = dml.DoubleMLAPOS(treatment_levels=[0,1], **dml_kwargs)\n", + "dml_apos.set_sample_splitting(all_smpls)\n", + "print(\"Training APOS Model\")\n", + "dml_apos.fit()\n", + "print(dml_apos.summary)\n", + "\n", + "print(\"Evaluate Causal Contrast\")\n", + "causal_contrast = dml_apos.causal_contrast(reference_levels=[0])\n", + "print(causal_contrast.summary)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For a direct comparison, see" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(\"IRM Model\")\n", + "print(dml_irm.summary)\n", + "print(\"Causal Contrast\")\n", + "print(causal_contrast.summary)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Average Treatment Effect on the Treated\n", + "\n", + "For the average treatment effect on the treated, we can adjust the score in the `DoubleMLIRM` model to `score=\"ATTE\"`."
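In potential-outcome notation the target here is $\theta_{\mathrm{ATTE}} = \mathbb{E}[Y(1) - Y(0) \mid D = 1]$. A short sketch of the weighted representation used in the following cells, writing $m(X) = P(D=1\mid X)$ for the propensity score and $p = P(D=1)$ for the share of treated units (notation assumed here, not defined in the notebook): the ATTE is a weighted ATE with

$$
\omega(D) = \frac{D}{p}, \qquad \bar{\omega}(X) = \frac{m(X)}{p},
$$

which is exactly what the `weights_dict` constructed below encodes, with `p_hat = df["d"].mean()` and the cross-fitted propensity predictions `m_hat`.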
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_irm_atte = dml.DoubleMLIRM(score=\"ATTE\", **dml_kwargs)\n", + "dml_irm_atte.set_sample_splitting(all_smpls)\n", + "print(\"Training IRM Model\")\n", + "dml_irm_atte.fit()\n", + "\n", + "print(dml_irm_atte.summary)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In order to consider weighted effects in the `DoubleMLAPOS` model, we have to specify the correct weights, see the [User Guide](https://docs.doubleml.org/stable/guide/heterogeneity.html#weighted-average-treatment-effects).\n", + "\n", + "As these weights include the propensity score, we will use the predicted propensity score from the previous `DoubleMLIRM` model.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "p_hat = df[\"d\"].mean()\n", + "m_hat = dml_irm_atte.predictions[\"ml_m\"][:, :, 0]\n", + "\n", + "weights_dict = {\n", + "    \"weights\": df[\"d\"] / p_hat,\n", + "    \"weights_bar\": m_hat / p_hat,\n", + "}\n", + "\n", + "dml_apos_atte = dml.DoubleMLAPOS(treatment_levels=[0,1], weights=weights_dict, **dml_kwargs)\n", + "dml_apos_atte.set_sample_splitting(all_smpls)\n", + "print(\"Training APOS Model\")\n", + "dml_apos_atte.fit()\n", + "print(dml_apos_atte.summary)\n", + "\n", + "print(\"Evaluate Causal Contrast\")\n", + "causal_contrast_atte = dml_apos_atte.causal_contrast(reference_levels=[0])\n", + "print(causal_contrast_atte.summary)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The same results can be achieved by specifying the weights for the `DoubleMLIRM` class with `score='ATE'`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_irm_weighted_atte = dml.DoubleMLIRM(score=\"ATE\", weights=weights_dict, **dml_kwargs)\n", + "dml_irm_weighted_atte.set_sample_splitting(all_smpls)\n", + "print(\"Training IRM Model\")\n", + "dml_irm_weighted_atte.fit()\n", + "\n", + "print(dml_irm_weighted_atte.summary)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In summary, see" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(\"IRM Model ATTE Score\")\n", + "print(dml_irm_atte.summary.round(4))\n", + "print(\"IRM Model (Weighted)\")\n", + "print(dml_irm_weighted_atte.summary.round(4))\n", + "print(\"Causal Contrast (Weighted)\")\n", + "print(causal_contrast_atte.summary.round(4))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Sensitivity Analysis\n", + "\n", + "The sensitivity analysis gives identical results."
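Beyond the default calls in the next cells, the same confounding scenario can also be passed explicitly to both objects. A minimal sketch, assuming the standard `cf_y` / `cf_d` / `rho` arguments of `sensitivity_analysis()`; the value 0.03 is purely illustrative:

```python
# Illustrative only: evaluate both models under one assumed confounding scenario.
# Because the underlying score elements are identical, the reported bounds match.
dml_irm.sensitivity_analysis(cf_y=0.03, cf_d=0.03, rho=1.0)
causal_contrast.sensitivity_analysis(cf_y=0.03, cf_d=0.03, rho=1.0)

print(dml_irm.sensitivity_summary)
print(causal_contrast.sensitivity_summary)
```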
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dml_irm.sensitivity_analysis()\n", + "print(dml_irm.sensitivity_summary)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "causal_contrast.sensitivity_analysis()\n", + "print(causal_contrast.sensitivity_summary)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Effect Heterogeneity\n", + "\n", + "For conditional treatment effects the exact same methods do not exist.\n", + "Nevertheless, we will compare the `capo()` variant of the `DoubleMLAPO` class to the corresponding `cate()` method of the `DoubleMLIRM` class.\n", + "\n", + "For a simple case we will just use a polynomial basis of the first feature `X1`. To plot the data we will evaluate the methods on the corresponding grid of basis values." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "X = df[[\"X1\"]]\n", + "poly = PolynomialFeatures(degree=2, include_bias=True)\n", + "\n", + "basis_matrix = poly.fit_transform(X)\n", + "basis_df = pd.DataFrame(basis_matrix, columns=poly.get_feature_names_out([\"X1\"]))\n", + "\n", + "grid = pd.DataFrame({\"X1\": np.linspace(np.quantile(df[\"X1\"], 0.1), np.quantile(df[\"X1\"], 0.9), 100)})\n", + "grid_basis = pd.DataFrame( poly.transform(grid), columns=poly.get_feature_names_out([\"X1\"]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Apply the `cate()` method to the basis and evaluate on the transformed grid values." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cate = dml_irm.cate(basis_df)\n", + "print(cate)\n", + "np.random.seed(42)\n", + "df_cate = cate.confint(grid_basis, level=0.95, joint=True, n_rep_boot=2000)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The corresponding `apo()` method can be used for the treatment levels $0$ and $1$." 
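The relation underlying this comparison is simply

$$
\theta_0(x) = \mathbb{E}[Y(1) - Y(0) \mid X_1 = x] = \mathrm{CAPO}(1 \mid x) - \mathrm{CAPO}(0 \mid x),
$$

where $\mathrm{CAPO}(a \mid x)$ is shorthand (used only in this sketch) for the conditional average potential outcome at treatment level $a$. Since the average potential outcome of the control group is zero in this data generating process, the estimated CATE and the CAPO at level $1$ should roughly coincide.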
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "capo0 = dml_apos.modellist[0].capo(basis_df)\n", + "print(capo0)\n", + "np.random.seed(42)\n", + "df_capo0 = capo0.confint(grid_basis, level=0.95, joint=True, n_rep_boot=2000)\n", + "\n", + "capo1 = dml_apos.modellist[1].capo(basis_df)\n", + "print(capo1)\n", + "np.random.seed(42)\n", + "df_capo1 = capo1.confint(grid_basis, level=0.95, joint=True, n_rep_boot=2000)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this example the average potential outcome of the control group is zero (as can be seen in the outcome definition, see [documentation](https://docs.doubleml.org/stable/api/generated/doubleml.datasets.make_irm_data.html#doubleml.datasets.make_irm_data)).\n", + "Let us visualize the effects." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [ + "nbsphinx-thumbnail" + ] + }, + "outputs": [], + "source": [ + "df_cate['x'] = grid_basis['X1']\n", + "\n", + "plt.rcParams['figure.figsize'] = 10., 7.5\n", + "fig, (ax1, ax2) = plt.subplots(1, 2)\n", + "\n", + "# Plot CATE\n", + "ax1.plot(df_cate['x'], df_cate['effect'], label='Estimated Effect')\n", + "ax1.fill_between(df_cate['x'], df_cate['2.5 %'], df_cate['97.5 %'], alpha=.3, label='Confidence Interval')\n", + "ax1.legend()\n", + "ax1.set_title('CATE')\n", + "ax1.set_xlabel('X1')\n", + "ax1.set_ylabel('Effect and 95%-CI')\n", + "\n", + "# Plot Average Potential Outcomes\n", + "ax2.plot(df_cate['x'], df_capo0['effect'], label='APO(0)')\n", + "ax2.fill_between(df_cate['x'], df_capo0['2.5 %'], df_capo0['97.5 %'], alpha=.3, label='Confidence Interval')\n", + "ax2.plot(df_cate['x'], df_capo1['effect'], label='APO(1)')\n", + "ax2.fill_between(df_cate['x'], df_capo1['2.5 %'], df_capo1['97.5 %'], alpha=.3, label='Confidence Interval')\n", + "ax2.legend()\n", + "ax2.set_title('Average Potential Outcomes')\n", + "ax2.set_xlabel('X1')\n", + "ax2.set_ylabel('Effect and 95%-CI')\n", + "\n", + "# Ensure the same scale on y-axis\n", + "ax1.set_ylim(min(ax1.get_ylim()[0], ax2.get_ylim()[0]), max(ax1.get_ylim()[1], ax2.get_ylim()[1]))\n", + "ax2.set_ylim(min(ax1.get_ylim()[0], ax2.get_ylim()[0]), max(ax1.get_ylim()[1], ax2.get_ylim()[1]))\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `causal_contrast()` method does not currently have a `cate()` method implemented, but the CATE can be constructed manually via the correct score function."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "orth_signal = -1.0 * (causal_contrast.scaled_psi.reshape(-1) - causal_contrast.thetas)\n", + "\n", + "causal_contrast_cate = dml.utils.DoubleMLBLP(orth_signal, basis_df)\n", + "causal_contrast_cate.fit()\n", + "print(causal_contrast_cate.summary)\n", + "np.random.seed(42)\n", + "df_causal_contrast_cate = causal_contrast_cate.confint(grid_basis, level=0.95, joint=True, n_rep_boot=2000)\n", + "\n", + "print(\"CATE (IRM) as comparison:\")\n", + "print(cate.summary)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plt.rcParams['figure.figsize'] = 10., 7.5\n", + "fig, (ax1, ax2) = plt.subplots(1, 2)\n", + "\n", + "# Plot CATE\n", + "ax1.plot(df_cate['x'], df_cate['effect'], label='Estimated Effect')\n", + "ax1.fill_between(df_cate['x'], df_cate['2.5 %'], df_cate['97.5 %'], alpha=.3, label='Confidence Interval')\n", + "ax1.legend()\n", + "ax1.set_title('CATE (IRM)')\n", + "ax1.set_xlabel('X1')\n", + "ax1.set_ylabel('Effect and 95%-CI')\n", + "\n", + "# Plot Average Potential Outcomes\n", + "ax2.plot(df_cate['x'], df_causal_contrast_cate['effect'], label='Estimated Effect')\n", + "ax2.fill_between(df_cate['x'], df_causal_contrast_cate['2.5 %'], df_causal_contrast_cate['97.5 %'], alpha=.3, label='Confidence Interval')\n", + "ax2.legend()\n", + "ax2.set_title('CATE (Causal Contrast)')\n", + "ax2.set_xlabel('X1')\n", + "ax2.set_ylabel('Effect and 95%-CI')\n", + "\n", + "# Ensure the same scale on y-axis\n", + "ax1.set_ylim(min(ax1.get_ylim()[0], ax2.get_ylim()[0]), max(ax1.get_ylim()[1], ax2.get_ylim()[1]))\n", + "ax2.set_ylim(min(ax1.get_ylim()[0], ax2.get_ylim()[0]), max(ax1.get_ylim()[1], ax2.get_ylim()[1]))\n", + "\n", + "plt.show()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/doc/examples/py_double_ml_multiway_cluster.ipynb b/doc/examples/py_double_ml_multiway_cluster.ipynb index 0fdbc908..82db9b26 100644 --- a/doc/examples/py_double_ml_multiway_cluster.ipynb +++ b/doc/examples/py_double_ml_multiway_cluster.ipynb @@ -150,7 +150,7 @@ }, "source": [ "### Data-Backend for Cluster Data\n", - "The implementation of cluster robust double machine learning is based on a special data-backend called [DoubleMLClusterData](https://docs.doubleml.org/stable/api/generated/doubleml.DoubleMLClusterData.html#doubleml.DoubleMLClusterData). As compared to the standard data-backend [DoubleMLData](https://docs.doubleml.org/dev/api/generated/doubleml.DoubleMLData.html), users can specify the clustering variables during instantiation of a [DoubleMLClusterData](https://docs.doubleml.org/stable/api/generated/doubleml.DoubleMLClusterData.html#doubleml.DoubleMLClusterData) object. The estimation framework will subsequently account for the provided clustering options." + "The implementation of cluster robust double machine learning is based on a special data-backend called [DoubleMLClusterData](https://docs.doubleml.org/stable/api/generated/doubleml.DoubleMLClusterData.html#doubleml.DoubleMLClusterData). 
As compared to the standard data-backend [DoubleMLData](https://docs.doubleml.org/stable/api/generated/doubleml.DoubleMLData.html), users can specify the clustering variables during instantiation of a [DoubleMLClusterData](https://docs.doubleml.org/stable/api/generated/doubleml.DoubleMLClusterData.html#doubleml.DoubleMLClusterData) object. The estimation framework will subsequently account for the provided clustering options." ] }, { diff --git a/doc/examples/py_double_ml_pension.ipynb b/doc/examples/py_double_ml_pension.ipynb index 251baf7b..8dce90f4 100644 --- a/doc/examples/py_double_ml_pension.ipynb +++ b/doc/examples/py_double_ml_pension.ipynb @@ -294,7 +294,7 @@ "id": "2e1fe478", "metadata": {}, "source": [ - "To start our analysis, we initialize the data backend, i.e., a new instance of a [DoubleMLData](https://docs.doubleml.org/dev/api/generated/doubleml.DoubleMLData.html#doubleml.DoubleMLData) object. We implement the regression model by using scikit-learn's `PolynomialFeatures` class.\n", + "To start our analysis, we initialize the data backend, i.e., a new instance of a [DoubleMLData](https://docs.doubleml.org/dev/api/generated/doubleml.data.DoubleMLData.html#doubleml.data.DoubleMLData) object. We implement the regression model by using scikit-learn's `PolynomialFeatures` class.\n", "\n", "To implement both models (basic and flexible), we generate two data backends: `data_dml_base` and `data_dml_flex`." ] diff --git a/doc/examples/py_double_ml_robust_iv.ipynb b/doc/examples/py_double_ml_robust_iv.ipynb new file mode 100644 index 00000000..40c47eb1 --- /dev/null +++ b/doc/examples/py_double_ml_robust_iv.ipynb @@ -0,0 +1,240 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "255d0213", + "metadata": {}, + "source": [ + "# Python: Confidence Intervals for Instrumental Variables Models That Are Robust to Weak Instruments" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this example we will show how to use the DoubleML package to obtain confidence sets for the treatment effects that are robust to weak instruments. Weak instruments are those that have a relatively weak correlation with the treatment. It is well known that in this case, standard methods to construct confidence intervals have poor properties and can have coverage much lower than the nominal value. We will assume that the reader of this notebook is already familiar with DoubleML and how it can be used to fit instrumental variable models.\n", + "\n", + "Throughout this example\n", + "\n", + "- $Z$ is the instrument,\n", + "- $X$ is a vector of covariates,\n", + "- $D$ is treatment variable,\n", + "- $Y$ is the outcome.\n", + "\n", + "![robust_iv_example_nb.png](../_static/robust_iv_example_nb.png)\n", + "\n", + "\n", + "Next, we will run a simulation, where we will generate two synthetic data sets, one where the instrument is weak and another where it is not. Then, we will compare the output of the standard way to compute confidence intervals using the ``DoubleMLIIVM`` class, with the confidence sets computed using the ``robust_confset()`` method from the same class. We will see that using the ``robust_confset()`` method is an easy way to ensure the results of an analysis are robust to weak instruments." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import pandas as pd\n", + "import random\n", + "from sklearn.base import BaseEstimator, ClassifierMixin\n", + "from sklearn.linear_model import LinearRegression, LogisticRegression\n", + "import doubleml as dml\n", + "\n", + "np.random.seed(1234)\n", + "random.seed(1234)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Running a small simulation" + ] + }, + { + "cell_type": "markdown", + "id": "774e45c7", + "metadata": {}, + "source": [ + "The following function generates data from an instrumental variables model. The ``true_effect`` argument is the estimand of interest, the true effect of the treatment on the outcome. The ``instrument_strength`` argument is a measure of the strength of the instrument: the higher it is, the stronger the correlation between the instrument and the treatment. Notice that the instrument is fully randomized." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "82111204", + "metadata": {}, + "outputs": [], + "source": [ + "def generate_weakiv_data(n_samples, true_effect, instrument_strength):\n", + "    u = np.random.normal(0, 2, size=n_samples)\n", + "    X = np.random.normal(0, 1, size=n_samples)\n", + "    Z = np.random.binomial(1, 0.5, size=n_samples)\n", + "    D = instrument_strength * Z + u\n", + "    D = np.array(D > 0, dtype=int)\n", + "    Y = true_effect * D + np.sign(u)\n", + "    return pd.DataFrame({\"Y\": Y, \"Z\": Z, \"D\": D, \"X\": X})" + ] + }, + { + "cell_type": "markdown", + "id": "8c938fd8", + "metadata": {}, + "source": [ + "To fit the DML model, we need to decide how to estimate the nuisance functions. We will use a linear regression model for $g$, and a logistic regression for $r$. We will assume that we know the true $m$ function, as is the case in a controlled experiment, such as an A/B test. The following class defines this \"fake\" estimator." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "9a347c25", + "metadata": {}, + "outputs": [], + "source": [ + "class TrueMFunction(BaseEstimator, ClassifierMixin):\n", + "    def __init__(self, prob_dist=(0.5, 0.5)):\n", + "        self.prob_dist = prob_dist\n", + "\n", + "    def fit(self, X, y):\n", + "        self.prob_dist_ = np.array(self.prob_dist)\n", + "        self.classes_ = np.array(sorted(set(y)))\n", + "        return self\n", + "\n", + "    def predict_proba(self, X):\n", + "        return np.tile(self.prob_dist_, (len(X), 1))\n", + "\n", + "    def predict(self, X):\n", + "        return np.full(len(X), self.classes_[np.argmax(self.prob_dist_)])" + ] + }, + { + "cell_type": "markdown", + "id": "becf84b0", + "metadata": {}, + "source": [ + "We will now run a loop, where for each of $100$ replications we will generate data using the previously defined function. We will take a sample size of $5000$, a true effect equal to $1$, and two possible values for the instrument strength: $0.003$ and $1$. In the latter case the instrument is strong, in the former it is weak. We will then compute both the robust and the standard confidence intervals, check whether they contain the true effect, and compute their length."
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "600b8196", + "metadata": {}, + "outputs": [], + "source": [ + "n_samples = 5000\n", + "true_effect = 1\n", + "output_list = []\n", + "for _ in range(100):\n", + "    for instrument_strength in [0.003, 1]:\n", + "        dataset = generate_weakiv_data(n_samples = n_samples, true_effect = true_effect, instrument_strength = instrument_strength)\n", + "        dml_data = dml.DoubleMLData(\n", + "            dataset, y_col='Y', d_cols='D',\n", + "            z_cols='Z', x_cols='X'\n", + "        )\n", + "        ml_g = LinearRegression()\n", + "        ml_m = TrueMFunction()\n", + "        ml_r = LogisticRegression(penalty=None)\n", + "        dml_iivm = dml.DoubleMLIIVM(dml_data, ml_g, ml_m, ml_r)\n", + "        dml_iivm.fit()\n", + "        dml_standard_ci = dml_iivm.confint(joint=False)\n", + "        dml_robust_confset = dml_iivm.robust_confset()\n", + "        dml_covers = dml_standard_ci[\"2.5 %\"].iloc[0] <= true_effect <= dml_standard_ci[\"97.5 %\"].iloc[0]\n", + "        robust_covers = any(interval[0] <= true_effect <= interval[1] for interval in dml_robust_confset)\n", + "        dml_length = dml_standard_ci[\"97.5 %\"].iloc[0] - dml_standard_ci[\"2.5 %\"].iloc[0]\n", + "        dml_robust_length = max(interval[1] - interval[0] for interval in dml_robust_confset)\n", + "        output_list.append({\n", + "            \"instrument_strength\": instrument_strength,\n", + "            \"dml_covers\": dml_covers,\n", + "            \"robust_covers\": robust_covers,\n", + "            \"dml_length\": dml_length,\n", + "            \"robust_length\": dml_robust_length\n", + "        })\n", + "results_df = pd.DataFrame(output_list)" + ] + }, + { + "cell_type": "markdown", + "id": "3f1366d2", + "metadata": {}, + "source": [ + "Having stored the results of the simulation in the ``results_df`` dataframe, we will compute some summary statistics. We see in the table below that, when the instrument is strong, the standard DML confidence interval and the robust confidence set behave similarly, with coverage close to the nominal level and similar median lengths. On the other hand, when the instrument is weak, the coverage of the standard DML confidence interval is very low, whereas the coverage of the robust confidence set is again close to the nominal value. Note that in this case the robust confidence set has an infinite median length. When the robust confidence set has infinite length, the analyst should interpret the results as indicating that the data contains little information about the estimand of interest, possibly because the instrument is weak."
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "86c83edc", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " DML coverage Robust coverage DML median length \\\n", + "instrument_strength \n", + "0.003 0.15 0.91 0.489567 \n", + "1.000 0.93 0.92 0.572717 \n", + "\n", + " Robust median length \n", + "instrument_strength \n", + "0.003 inf \n", + "1.000 0.582754 \n" + ] + } + ], + "source": [ + "results_df = pd.DataFrame(output_list)\n", + "summary_df = results_df.groupby(\"instrument_strength\").agg(\n", + " **{\"DML coverage\": (\"dml_covers\", \"mean\"),\n", + " \"Robust coverage\": (\"robust_covers\", \"mean\"),\n", + " \"DML median length\": (\"dml_length\", \"median\"),\n", + " \"Robust median length\": (\"robust_length\", \"median\")}\n", + ")\n", + "print(summary_df)" + ] + }, + { + "cell_type": "markdown", + "id": "f4fd3d05", + "metadata": {}, + "source": [ + "# References" + ] + }, + { + "cell_type": "markdown", + "id": "946cbbcf", + "metadata": {}, + "source": [ + "- Chernozhukov, V., Chetverikov, D., Demirer, M., Duflo, E., and Hansen, C. (2018). Double/debiased machine learning for\n", + "treatment and structural parameters. The Econometrics Journal, 21(1):C1–C68.\n", + "- Ma, Y. (2023). Identification-robust inference for the late with high-dimensional covariates. arXiv preprint arXiv:2302.09756.\n", + "- Stock, J. H. and Wright, J. H. (2000). GMM with weak identification. Econometrica, 68(5):1055–1096.\n", + "- Takatsu, K., Levis, A. W., Kennedy, E., Kelz, R., and Keele, L. (2023). Doubly robust machine learning for an instrumental\n", + "variable study of surgical care for cholecystitis. arXiv preprint arXiv:2307.06269." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/doc/guide/algorithms.rst b/doc/guide/algorithms.rst index 8e52a076..6ffa56f0 100644 --- a/doc/guide/algorithms.rst +++ b/doc/guide/algorithms.rst @@ -83,6 +83,7 @@ The default version of the :class:`DoubleML` class is based on the DML2 algorith :sync: py .. ipython:: python + :okwarning: import doubleml as dml from doubleml.datasets import make_plr_CCDDHNR2018 diff --git a/doc/guide/data/base_data.rst b/doc/guide/data/base_data.rst new file mode 100644 index 00000000..1c848720 --- /dev/null +++ b/doc/guide/data/base_data.rst @@ -0,0 +1,173 @@ +The usage of both interfaces is demonstrated in the following. +We download the Bonus data set from the Pennsylvania Reemployment Bonus experiment. + +.. note:: + - In Python we use :py:class:`pandas.DataFrame` and :py:class:`numpy.ndarray`. + The data can be fetched via :py:func:`doubleml.datasets.fetch_bonus`. + - In R we use `data.table::data.table() `_, `data.frame() `_, and `matrix() `_. + The data can be fetched via `DoubleML::fetch_bonus() `_ + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + + from doubleml.datasets import fetch_bonus + + # Load data + df_bonus = fetch_bonus('DataFrame') + df_bonus.head(5) + + .. tab-item:: R + :sync: r + + .. 
jupyter-execute:: + + library(DoubleML) + + # Load data as data.table + dt_bonus = fetch_bonus(return_type = "data.table") + head(dt_bonus) + + # Load data as data.frame + df_bonus = fetch_bonus(return_type = "data.frame") + head(df_bonus) + + +DoubleMLData from dataframes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The ``DoubleMLData`` class serves as data-backend and can be initialized from a dataframe by +specifying the column ``y_col='inuidur1'`` serving as outcome variable :math:`Y`, the column(s) ``d_cols = 'tg'`` +serving as treatment variable :math:`D` and the columns ``x_cols`` specifying the confounders. + +.. note:: + * In Python we use :py:class:`pandas.DataFrame` + and the API reference can be found here :py:class:`doubleml.DoubleMLData`. + * In R we use `data.table::data.table() `_ and the API reference can be found here `DoubleML::DoubleMLData `_. + * For initialization from the R base class `data.frame() `_ the API reference can be found here `DoubleML::double_ml_data_from_data_frame() `_. + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + + from doubleml import DoubleMLData + + # Specify the data and the variables for the causal model + obj_dml_data_bonus = DoubleMLData(df_bonus, + y_col='inuidur1', + d_cols='tg', + x_cols=['female', 'black', 'othrace', 'dep1', 'dep2', + 'q2', 'q3', 'q4', 'q5', 'q6', 'agelt35', 'agegt54', + 'durable', 'lusd', 'husd'], + use_other_treat_as_covariate=True) + print(obj_dml_data_bonus) + + .. tab-item:: R + :sync: r + + .. jupyter-execute:: + + # Specify the data and the variables for the causal model + + # From data.table object + obj_dml_data_bonus = DoubleMLData$new(dt_bonus, + y_col = "inuidur1", + d_cols = "tg", + x_cols = c("female", "black", "othrace", "dep1", "dep2", + "q2", "q3", "q4", "q5", "q6", "agelt35", "agegt54", + "durable", "lusd", "husd"), + use_other_treat_as_covariate=TRUE) + obj_dml_data_bonus + + # From dat.frame object + obj_dml_data_bonus_df = double_ml_data_from_data_frame(df_bonus, + y_col = "inuidur1", + d_cols = "tg", + x_cols = c("female", "black", "othrace", "dep1", "dep2", + "q2", "q3", "q4", "q5", "q6", "agelt35", "agegt54", + "durable", "lusd", "husd"), + use_other_treat_as_covariate=TRUE) + obj_dml_data_bonus_df + +Comments on detailed specifications: + +* If ``x_cols`` is not specified, all variables (columns of the dataframe) which are neither specified as outcome + variable ``y_col``, nor treatment variables ``d_cols``, nor instrumental variables ``z_cols`` are used as covariates. +* In case of multiple treatment variables, the boolean ``use_other_treat_as_covariate`` indicates whether the other + treatment variables should be added as covariates in each treatment-variable-specific learning task. +* Instrumental variables for IV models have to be provided as ``z_cols``. + + +DoubleMLData from arrays and matrices +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To introduce the array interface we generate a data set consisting of confounding variables ``X``, an outcome +variable ``y`` and a treatment variable ``d`` + +.. note:: + * In python we use :py:class:`numpy.ndarray`. + and the API reference can be found here :py:func:`doubleml.DoubleMLData.from_arrays`. + * In R we use the R base class `matrix() `_ + and the API reference can be found here `DoubleML::double_ml_data_from_matrix() `_. + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. 
ipython:: python + + import numpy as np + + # Generate data + np.random.seed(3141) + n_obs = 500 + n_vars = 100 + theta = 3 + X = np.random.normal(size=(n_obs, n_vars)) + d = np.dot(X[:, :3], np.array([5, 5, 5])) + np.random.standard_normal(size=(n_obs,)) + y = theta * d + np.dot(X[:, :3], np.array([5, 5, 5])) + np.random.standard_normal(size=(n_obs,)) + + .. tab-item:: R + :sync: r + + .. jupyter-execute:: + + # Generate data + set.seed(3141) + n_obs = 500 + n_vars = 100 + theta = 3 + X = matrix(stats::rnorm(n_obs * n_vars), nrow = n_obs, ncol = n_vars) + d = X[, 1:3, drop = FALSE] %*% c(5, 5, 5) + stats::rnorm(n_obs) + y = theta * d + X[, 1:3, drop = FALSE] %*% c(5, 5, 5) + stats::rnorm(n_obs) + +To specify the data and the variables for the causal model from arrays we call + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + + from doubleml import DoubleMLData + + obj_dml_data_sim = DoubleMLData.from_arrays(X, y, d) + print(obj_dml_data_sim) + + .. tab-item:: R + :sync: r + + .. jupyter-execute:: + + library(DoubleML) + + obj_dml_data_sim = double_ml_data_from_matrix(X = X, y = y, d = d) + obj_dml_data_sim diff --git a/doc/guide/data/panel_data.rst b/doc/guide/data/panel_data.rst new file mode 100644 index 00000000..c1ed1a43 --- /dev/null +++ b/doc/guide/data/panel_data.rst @@ -0,0 +1,32 @@ +The ``DoubleMLPanelData`` class serves as data-backend for :ref:`DiD models ` and can be initialized from a dataframe. +The class is a subclass of :ref:`DoubleMLData ` and inherits all methods and attributes. +Furthermore, it provides additional methods and attributes to handle panel data () + +* ``id_col``: column to with unique identifiers for each unit +* ``t_col``: column to specify the time periods of the observation +* ``datetime_unit``: unit of the time periods (e.g. 'Y', 'M', 'D', 'h', 'm', 's') + +.. note:: + The ``t_col`` can contain ``float``, ``int`` or ``datetime`` values. + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + + from doubleml.did.datasets import make_did_CS2021 + + np.random.seed(42) + df = make_did_CS2021(n_obs=500) + dml_data = dml.data.DoubleMLPanelData( + df, + y_col="y", + d_cols="d", + id_col="id", + t_col="t", + x_cols=["Z1", "Z2", "Z3", "Z4"], + datetime_unit="M" + ) + print(dml_data) diff --git a/doc/guide/data_backend.rst b/doc/guide/data_backend.rst index 7a5684ab..8849f264 100644 --- a/doc/guide/data_backend.rst +++ b/doc/guide/data_backend.rst @@ -1,175 +1,30 @@ .. _data_backend: -The data-backend DoubleMLData ------------------------------ +Data Backend +------------ -:ref:`DoubleML ` provides interfaces to dataframes as well as arrays. The usage of both interfaces is -demonstrated in the following. We download the Bonus data set from the Pennsylvania Reemployment Bonus experiment. +:ref:`DoubleML ` generally provides interfaces to dataframes as well as arrays. -.. note:: - - In Python we use :py:class:`pandas.DataFrame` and :py:class:`numpy.ndarray`. - The data can be fetched via :py:func:`doubleml.datasets.fetch_bonus`. - - In R we use `data.table::data.table() `_, `data.frame() `_, and `matrix() `_. - The data can be fetched via `DoubleML::fetch_bonus() `_ +.. _dml_data: -.. tab-set:: +DoubleMLData +~~~~~~~~~~~~ - .. tab-item:: Python - :sync: py +.. include:: data/base_data.rst - .. ipython:: python - from doubleml.datasets import fetch_bonus +.. _dml_data_types: - # Load data - df_bonus = fetch_bonus('DataFrame') - df_bonus.head(5) +Special Data Types +~~~~~~~~~~~~~~~~~~ - .. 
tab-item:: R - :sync: r +The :ref:`DoubleMLData ` class is extended by the following classes to support special data types or allow for additional parameters. - .. jupyter-execute:: - library(DoubleML) +.. _dml_panel_data: - # Load data as data.table - dt_bonus = fetch_bonus(return_type = "data.table") - head(dt_bonus) +DoubleMLPanelData +^^^^^^^^^^^^^^^^^ - # Load data as data.frame - df_bonus = fetch_bonus(return_type = "data.frame") - head(df_bonus) +.. include:: data/panel_data.rst - -DoubleMLData from dataframes -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The ``DoubleMLData`` class serves as data-backend and can be initialized from a dataframe by -specifying the column ``y_col='inuidur1'`` serving as outcome variable :math:`Y`, the column(s) ``d_cols = 'tg'`` -serving as treatment variable :math:`D` and the columns ``x_cols`` specifying the confounders. - -.. note:: - * In Python we use :py:class:`pandas.DataFrame` - and the API reference can be found here :py:class:`doubleml.DoubleMLData`. - * In R we use `data.table::data.table() `_ and the API reference can be found here `DoubleML::DoubleMLData `_. - * For initialization from the R base class `data.frame() `_ the API reference can be found here `DoubleML::double_ml_data_from_data_frame() `_. - -.. tab-set:: - - .. tab-item:: Python - :sync: py - - .. ipython:: python - - from doubleml import DoubleMLData - - # Specify the data and the variables for the causal model - obj_dml_data_bonus = DoubleMLData(df_bonus, - y_col='inuidur1', - d_cols='tg', - x_cols=['female', 'black', 'othrace', 'dep1', 'dep2', - 'q2', 'q3', 'q4', 'q5', 'q6', 'agelt35', 'agegt54', - 'durable', 'lusd', 'husd'], - use_other_treat_as_covariate=True) - print(obj_dml_data_bonus) - - .. tab-item:: R - :sync: r - - .. jupyter-execute:: - - # Specify the data and the variables for the causal model - - # From data.table object - obj_dml_data_bonus = DoubleMLData$new(dt_bonus, - y_col = "inuidur1", - d_cols = "tg", - x_cols = c("female", "black", "othrace", "dep1", "dep2", - "q2", "q3", "q4", "q5", "q6", "agelt35", "agegt54", - "durable", "lusd", "husd"), - use_other_treat_as_covariate=TRUE) - obj_dml_data_bonus - - # From dat.frame object - obj_dml_data_bonus_df = double_ml_data_from_data_frame(df_bonus, - y_col = "inuidur1", - d_cols = "tg", - x_cols = c("female", "black", "othrace", "dep1", "dep2", - "q2", "q3", "q4", "q5", "q6", "agelt35", "agegt54", - "durable", "lusd", "husd"), - use_other_treat_as_covariate=TRUE) - obj_dml_data_bonus_df - -Comments on detailed specifications: - -* If ``x_cols`` is not specified, all variables (columns of the dataframe) which are neither specified as outcome - variable ``y_col``, nor treatment variables ``d_cols``, nor instrumental variables ``z_cols`` are used as covariates. -* In case of multiple treatment variables, the boolean ``use_other_treat_as_covariate`` indicates whether the other - treatment variables should be added as covariates in each treatment-variable-specific learning task. -* Instrumental variables for IV models have to be provided as ``z_cols``. - -DoubleMLData from arrays and matrices -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -To introduce the array interface we generate a data set consisting of confounding variables ``X``, an outcome -variable ``y`` and a treatment variable ``d`` - -.. note:: - * In python we use :py:class:`numpy.ndarray`. - and the API reference can be found here :py:func:`doubleml.DoubleMLData.from_arrays`. 
- * In R we use the R base class `matrix() `_ - and the API reference can be found here `DoubleML::double_ml_data_from_matrix() `_. - -.. tab-set:: - - .. tab-item:: Python - :sync: py - - .. ipython:: python - - import numpy as np - - # Generate data - np.random.seed(3141) - n_obs = 500 - n_vars = 100 - theta = 3 - X = np.random.normal(size=(n_obs, n_vars)) - d = np.dot(X[:, :3], np.array([5, 5, 5])) + np.random.standard_normal(size=(n_obs,)) - y = theta * d + np.dot(X[:, :3], np.array([5, 5, 5])) + np.random.standard_normal(size=(n_obs,)) - - .. tab-item:: R - :sync: r - - .. jupyter-execute:: - - # Generate data - set.seed(3141) - n_obs = 500 - n_vars = 100 - theta = 3 - X = matrix(stats::rnorm(n_obs * n_vars), nrow = n_obs, ncol = n_vars) - d = X[, 1:3, drop = FALSE] %*% c(5, 5, 5) + stats::rnorm(n_obs) - y = theta * d + X[, 1:3, drop = FALSE] %*% c(5, 5, 5) + stats::rnorm(n_obs) - -To specify the data and the variables for the causal model from arrays we call - -.. tab-set:: - - .. tab-item:: Python - :sync: py - - .. ipython:: python - - from doubleml import DoubleMLData - - obj_dml_data_sim = DoubleMLData.from_arrays(X, y, d) - print(obj_dml_data_sim) - - .. tab-item:: R - :sync: r - - .. jupyter-execute:: - - obj_dml_data_sim = double_ml_data_from_matrix(X = X, y = y, d = d) - obj_dml_data_sim \ No newline at end of file diff --git a/doc/guide/guide.rst b/doc/guide/guide.rst index 2c2566cb..dcaecb9c 100644 --- a/doc/guide/guide.rst +++ b/doc/guide/guide.rst @@ -10,7 +10,7 @@ User Guide :numbered: The basics of double/debiased machine learning - The data-backend DoubleMLData + Data Backend Models Heterogeneous Treatment Effects Score functions diff --git a/doc/guide/heterogeneity.rst b/doc/guide/heterogeneity.rst index a7cfcd85..42332cf5 100644 --- a/doc/guide/heterogeneity.rst +++ b/doc/guide/heterogeneity.rst @@ -30,17 +30,17 @@ GATEs for IRM models :sync: py .. 
ipython:: python - + import numpy as np import pandas as pd import doubleml as dml from doubleml.datasets import make_irm_data from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier - ml_g = RandomForestRegressor(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) - ml_m = RandomForestClassifier(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) + ml_g = RandomForestRegressor(n_estimators=100, max_features=5, max_depth=5, min_samples_leaf=2) + ml_m = RandomForestClassifier(n_estimators=100, max_features=5, max_depth=5, min_samples_leaf=2) np.random.seed(3333) - data = make_irm_data(theta=0.5, n_obs=500, dim_x=20, return_type='DataFrame') + data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type='DataFrame') obj_dml_data = dml.DoubleMLData(data, 'y', 'd') dml_irm_obj = dml.DoubleMLIRM(obj_dml_data, ml_g, ml_m) _ = dml_irm_obj.fit() @@ -124,10 +124,10 @@ CATEs for IRM models from doubleml.datasets import make_irm_data from sklearn.ensemble import RandomForestRegressor - ml_g = RandomForestRegressor(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) - ml_m = RandomForestClassifier(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) + ml_g = RandomForestRegressor(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) + ml_m = RandomForestClassifier(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) np.random.seed(3333) - data = make_irm_data(theta=0.5, n_obs=500, dim_x=20, return_type='DataFrame') + data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type='DataFrame') obj_dml_data = dml.DoubleMLData(data, 'y', 'd') dml_irm_obj = dml.DoubleMLIRM(obj_dml_data, ml_g, ml_m) _ = dml_irm_obj.fit() @@ -258,10 +258,10 @@ In these cases the weights can be specified as an array via the ``weights`` argu from doubleml.datasets import make_irm_data from sklearn.ensemble import RandomForestRegressor - ml_g = RandomForestRegressor(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) - ml_m = RandomForestClassifier(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) + ml_g = RandomForestRegressor(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) + ml_m = RandomForestClassifier(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) np.random.seed(3333) - data = make_irm_data(theta=0.5, n_obs=500, dim_x=20, return_type='DataFrame') + data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type='DataFrame') obj_dml_data = dml.DoubleMLData(data, 'y', 'd') weights = np.ones(500) dml_irm_obj = dml.DoubleMLIRM(obj_dml_data, ml_g, ml_m, weights=weights) @@ -470,10 +470,10 @@ The ``depth`` parameter, which defaults to ``2``, can be used to adjust the maxi from doubleml.datasets import make_irm_data from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier - ml_g = RandomForestRegressor(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) - ml_m = RandomForestClassifier(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) + ml_g = RandomForestRegressor(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) + ml_m = RandomForestClassifier(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) np.random.seed(3333) - data = make_irm_data(theta=0.5, n_obs=500, dim_x=20, return_type='DataFrame') + data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type='DataFrame') obj_dml_data = dml.DoubleMLData(data, 'y', 'd') dml_irm_obj = 
dml.DoubleMLIRM(obj_dml_data, ml_g, ml_m) _ = dml_irm_obj.fit() diff --git a/doc/guide/learners.rst b/doc/guide/learners.rst index 67b406a9..700c48a1 100644 --- a/doc/guide/learners.rst +++ b/doc/guide/learners.rst @@ -275,12 +275,12 @@ To illustrate the use of external predictions, we work with the following exampl from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier np.random.seed(3333) - data = make_irm_data(theta=0.5, n_obs=500, dim_x=20, return_type='DataFrame') + data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type='DataFrame') obj_dml_data = dml.DoubleMLData(data, 'y', 'd') # DoubleML with interal predictions - ml_g = RandomForestRegressor(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) - ml_m = RandomForestClassifier(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) + ml_g = RandomForestRegressor(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) + ml_m = RandomForestClassifier(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) dml_irm_obj = dml.DoubleMLIRM(obj_dml_data, ml_g, ml_m) dml_irm_obj.fit() print(dml_irm_obj.summary) diff --git a/doc/guide/models.rst b/doc/guide/models.rst index 3977c6d4..0496d95e 100644 --- a/doc/guide/models.rst +++ b/doc/guide/models.rst @@ -3,512 +3,43 @@ Models ---------- -The :ref:`DoubleML ` includes the following models. +The :ref:`DoubleML `-package includes the following models. + +.. _plm-models: Partially linear models (PLM) +++++++++++++++++++++++++++++ -The partially linear models (PLM) take the form - -.. math:: - - Y = D \theta_0 + g_0(X) + \zeta, - -where treatment effects are additive with some sort of linear form. - -.. _plr-model: - -Partially linear regression model (PLR) -*************************************** - -.. include:: ../shared/models/plr.rst - -.. include:: ../shared/causal_graphs/plr_irm_causal_graph.rst - -``DoubleMLPLR`` implements PLR models. -Estimation is conducted via its ``fit()`` method: - -.. tab-set:: - - .. tab-item:: Python - :sync: py - - .. ipython:: python - - import numpy as np - import doubleml as dml - from doubleml.datasets import make_plr_CCDDHNR2018 - from sklearn.ensemble import RandomForestRegressor - from sklearn.base import clone - - learner = RandomForestRegressor(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) - ml_l = clone(learner) - ml_m = clone(learner) - np.random.seed(1111) - data = make_plr_CCDDHNR2018(alpha=0.5, n_obs=500, dim_x=20, return_type='DataFrame') - obj_dml_data = dml.DoubleMLData(data, 'y', 'd') - dml_plr_obj = dml.DoubleMLPLR(obj_dml_data, ml_l, ml_m) - print(dml_plr_obj.fit()) - - .. tab-item:: R - :sync: r - - .. jupyter-execute:: - - library(DoubleML) - library(mlr3) - library(mlr3learners) - library(data.table) - lgr::get_logger("mlr3")$set_threshold("warn") +.. include:: models/plm/plm_models.inc - learner = lrn("regr.ranger", num.trees = 100, mtry = 20, min.node.size = 2, max.depth = 5) - ml_l = learner$clone() - ml_m = learner$clone() - set.seed(1111) - data = make_plr_CCDDHNR2018(alpha=0.5, n_obs=500, dim_x=20, return_type='data.table') - obj_dml_data = DoubleMLData$new(data, y_col="y", d_cols="d") - dml_plr_obj = DoubleMLPLR$new(obj_dml_data, ml_l, ml_m) - dml_plr_obj$fit() - print(dml_plr_obj) - - -.. _pliv-model: - -Partially linear IV regression model (PLIV) -******************************************* - -.. include:: ../shared/models/pliv.rst - -.. 
include:: ../shared/causal_graphs/pliv_iivm_causal_graph.rst - -``DoubleMLPLIV`` implements PLIV models. -Estimation is conducted via its ``fit()`` method: - -.. tab-set:: - - .. tab-item:: Python - :sync: py - - .. ipython:: python - :okwarning: - - import numpy as np - import doubleml as dml - from doubleml.datasets import make_pliv_CHS2015 - from sklearn.ensemble import RandomForestRegressor - from sklearn.base import clone - - learner = RandomForestRegressor(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) - ml_l = clone(learner) - ml_m = clone(learner) - ml_r = clone(learner) - np.random.seed(2222) - data = make_pliv_CHS2015(alpha=0.5, n_obs=500, dim_x=20, dim_z=1, return_type='DataFrame') - obj_dml_data = dml.DoubleMLData(data, 'y', 'd', z_cols='Z1') - dml_pliv_obj = dml.DoubleMLPLIV(obj_dml_data, ml_l, ml_m, ml_r) - print(dml_pliv_obj.fit()) - - .. tab-item:: R - :sync: r - - .. jupyter-execute:: - - library(DoubleML) - library(mlr3) - library(mlr3learners) - library(data.table) - - learner = lrn("regr.ranger", num.trees = 100, mtry = 20, min.node.size = 2, max.depth = 5) - ml_l = learner$clone() - ml_m = learner$clone() - ml_r = learner$clone() - set.seed(2222) - data = make_pliv_CHS2015(alpha=0.5, n_obs=500, dim_x=20, dim_z=1, return_type="data.table") - obj_dml_data = DoubleMLData$new(data, y_col="y", d_col = "d", z_cols= "Z1") - dml_pliv_obj = DoubleMLPLIV$new(obj_dml_data, ml_l, ml_m, ml_r) - dml_pliv_obj$fit() - print(dml_pliv_obj) +.. _irm-models: Interactive regression models (IRM) ++++++++++++++++++++++++++++++++++++ -The interactive regression model (IRM) take the form - -.. math:: - - Y = g_0(D, X) + U, - -where treatment effects are fully heterogeneous. - -.. _irm-model: - -Binary Interactive Regression Model (IRM) -***************************************** - -.. include:: ../shared/models/irm.rst - -.. include:: ../shared/causal_graphs/plr_irm_causal_graph.rst - -``DoubleMLIRM`` implements IRM models. -Estimation is conducted via its ``fit()`` method: - -.. tab-set:: - - .. tab-item:: Python - :sync: py - - .. ipython:: python - - import numpy as np - import doubleml as dml - from doubleml.datasets import make_irm_data - from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier - - ml_g = RandomForestRegressor(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) - ml_m = RandomForestClassifier(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) - np.random.seed(3333) - data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type='DataFrame') - obj_dml_data = dml.DoubleMLData(data, 'y', 'd') - dml_irm_obj = dml.DoubleMLIRM(obj_dml_data, ml_g, ml_m) - print(dml_irm_obj.fit()) - - .. tab-item:: R - :sync: r - - .. jupyter-execute:: - - library(DoubleML) - library(mlr3) - library(mlr3learners) - library(data.table) - - set.seed(3333) - ml_g = lrn("regr.ranger", num.trees = 100, mtry = 10, min.node.size = 2, max.depth = 5) - ml_m = lrn("classif.ranger", num.trees = 100, mtry = 10, min.node.size = 2, max.depth = 5) - data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type="data.table") - obj_dml_data = DoubleMLData$new(data, y_col="y", d_cols="d") - dml_irm_obj = DoubleMLIRM$new(obj_dml_data, ml_g, ml_m) - dml_irm_obj$fit() - print(dml_irm_obj) - -.. _irm-apo-model: - -Average Potential Outcomes (APOs) -********************************* - -.. include:: ../shared/models/apo.rst - -``DoubleMLAPO`` implements the estimation of average potential outcomes. 
-Estimation is conducted via its ``fit()`` method: - -.. tab-set:: - - .. tab-item:: Python - :sync: py - - .. ipython:: python - - import numpy as np - import doubleml as dml - from doubleml.datasets import make_irm_data - from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier - - ml_g = RandomForestRegressor(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) - ml_m = RandomForestClassifier(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) - np.random.seed(3333) - data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type='DataFrame') - obj_dml_data = dml.DoubleMLData(data, 'y', 'd') - dml_apo_obj = dml.DoubleMLAPO(obj_dml_data, ml_g, ml_m, treatment_level=0) - print(dml_apo_obj.fit()) - - -.. _irm-apos-model: - -Average Potential Outcomes (APOs) for Multiple Treatment Levels -*************************************************************** - -.. include:: ../shared/models/apos.rst - -``DoubleMLAPOS`` implements the estimation of average potential outcomes for multiple treatment levels. -Estimation is conducted via its ``fit()`` method. The ``causal_contrast()`` method allows to estimate causal contrasts between treatment levels: - -.. tab-set:: - - .. tab-item:: Python - :sync: py - - .. ipython:: python - - import numpy as np - import doubleml as dml - from doubleml.datasets import make_irm_data - from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier +.. include:: models/irm/irm_models.inc - ml_g = RandomForestRegressor(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) - ml_m = RandomForestClassifier(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) - np.random.seed(3333) - data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type='DataFrame') - obj_dml_data = dml.DoubleMLData(data, 'y', 'd') - dml_apos_obj = dml.DoubleMLAPOS(obj_dml_data, ml_g, ml_m, treatment_levels=[0, 1]) - print(dml_apos_obj.fit()) - causal_contrast_model = dml_apos_obj.causal_contrast(reference_levels=0) - print(causal_contrast_model.summary) - - -.. _iivm-model: - -Interactive IV model (IIVM) -*************************** - -.. include:: ../shared/models/iivm.rst - -.. include:: ../shared/causal_graphs/pliv_iivm_causal_graph.rst - -``DoubleMLIIVM`` implements IIVM models. -Estimation is conducted via its ``fit()`` method: - -.. tab-set:: - - .. tab-item:: Python - :sync: py - - .. ipython:: python - :okwarning: - - import numpy as np - import doubleml as dml - from doubleml.datasets import make_iivm_data - from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier - - ml_g = RandomForestRegressor(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) - ml_m = RandomForestClassifier(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) - ml_r = RandomForestClassifier(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) - np.random.seed(4444) - data = make_iivm_data(theta=0.5, n_obs=1000, dim_x=20, alpha_x=1.0, return_type='DataFrame') - obj_dml_data = dml.DoubleMLData(data, 'y', 'd', z_cols='z') - dml_iivm_obj = dml.DoubleMLIIVM(obj_dml_data, ml_g, ml_m, ml_r) - print(dml_iivm_obj.fit()) - - .. tab-item:: R - :sync: r - - .. 
jupyter-execute:: - - library(DoubleML) - library(mlr3) - library(mlr3learners) - library(data.table) - - set.seed(4444) - ml_g = lrn("regr.ranger", num.trees = 100, mtry = 20, min.node.size = 2, max.depth = 5) - ml_m = lrn("classif.ranger", num.trees = 100, mtry = 20, min.node.size = 2, max.depth = 5) - ml_r = ml_m$clone() - data = make_iivm_data(theta=0.5, n_obs=1000, dim_x=20, alpha_x=1, return_type="data.table") - obj_dml_data = DoubleMLData$new(data, y_col="y", d_cols="d", z_cols="z") - dml_iivm_obj = DoubleMLIIVM$new(obj_dml_data, ml_g, ml_m, ml_r) - dml_iivm_obj$fit() - print(dml_iivm_obj) - - -.. _did-model: +.. _did-models: Difference-in-Differences Models (DID) ++++++++++++++++++++++++++++++++++++++ -.. include:: ../shared/models/did.rst - - -.. _did-pa-model: - -Panel data -********** - -If panel data are available, the observations are assumed to be iid. of form :math:`(Y_{i0}, Y_{i1}, D_i, X_i)`. -Remark that the difference :math:`\Delta Y_i= Y_{i1}-Y_{i0}` has to be defined as the outcome ``y`` in the ``DoubleMLData`` object. - -``DoubleMLIDID`` implements difference-in-differences models for panel data. -Estimation is conducted via its ``fit()`` method: - -.. tab-set:: - - .. tab-item:: Python - :sync: py - - .. ipython:: python - :okwarning: - - import numpy as np - import doubleml as dml - from doubleml.datasets import make_did_SZ2020 - from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier - - ml_g = RandomForestRegressor(n_estimators=100, max_depth=5, min_samples_leaf=5) - ml_m = RandomForestClassifier(n_estimators=100, max_depth=5, min_samples_leaf=5) - np.random.seed(42) - data = make_did_SZ2020(n_obs=500, return_type='DataFrame') - # y is already defined as the difference of observed outcomes - obj_dml_data = dml.DoubleMLData(data, 'y', 'd') - dml_did_obj = dml.DoubleMLDID(obj_dml_data, ml_g, ml_m) - print(dml_did_obj.fit()) - -.. _did-cs-model: - -Repeated cross-sections -*********************** - -For repeated cross-sections, the observations are assumed to be iid. of form :math:`(Y_{i}, D_i, X_i, T_i)`, -where :math:`T_i` is a dummy variable if unit :math:`i` is observed pre- or post-treatment period, such -that the observed outcome can be defined as - -.. math:: - - Y_i = T_i Y_{i1} + (1-T_i) Y_{i0}. - -Further, treatment and covariates are assumed to be stationary, such that the joint distribution of :math:`(D,X)` is invariant to :math:`T`. - -``DoubleMLIDIDCS`` implements difference-in-differences models for repeated cross-sections. -Estimation is conducted via its ``fit()`` method: +.. include:: models/did/did_models.inc -.. tab-set:: - .. tab-item:: Python - :sync: py - - .. ipython:: python - :okwarning: - - import numpy as np - import doubleml as dml - from doubleml.datasets import make_did_SZ2020 - from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier - - ml_g = RandomForestRegressor(n_estimators=100, max_depth=5, min_samples_leaf=5) - ml_m = RandomForestClassifier(n_estimators=100, max_depth=5, min_samples_leaf=5) - np.random.seed(42) - data = make_did_SZ2020(n_obs=500, cross_sectional_data=True, return_type='DataFrame') - obj_dml_data = dml.DoubleMLData(data, 'y', 'd', t_col='t') - dml_did_obj = dml.DoubleMLDIDCS(obj_dml_data, ml_g, ml_m) - print(dml_did_obj.fit()) - -.. _ssm-model: +.. _ssm-models: Sample Selection Models (SSM) ++++++++++++++++++++++++++++++++++++++ -.. include:: ../shared/models/ssm.rst - -.. 
_ssm-mar-model: - -Missingness at Random -********************* - -Consider the following two additional assumptions for the sample selection model: - -- **Cond. Independence of Selection:** :math:`Y_i(d) \perp S_i|D_i=d, X_i\quad a.s.` for :math:`d=0,1` -- **Common Support:** :math:`P(D_i=1|X_i)>0` and :math:`P(S_i=1|D_i=d, X_i)>0` for :math:`d=0,1` - -such that outcomes are missing at random (for the score see :ref:`Scores `). - -``DoubleMLSSM`` implements sample selection models. The score ``score='missing-at-random'`` refers to the correponding score -relying on the assumptions above. The ``DoubleMLData`` object has to be defined with the additional argument ``s_col`` for the selection indicator. -Estimation is conducted via its ``fit()`` method: - -.. tab-set:: - - .. tab-item:: Python - :sync: py - - .. ipython:: python - :okwarning: - - import numpy as np - from sklearn.linear_model import LassoCV, LogisticRegressionCV - from doubleml.datasets import make_ssm_data - import doubleml as dml - - np.random.seed(42) - n_obs = 2000 - df = make_ssm_data(n_obs=n_obs, mar=True, return_type='DataFrame') - dml_data = dml.DoubleMLData(df, 'y', 'd', s_col='s') - - ml_g = LassoCV() - ml_m = LogisticRegressionCV(penalty='l1', solver='liblinear') - ml_pi = LogisticRegressionCV(penalty='l1', solver='liblinear') - - dml_ssm = dml.DoubleMLSSM(dml_data, ml_g, ml_m, ml_pi, score='missing-at-random') - dml_ssm.fit() - print(dml_ssm) - - -.. _ssm-nr-model: - -Nonignorable Nonresponse -************************ - -When sample selection or outcome attriction is realated to unobservables, identification generally requires an instrument for the selection indicator :math:`S_i`. -Consider the following additional assumptions for the instrumental variable: - -- **Cond. Correlation:** :math:`\exists Z: \mathbb{E}[Z\cdot S|D,X] \neq 0` -- **Cond. Independence:** :math:`Y_i(d,z)=Y_i(d)` and :math:`Y_i \perp Z_i|D_i=d, X_i\quad a.s.` for :math:`d=0,1` - -This requires the instrumental variable :math:`Z_i`, which must not affect :math:`Y_i` or be associated -with unobservables affecting :math:`Y_i` conditional on :math:`D_i` and :math:`X_i`. Further, the selection is determined via -a (unknown) threshold model: - -- **Threshold:** :math:`S_i = 1\{V_i \le \xi(D,X,Z)\}` where :math:`\xi` is a general function and :math:`V_i` is a scalar with strictly monotonic cumulative distribution function conditional on :math:`X_i`. -- **Cond. Independence:** :math:`Y_i \perp (Z_i, D_i)|X_i`. - -Let :math:`\Pi_i := P(S_i=1|D_i, X_i, Z_i)` denote the selection probability. -Additionally, the following assumptions are required: - -- **Common Support for Treatment:** :math:`P(D_i=1|X_i, \Pi)>0` -- **Cond. Effect Homogeneity:** :math:`\mathbb{E}[Y_i(1)-Y_i(0)|S_i=1, X_i=x, V_i=v] = \mathbb{E}[Y_i(1)-Y_i(0)|X_i=x, V_i=v]` -- **Common Support for Selection:** :math:`P(S_i=1|D_i=d, X_i=x, Z_i=z)>0\quad a.s.` for :math:`d=0,1` - -For further details, see `Bia, Huber and Lafférs (2023) `_. - -.. figure:: figures/py_ssm.svg - :width: 400 - :alt: DAG - :align: center - - Causal paths under nonignorable nonresponse - - -``DoubleMLSSM`` implements sample selection models. The score ``score='nonignorable'`` refers to the correponding score -relying on the assumptions above. The ``DoubleMLData`` object has to be defined with the additional argument ``s_col`` for the selection indicator -and ``z_cols`` for the instrument. -Estimation is conducted via its ``fit()`` method: - -.. tab-set:: - - .. tab-item:: Python - :sync: py - - .. 
ipython:: python - :okwarning: - - import numpy as np - from sklearn.linear_model import LassoCV, LogisticRegressionCV - from doubleml.datasets import make_ssm_data - import doubleml as dml +.. include:: models/ssm/ssm_models.inc - np.random.seed(42) - n_obs = 2000 - df = make_ssm_data(n_obs=n_obs, mar=False, return_type='DataFrame') - dml_data = dml.DoubleMLData(df, 'y', 'd', z_cols='z', s_col='s') - ml_g = LassoCV() - ml_m = LogisticRegressionCV(penalty='l1', solver='liblinear') - ml_pi = LogisticRegressionCV(penalty='l1', solver='liblinear') - - dml_ssm = dml.DoubleMLSSM(dml_data, ml_g, ml_m, ml_pi, score='nonignorable') - dml_ssm.fit() - print(dml_ssm) +.. _rdd-models: Regression Discontinuity Designs (RDD) ++++++++++++++++++++++++++++++++++++++ -.. include:: ../shared/models/rdd.rst +.. include:: models/rdd/rdd_models.inc diff --git a/doc/guide/models/did/did_aggregation.rst b/doc/guide/models/did/did_aggregation.rst new file mode 100644 index 00000000..d1750089 --- /dev/null +++ b/doc/guide/models/did/did_aggregation.rst @@ -0,0 +1,57 @@ +The following section considers the aggregation of different :math:`ATT(\mathrm{g},t)` to summary measures based on `Callaway and Sant'Anna (2021) `_. +All implemented aggregation schemes take the form of a weighted average of the :math:`ATT(\mathrm{g},t)` estimates + +.. math:: + \theta = \sum_{\mathrm{g}\in \mathcal{G}} \sum_{t=2}^{\mathcal{T}} \omega(\mathrm{g},t) \cdot ATT(\mathrm{g},t) + +where :math:`\omega(\mathrm{g},t)` is a weight function based on the treatment group :math:`\mathrm{g}` and time period :math:`t`. +The aggragation schemes are implmented via the ``aggregate()`` method of the ``DoubleMLDIDMulti`` class. + + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + :okwarning: + + import numpy as np + import doubleml as dml + from doubleml.did.datasets import make_did_CS2021 + from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier + + np.random.seed(42) + df = make_did_CS2021(n_obs=500) + dml_data = dml.data.DoubleMLPanelData( + df, + y_col="y", + d_cols="d", + id_col="id", + t_col="t", + x_cols=["Z1", "Z2", "Z3", "Z4"], + datetime_unit="M" + ) + dml_did_obj = dml.did.DoubleMLDIDMulti( + obj_dml_data=dml_data, + ml_g=RandomForestRegressor(min_samples_split=10), + ml_m=RandomForestClassifier(min_samples_split=10), + gt_combinations="standard", + control_group="never_treated", + ) + dml_did_obj.fit() + + agg_did_obj = dml_did_obj.aggregate(aggregation="group") + agg_did_obj.aggregated_frameworks.bootstrap() + print(agg_did_obj) + +The method ``aggregate()`` requires the ``aggregation`` argument to be set to one of the following values: + +* ``'group'``: aggregates :math:`ATT(\mathrm{g},t)` estimates by the treatment group :math:`\mathrm{g}`. +* ``'time'``: aggregates :math:`ATT(\mathrm{g},t)` estimates by the time period :math:`t` (based on group size). +* ``'eventstudy'``: aggregates :math:`ATT(\mathrm{g},t)` estimates based on time difference to first treatment assignment like an event study (based on group size). +* ``dictionary``: a dictionary with values containing the aggregation weights (as ``numpy.ma.MaskedArray``). + +.. note:: + A more detailed example on effect aggregation is available in the :ref:`example gallery `. + For a detailed discussion on different aggregation schemes, we refer to of `Callaway and Sant'Anna (2021) `_. 
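To make the aggregation formula above concrete, here is a minimal sketch in plain ``numpy``. The ATT values and weights below are hypothetical placeholders, not output of the package; in practice both come from a fitted ``DoubleMLDIDMulti`` model and the chosen aggregation scheme. The sketch only illustrates that each aggregated effect is a weighted average of the :math:`ATT(\mathrm{g},t)` estimates.

.. code-block:: python

    import numpy as np

    # Hypothetical ATT(g, t) point estimates, e.g. for (g, t) in
    # {(2, 2), (2, 3), (3, 3), (3, 4)} -- placeholders for illustration only.
    att_gt = np.array([0.8, 1.1, 0.9, 1.3])

    # Hypothetical aggregation weights omega(g, t); a valid scheme uses
    # non-negative weights that sum to one over the selected combinations.
    omega_gt = np.array([0.4, 0.3, 0.2, 0.1])

    # Aggregated effect: theta = sum_g sum_t omega(g, t) * ATT(g, t)
    theta = float(np.sum(omega_gt * att_gt))
    print(theta)  # 0.96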
diff --git a/doc/guide/models/did/did_binary.rst b/doc/guide/models/did/did_binary.rst new file mode 100644 index 00000000..90fda78d --- /dev/null +++ b/doc/guide/models/did/did_binary.rst @@ -0,0 +1,94 @@ +**Difference-in-Differences Models (DID)** implemented in the package focus on the the binary treatment case with +with two treatment periods. + +Adopting the notation from `Sant'Anna and Zhao (2020) `_, +let :math:`Y_{it}` be the outcome of interest for unit :math:`i` at time :math:`t`. Further, let :math:`D_{it}=1` indicate +if unit :math:`i` is treated before time :math:`t` (otherwise :math:`D_{it}=0`). Since all units start as untreated (:math:`D_{i0}=0`), define +:math:`D_{i}=D_{i1}.` Relying on the potential outcome notation, denote :math:`Y_{it}(0)` as the outcome of unit :math:`i` at time :math:`t` if the unit did not receive +treatment up until time :math:`t` and analogously for :math:`Y_{it}(1)` with treatment. Consequently, the observed outcome +for unit is :math:`i` at time :math:`t` is :math:`Y_{it}=D_{it} Y_{it}(1) + (1-D_{it}) Y_{it}(0)`. Further, let +:math:`X_i` be a vector of pre-treatment covariates. + +Target parameter of interest is the average treatment effect on the treated (ATTE) + +.. math:: + + \theta_0 = \mathbb{E}[Y_{i1}(1)- Y_{i1}(0)|D_i=1]. + +The corresponding identifying assumptions are + +- **(Cond.) Parallel Trends:** :math:`\mathbb{E}[Y_{i1}(0) - Y_{i0}(0)|X_i, D_i=1] = \mathbb{E}[Y_{i1}(0) - Y_{i0}(0)|X_i, D_i=0]\quad a.s.` +- **Overlap:** :math:`\exists\epsilon > 0`: :math:`P(D_i=1) > \epsilon` and :math:`P(D_i=1|X_i) \le 1-\epsilon\quad a.s.` + +.. note:: + For a more detailed introduction and recent developments of the difference-in-differences literature see e.g. `Roth et al. (2022) `_. + + +Panel Data +~~~~~~~~~~~ + +If panel data are available, the observations are assumed to be iid. of form :math:`(Y_{i0}, Y_{i1}, D_i, X_i)`. +Remark that the difference :math:`\Delta Y_i= Y_{i1}-Y_{i0}` has to be defined as the outcome ``y`` in the ``DoubleMLData`` object. + +``DoubleMLIDID`` implements difference-in-differences models for panel data. +Estimation is conducted via its ``fit()`` method: + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + :okwarning: + + import numpy as np + import doubleml as dml + from doubleml.did.datasets import make_did_SZ2020 + from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier + + ml_g = RandomForestRegressor(n_estimators=100, max_depth=5, min_samples_leaf=5) + ml_m = RandomForestClassifier(n_estimators=100, max_depth=5, min_samples_leaf=5) + np.random.seed(42) + data = make_did_SZ2020(n_obs=500, return_type='DataFrame') + # y is already defined as the difference of observed outcomes + obj_dml_data = dml.DoubleMLData(data, 'y', 'd') + dml_did_obj = dml.DoubleMLDID(obj_dml_data, ml_g, ml_m) + print(dml_did_obj.fit()) + + +Repeated cross-sections +~~~~~~~~~~~~~~~~~~~~~~~~~ + +For repeated cross-sections, the observations are assumed to be iid. of form :math:`(Y_{i}, D_i, X_i, T_i)`, +where :math:`T_i` is a dummy variable if unit :math:`i` is observed pre- or post-treatment period, such +that the observed outcome can be defined as + +.. math:: + + Y_i = T_i Y_{i1} + (1-T_i) Y_{i0}. + +Further, treatment and covariates are assumed to be stationary, such that the joint distribution of :math:`(D,X)` is invariant to :math:`T`. + +``DoubleMLIDIDCS`` implements difference-in-differences models for repeated cross-sections. +Estimation is conducted via its ``fit()`` method: + +.. 
tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + :okwarning: + + import numpy as np + import doubleml as dml + from doubleml.did.datasets import make_did_SZ2020 + from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier + + ml_g = RandomForestRegressor(n_estimators=100, max_depth=5, min_samples_leaf=5) + ml_m = RandomForestClassifier(n_estimators=100, max_depth=5, min_samples_leaf=5) + np.random.seed(42) + data = make_did_SZ2020(n_obs=500, cross_sectional_data=True, return_type='DataFrame') + obj_dml_data = dml.DoubleMLData(data, 'y', 'd', t_col='t') + dml_did_obj = dml.DoubleMLDIDCS(obj_dml_data, ml_g, ml_m) + print(dml_did_obj.fit()) diff --git a/doc/guide/models/did/did_cs.rst b/doc/guide/models/did/did_cs.rst new file mode 100644 index 00000000..dee07ecb --- /dev/null +++ b/doc/guide/models/did/did_cs.rst @@ -0,0 +1,2 @@ +.. note:: + Will be implemented soon. \ No newline at end of file diff --git a/doc/guide/models/did/did_models.inc b/doc/guide/models/did/did_models.inc new file mode 100644 index 00000000..88575949 --- /dev/null +++ b/doc/guide/models/did/did_models.inc @@ -0,0 +1,40 @@ +.. include:: /guide/models/did/did_setup.rst + + +.. _did-pa-model: + +Panel data +********** + +.. include:: /guide/models/did/did_pa.rst + + +.. _did-cs-model: + +Repeated cross-sections +*********************** + +.. include:: /guide/models/did/did_cs.rst + + +.. _did-aggregation: + +Effect Aggregation +****************** + +.. include:: /guide/models/did/did_aggregation.rst + + +.. _did-binary-model: + +Two treatment periods +********************* + +.. warning:: + This documentation refers to the deprecated implementation for two time periods. + This functionality will be removed in a future version. + +.. note:: + We recommend using the implementation :ref:`did-pa-model` and :ref:`did-cs-model`. + +.. include:: /guide/models/did/did_binary.rst diff --git a/doc/guide/models/did/did_pa.rst b/doc/guide/models/did/did_pa.rst new file mode 100644 index 00000000..3b12d6d9 --- /dev/null +++ b/doc/guide/models/did/did_pa.rst @@ -0,0 +1,97 @@ +For the estimation of the target parameters :math:`ATT(\mathrm{g},t)` the following nuisance functions are required: + +.. math:: + \begin{align} + g_{0, \mathrm{g}, t_\text{pre}, t_\text{eval}, \delta}(X_i) &:= \mathbb{E}[Y_{i,t_\text{eval}} - Y_{i,t_\text{pre}}|X_i, C_{i,t_\text{eval} + \delta}^{(\cdot)} = 1], \\ + m_{0, \mathrm{g}, t_\text{eval} + \delta}(X_i) &:= P(G_i^{\mathrm{g}}=1|X_i, G_i^{\mathrm{g}} + C_{i,t_\text{eval} + \delta}^{(\cdot)}=1). + \end{align} + +where :math:`g_{0, \mathrm{g}, t_\text{pre}, t_\text{eval},\delta}(\cdot)` denotes the population outcome regression function and :math:`m_{0, \mathrm{g}, t_\text{eval} + \delta}(\cdot)` the generalized propensity score. +The interpretation of the parameters is as follows: + +* :math:`\mathrm{g}` is the first post-treatment period of interest, i.e. the treatment group. +* :math:`t_\text{pre}` is the pre-treatment period, i.e. the time period from which the conditional parallel trends are assumed. +* :math:`t_\text{eval}` is the time period of interest or evaluation period, i.e. the time period where the treatment effect is evaluated. +* :math:`\delta` is number of anticipation periods, i.e. the number of time periods for which units are assumed to anticipate the treatment. + +.. note:: + Remark that the nuisance functions depend on the control group used for the estimation of the target parameter. 
+ By slight abuse of notation we use the same notation for both control groups :math:`C_{i,t}^{(\text{nev})}` and :math:`C_{i,t}^{(\text{nyt})}`. More specifically, the + control group only depends on :math:`\delta` for *not yet treated* units. + +Under these assumptions the target parameter :math:`ATT(\mathrm{g},t_\text{eval})` can be estimated by choosing a suitable combination +of :math:`(\mathrm{g}, t_\text{pre}, t_\text{eval}, \delta)` if :math:`t_\text{eval} - t_\text{pre} \ge 1 + \delta`, i.e. the parallel trends are assumed to hold at least one period more than the anticipation period. + +.. note:: + The choice :math:`t_\text{pre}= \min(\mathrm{g},t_\text{eval}) -\delta-1` corresponds to the definition of :math:`ATT_{dr}(\mathrm{g},t_\text{eval};\delta)` from `Callaway and Sant'Anna (2021) `_. + + As an example, if the target parameter is the effect on the group receiving treatment in :math:`2006` but evaluated in :math:`2007` with an anticipation period of :math:`\delta=1`, then the pre-treatment period is :math:`2004`. + The parallel trend assumption is slightly stronger with anticipation, as the trends have to be parallel for a longer period, i.e. :math:`ATT_{dr}(2006,2007;1)=ATT(2006,2004,2007)`. + +In the following, we will omit the subscript :math:`\delta` in the notation of the nuisance functions and the control group (implicitly assuming :math:`\delta=0`). + +For a given tuple :math:`(\mathrm{g}, t_\text{pre}, t_\text{eval})` the target parameter :math:`ATT(\mathrm{g},t)` is estimated by solving the empirical version of the following linear moment condition: + +.. math:: + ATT(\mathrm{g}, t_\text{pre}, t_\text{eval}):= -\frac{\mathbb{E}[\psi_b(W,\eta_0)]}{\mathbb{E}[\psi_a(W,\eta_0)]} + +with nuisance elements :math:`\eta_0=(g_{0, \mathrm{g}, t_\text{pre}, t_\text{eval}}, m_{0, \mathrm{g}, t_\text{eval}})` and score function :math:`\psi(W,\theta, \eta)` being defined in section :ref:`did-pa-score`. +Under the identifying assumptions above + +.. math:: + ATT(\mathrm{g}, t_\text{pre}, t_\text{eval}) = ATT(\mathrm{g},t). + +``DoubleMLDIDMulti`` implements the estimation of :math:`ATT(\mathrm{g}, t_\text{pre}, t_\text{eval})` for multiple time periods and requires :ref:`DoubleMLPanelData ` as input. +Setting ``gt_combinations='standard'`` will estimate the target parameter for all (possible) combinations of :math:`(\mathrm{g}, t_\text{pre}, t_\text{eval})` with :math:`\mathrm{g}\in\{2,\dots,\mathcal{T}\}` and :math:`(t_\text{pre}, t_\text{eval})` with :math:`t_\text{eval}\in\{2,\dots,\mathcal{T}\}` and +:math:`t_\text{pre}= \min(\mathrm{g},t_\text{eval}) -\delta-1`. +This corresponds to the setting where all trends are set as short as possible, but still respecting the anticipation period. + +Estimation is conducted via its ``fit()`` method: + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. 
ipython:: python + :okwarning: + + import numpy as np + import doubleml as dml + from doubleml.did.datasets import make_did_CS2021 + from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier + + np.random.seed(42) + df = make_did_CS2021(n_obs=500) + dml_data = dml.data.DoubleMLPanelData( + df, + y_col="y", + d_cols="d", + id_col="id", + t_col="t", + x_cols=["Z1", "Z2", "Z3", "Z4"], + datetime_unit="M" + ) + dml_did_obj = dml.did.DoubleMLDIDMulti( + obj_dml_data=dml_data, + ml_g=RandomForestRegressor(min_samples_split=10), + ml_m=RandomForestClassifier(min_samples_split=10), + gt_combinations="standard", + control_group="never_treated", + ) + print(dml_did_obj.fit()) + +.. note:: + Remark that the output contains two different outcome regressions :math:`g(0,X)` and :math:`g(1,X)`. As in the :ref:`IRM model ` + the outcome regression :math:`g(0,X)` refers to the control group, whereas :math:`g(1,X)` refers to the outcome regression for the treatment group, i.e. + + .. math:: + \begin{align} + g(0,X) &\approx g_{0, \mathrm{g}, t_\text{pre}, t_\text{eval}, \delta}(X_i) = \mathbb{E}[Y_{i,t_\text{eval}} - Y_{i,t_\text{pre}}|X_i, C_{i,t_\text{eval} + \delta}^{(\cdot)} = 1],\\ + g(1,X) &\approx \mathbb{E}[Y_{i,t_\text{eval}} - Y_{i,t_\text{pre}}|X_i, G_i^{\mathrm{g}} = 1]. + \end{align} + + Further, :math:`g(1,X)` is only required for :ref:`Sensitivity Analysis ` and is not used for the estimation of the target parameter. + +.. note:: + A more detailed example is available in the :ref:`Example Gallery `. diff --git a/doc/guide/models/did/did_setup.rst b/doc/guide/models/did/did_setup.rst new file mode 100644 index 00000000..73fbb732 --- /dev/null +++ b/doc/guide/models/did/did_setup.rst @@ -0,0 +1,67 @@ +**Difference-in-Differences Models (DID)** implemented in the package focus on the binary treatment case with staggered adoption. + +.. note:: + The notation and identifying assumptions are based on `Callaway and Sant'Anna (2021) `_, but adjusted to better fit into the general package documentation conventions, sometimes slightly abusing notation. + The underlying score functions are based on `Sant'Anna and Zhao (2020) `_, `Zimmert (2018) `_ and `Chang (2020) `_. + For a more detailed introduction and recent developments of the difference-in-differences literature see e.g. `Roth et al. (2022) `_. + +We consider :math:`n` observed units at time periods :math:`t=1,\dots, \mathcal{T}`. +The treatment status for unit :math:`i` at time period :math:`t` is denoted by the binary variable :math:`D_{i,t}`. The package considers the staggered adoption setting, +where a unit stays treated after it has been treated once (*Irreversibility of Treatment*). + +Let :math:`G^{\mathrm{g}}_i` be an indicator variable that takes value one if unit :math:`i` is first treated in time period :math:`t=\mathrm{g}`, :math:`G^{\mathrm{g}}_i=1\{G_i=\mathrm{g}\}` with :math:`G_i` referring to the first post-treatment period. +If units are never exposed to the treatment, define :math:`G_i=\infty`. + +The target parameters are defined in terms of differences in potential outcomes. The observed and potential outcome for each unit :math:`i` at time period :math:`t` are assumed to be of the form + +.. math:: + Y_{i,t} = Y_{i,t}(0) + \sum_{\mathrm{g}=2}^{\mathcal{T}} (Y_{i,t}(\mathrm{g}) - Y_{i,t}(0)) \cdot G^{\mathrm{g}}_i, + +such that we observe one consistent potential outcome for each unit at each time period. + +The corresponding target parameters are the average causal effects of the treatment + +.. 
math:: + ATT(\mathrm{g},t):= \mathbb{E}[Y_{i,t}(\mathrm{g}) - Y_{i,t}(0)|G^{\mathrm{g}}_i=1]. + +This target parameter quantifies the average change in potential outcomes for units that are treated the first time in period :math:`\mathrm{g}` with the difference in outcome being evaluated for time period :math:`t`. +The corresponding control groups, defined by an indicator :math:`C`, can typically be set as either the *never treated* or *not yet treated* units. +Let + +.. math:: + \begin{align} + C_{i,t}^{(\text{nev})} \equiv C_{i}^{(\text{nev})} &:= 1\{G_i=\infty\} \quad \text{(never treated)}, \\ + C_{i,t}^{(\text{nyt})} &:= 1\{G_i > t\} \quad \text{(not yet treated)}. + \end{align} + +The corresponding identifying assumptions are: + +1. **Irreversibility of Treatment:** + :math:`D_{i,1} = 0 \quad a.s.` + For all :math:`t=2,\dots,\mathcal{T}`, :math:`D_{i,t-1} = 1` implies :math:`D_{i,t} = 1 \quad a.s.` + +2. **Panel Data (Random Sampling):** + :math:`(Y_{i,1},\dots, Y_{i,\mathcal{T}}, X_i, D_{i,1}, \dots, D_{i,\mathcal{T}})_{i=1}^n` is independent and identically distributed. + +3. **Limited Treatment Anticipation:** + There is a known :math:`\delta\ge 0` such that + :math:`\mathbb{E}[Y_{i,t}(\mathrm{g})|X_i, G_i^{\mathrm{g}}=1] = \mathbb{E}[Y_{i,t}(0)|X_i, G_i^{\mathrm{g}}=1]\quad a.s.` for all :math:`\mathrm{g}\in\mathcal{G}, t\in\{1,\dots,\mathcal{T}\}` such that :math:`t< \mathrm{g}-\delta`. + +4. **Conditional Parallel Trends:** + Let :math:`\delta` be defined as in Assumption 3. + For each :math:`\mathrm{g}\in\mathcal{G}` and :math:`t\in\{2,\dots,\mathcal{T}\}` such that :math:`t\ge \mathrm{g}-\delta`: + + a. **Never Treated:** + :math:`\mathbb{E}[Y_{i,t}(0) - Y_{i,t-1}(0)|X_i, G_i^{\mathrm{g}}=1] = \mathbb{E}[Y_{i,t}(0) - Y_{i,t-1}(0)|X_i,C_{i}^{(\text{nev})}=1] \quad a.s.` + + b. **Not Yet Treated:** + :math:`\mathbb{E}[Y_{i,t}(0) - Y_{i,t-1}(0)|X_i, G_i^{\mathrm{g}}=1] = \mathbb{E}[Y_{i,t}(0) - Y_{i,t-1}(0)|X_i,C_{i,t+\delta}^{(\text{nyt})}=1] \quad a.s.` + +5. **Overlap:** + For each time period :math:`t=2,\dots,\mathcal{T}` and :math:`\mathrm{g}\in\mathcal{G}` there exists an :math:`\epsilon > 0` such that + :math:`P(G_i^{\mathrm{g}}=1) > \epsilon` and :math:`P(G_i^{\mathrm{g}}=1|X_i, G_i^{\mathrm{g}} + C_{i,t}^{(\text{nyt})}=1) < 1-\epsilon\quad a.s.` + +.. note:: + For a detailed discussion of the assumptions see `Callaway and Sant'Anna (2021) `_. + +Under the assumptions above (either Assumption 4.a or 4.b), the target parameter :math:`ATT(\mathrm{g},t)` is identified, see Theorem 1 in `Callaway and Sant'Anna (2021) `_. 
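To make the group and control group indicators above concrete, the following is a minimal pandas sketch on hypothetical toy data; the column names ``id``, ``t`` and ``d`` and the example values are assumptions for illustration only, since the package derives these quantities internally from a ``DoubleMLPanelData`` object.

.. code-block:: python

    # Hypothetical long-format panel with a staggered, irreversible treatment
    import numpy as np
    import pandas as pd

    df = pd.DataFrame({
        "id": [1, 1, 1, 2, 2, 2, 3, 3, 3],
        "t":  [1, 2, 3, 1, 2, 3, 1, 2, 3],
        "d":  [0, 1, 1, 0, 0, 1, 0, 0, 0],
    })

    # G_i: first treatment period per unit, infinity for never-treated units
    first_treat = df[df["d"] == 1].groupby("id")["t"].min()
    df["G"] = df["id"].map(first_treat).fillna(np.inf)

    df["G_g2"] = (df["G"] == 2).astype(int)        # treatment group g = 2
    df["C_nev"] = (df["G"] == np.inf).astype(int)  # never treated
    df["C_nyt"] = (df["G"] > df["t"]).astype(int)  # not yet treated at period t
    print(df)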
\ No newline at end of file diff --git a/doc/shared/models/apo.rst b/doc/guide/models/irm/apo.rst similarity index 100% rename from doc/shared/models/apo.rst rename to doc/guide/models/irm/apo.rst diff --git a/doc/shared/models/apos.rst b/doc/guide/models/irm/apos.rst similarity index 100% rename from doc/shared/models/apos.rst rename to doc/guide/models/irm/apos.rst diff --git a/doc/shared/models/iivm.rst b/doc/guide/models/irm/iivm.rst similarity index 100% rename from doc/shared/models/iivm.rst rename to doc/guide/models/irm/iivm.rst diff --git a/doc/shared/models/irm.rst b/doc/guide/models/irm/irm.rst similarity index 100% rename from doc/shared/models/irm.rst rename to doc/guide/models/irm/irm.rst diff --git a/doc/guide/models/irm/irm_models.inc b/doc/guide/models/irm/irm_models.inc new file mode 100644 index 00000000..b61526a6 --- /dev/null +++ b/doc/guide/models/irm/irm_models.inc @@ -0,0 +1,177 @@ +The interactive regression model (IRM) take the form + +.. math:: + + Y = g_0(D, X) + U, + +where treatment effects are fully heterogeneous. + +.. _irm-model: + +Binary Interactive Regression Model (IRM) +***************************************** + +.. include:: /guide/models/irm/irm.rst + +.. include:: /shared/causal_graphs/plr_irm_causal_graph.rst + +``DoubleMLIRM`` implements IRM models. +Estimation is conducted via its ``fit()`` method: + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + + import numpy as np + import doubleml as dml + from doubleml.datasets import make_irm_data + from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier + + ml_g = RandomForestRegressor(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) + ml_m = RandomForestClassifier(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) + np.random.seed(3333) + data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type='DataFrame') + obj_dml_data = dml.DoubleMLData(data, 'y', 'd') + dml_irm_obj = dml.DoubleMLIRM(obj_dml_data, ml_g, ml_m) + print(dml_irm_obj.fit()) + + .. tab-item:: R + :sync: r + + .. jupyter-execute:: + + library(DoubleML) + library(mlr3) + library(mlr3learners) + library(data.table) + + set.seed(3333) + ml_g = lrn("regr.ranger", num.trees = 100, mtry = 10, min.node.size = 2, max.depth = 5) + ml_m = lrn("classif.ranger", num.trees = 100, mtry = 10, min.node.size = 2, max.depth = 5) + data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type="data.table") + obj_dml_data = DoubleMLData$new(data, y_col="y", d_cols="d") + dml_irm_obj = DoubleMLIRM$new(obj_dml_data, ml_g, ml_m) + dml_irm_obj$fit() + print(dml_irm_obj) + +.. _irm-apo-model: + +Average Potential Outcomes (APOs) +********************************* + +.. include:: /guide/models/irm/apo.rst + +``DoubleMLAPO`` implements the estimation of average potential outcomes. +Estimation is conducted via its ``fit()`` method: + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. 
ipython:: python + + import numpy as np + import doubleml as dml + from doubleml.datasets import make_irm_data + from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier + + ml_g = RandomForestRegressor(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) + ml_m = RandomForestClassifier(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) + np.random.seed(3333) + data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type='DataFrame') + obj_dml_data = dml.DoubleMLData(data, 'y', 'd') + dml_apo_obj = dml.DoubleMLAPO(obj_dml_data, ml_g, ml_m, treatment_level=0) + print(dml_apo_obj.fit()) + + +.. _irm-apos-model: + +Average Potential Outcomes (APOs) for Multiple Treatment Levels +*************************************************************** + +.. include:: /guide/models/irm/apos.rst + +``DoubleMLAPOS`` implements the estimation of average potential outcomes for multiple treatment levels. +Estimation is conducted via its ``fit()`` method. The ``causal_contrast()`` method allows to estimate causal contrasts between treatment levels: + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + + import numpy as np + import doubleml as dml + from doubleml.datasets import make_irm_data + from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier + + ml_g = RandomForestRegressor(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) + ml_m = RandomForestClassifier(n_estimators=100, max_features=10, max_depth=5, min_samples_leaf=2) + np.random.seed(3333) + data = make_irm_data(theta=0.5, n_obs=500, dim_x=10, return_type='DataFrame') + obj_dml_data = dml.DoubleMLData(data, 'y', 'd') + dml_apos_obj = dml.DoubleMLAPOS(obj_dml_data, ml_g, ml_m, treatment_levels=[0, 1]) + print(dml_apos_obj.fit()) + + causal_contrast_model = dml_apos_obj.causal_contrast(reference_levels=0) + print(causal_contrast_model.summary) + + +.. _iivm-model: + +Interactive IV model (IIVM) +*************************** + +.. include:: /guide/models/irm/iivm.rst + +.. include:: /shared/causal_graphs/pliv_iivm_causal_graph.rst + +``DoubleMLIIVM`` implements IIVM models. +Estimation is conducted via its ``fit()`` method: + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + :okwarning: + + import numpy as np + import doubleml as dml + from doubleml.datasets import make_iivm_data + from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier + + ml_g = RandomForestRegressor(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) + ml_m = RandomForestClassifier(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) + ml_r = RandomForestClassifier(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) + np.random.seed(4444) + data = make_iivm_data(theta=0.5, n_obs=1000, dim_x=20, alpha_x=1.0, return_type='DataFrame') + obj_dml_data = dml.DoubleMLData(data, 'y', 'd', z_cols='z') + dml_iivm_obj = dml.DoubleMLIIVM(obj_dml_data, ml_g, ml_m, ml_r) + print(dml_iivm_obj.fit()) + + .. tab-item:: R + :sync: r + + .. 
jupyter-execute:: + + library(DoubleML) + library(mlr3) + library(mlr3learners) + library(data.table) + + set.seed(4444) + ml_g = lrn("regr.ranger", num.trees = 100, mtry = 20, min.node.size = 2, max.depth = 5) + ml_m = lrn("classif.ranger", num.trees = 100, mtry = 20, min.node.size = 2, max.depth = 5) + ml_r = ml_m$clone() + data = make_iivm_data(theta=0.5, n_obs=1000, dim_x=20, alpha_x=1, return_type="data.table") + obj_dml_data = DoubleMLData$new(data, y_col="y", d_cols="d", z_cols="z") + dml_iivm_obj = DoubleMLIIVM$new(obj_dml_data, ml_g, ml_m, ml_r) + dml_iivm_obj$fit() + print(dml_iivm_obj) \ No newline at end of file diff --git a/doc/shared/models/pliv.rst b/doc/guide/models/plm/pliv.rst similarity index 100% rename from doc/shared/models/pliv.rst rename to doc/guide/models/plm/pliv.rst diff --git a/doc/guide/models/plm/plm_models.inc b/doc/guide/models/plm/plm_models.inc new file mode 100644 index 00000000..086a3051 --- /dev/null +++ b/doc/guide/models/plm/plm_models.inc @@ -0,0 +1,123 @@ +The partially linear models (PLM) take the form + +.. math:: + + Y = D \theta_0 + g_0(X) + \zeta, + +where treatment effects are additive with some sort of linear form. + +.. _plr-model: + +Partially linear regression model (PLR) +*************************************** + +.. include:: /guide/models/plm/plr.rst + +.. include:: /shared/causal_graphs/plr_irm_causal_graph.rst + +``DoubleMLPLR`` implements PLR models. Estimation is conducted via its ``fit()`` method. + +.. note:: + Remark that standard approach with ``score='partialling out'`` does not rely on a direct estimate of :math:`g_0(X)`, + but :math:`\ell_0(X) := \mathbb{E}[Y \mid X] = \theta_0 \mathbb{E}[D \mid X] + g(X)`. + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + + import numpy as np + import doubleml as dml + from doubleml.datasets import make_plr_CCDDHNR2018 + from sklearn.ensemble import RandomForestRegressor + from sklearn.base import clone + + learner = RandomForestRegressor(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) + ml_l = clone(learner) + ml_m = clone(learner) + np.random.seed(1111) + data = make_plr_CCDDHNR2018(alpha=0.5, n_obs=500, dim_x=20, return_type='DataFrame') + obj_dml_data = dml.DoubleMLData(data, 'y', 'd') + dml_plr_obj = dml.DoubleMLPLR(obj_dml_data, ml_l, ml_m) + print(dml_plr_obj.fit()) + + .. tab-item:: R + :sync: r + + .. jupyter-execute:: + + library(DoubleML) + library(mlr3) + library(mlr3learners) + library(data.table) + lgr::get_logger("mlr3")$set_threshold("warn") + + learner = lrn("regr.ranger", num.trees = 100, mtry = 20, min.node.size = 2, max.depth = 5) + ml_l = learner$clone() + ml_m = learner$clone() + set.seed(1111) + data = make_plr_CCDDHNR2018(alpha=0.5, n_obs=500, dim_x=20, return_type='data.table') + obj_dml_data = DoubleMLData$new(data, y_col="y", d_cols="d") + dml_plr_obj = DoubleMLPLR$new(obj_dml_data, ml_l, ml_m) + dml_plr_obj$fit() + print(dml_plr_obj) + + +.. _pliv-model: + +Partially linear IV regression model (PLIV) +******************************************* + +.. include:: /guide/models/plm/pliv.rst + +.. include:: /shared/causal_graphs/pliv_iivm_causal_graph.rst + +``DoubleMLPLIV`` implements PLIV models. +Estimation is conducted via its ``fit()`` method: + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. 
ipython:: python + :okwarning: + + import numpy as np + import doubleml as dml + from doubleml.datasets import make_pliv_CHS2015 + from sklearn.ensemble import RandomForestRegressor + from sklearn.base import clone + + learner = RandomForestRegressor(n_estimators=100, max_features=20, max_depth=5, min_samples_leaf=2) + ml_l = clone(learner) + ml_m = clone(learner) + ml_r = clone(learner) + np.random.seed(2222) + data = make_pliv_CHS2015(alpha=0.5, n_obs=500, dim_x=20, dim_z=1, return_type='DataFrame') + obj_dml_data = dml.DoubleMLData(data, 'y', 'd', z_cols='Z1') + dml_pliv_obj = dml.DoubleMLPLIV(obj_dml_data, ml_l, ml_m, ml_r) + print(dml_pliv_obj.fit()) + + .. tab-item:: R + :sync: r + + .. jupyter-execute:: + + library(DoubleML) + library(mlr3) + library(mlr3learners) + library(data.table) + + learner = lrn("regr.ranger", num.trees = 100, mtry = 20, min.node.size = 2, max.depth = 5) + ml_l = learner$clone() + ml_m = learner$clone() + ml_r = learner$clone() + set.seed(2222) + data = make_pliv_CHS2015(alpha=0.5, n_obs=500, dim_x=20, dim_z=1, return_type="data.table") + obj_dml_data = DoubleMLData$new(data, y_col="y", d_col = "d", z_cols= "Z1") + dml_pliv_obj = DoubleMLPLIV$new(obj_dml_data, ml_l, ml_m, ml_r) + dml_pliv_obj$fit() + print(dml_pliv_obj) \ No newline at end of file diff --git a/doc/shared/models/plr.rst b/doc/guide/models/plm/plr.rst similarity index 100% rename from doc/shared/models/plr.rst rename to doc/guide/models/plm/plr.rst diff --git a/doc/shared/models/rdd.rst b/doc/guide/models/rdd/rdd_models.inc similarity index 100% rename from doc/shared/models/rdd.rst rename to doc/guide/models/rdd/rdd_models.inc diff --git a/doc/shared/models/ssm.rst b/doc/guide/models/ssm/ssm.rst similarity index 100% rename from doc/shared/models/ssm.rst rename to doc/guide/models/ssm/ssm.rst diff --git a/doc/guide/models/ssm/ssm_models.inc b/doc/guide/models/ssm/ssm_models.inc new file mode 100644 index 00000000..d218c815 --- /dev/null +++ b/doc/guide/models/ssm/ssm_models.inc @@ -0,0 +1,154 @@ +.. include:: /guide/models/ssm/ssm.rst + +.. _ssm-mar-model: + +Missingness at Random +********************* + +Consider the following two additional assumptions for the sample selection model: + +- **Cond. Independence of Selection:** :math:`Y_i(d) \perp S_i|D_i=d, X_i\quad a.s.` for :math:`d=0,1` +- **Common Support:** :math:`P(D_i=1|X_i)>0` and :math:`P(S_i=1|D_i=d, X_i)>0` for :math:`d=0,1` + +such that outcomes are missing at random (for the score see :ref:`Scores `). + +``DoubleMLSSM`` implements sample selection models. The score ``score='missing-at-random'`` refers to the correponding score +relying on the assumptions above. The ``DoubleMLData`` object has to be defined with the additional argument ``s_col`` for the selection indicator. +Estimation is conducted via its ``fit()`` method: + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + :okwarning: + + import numpy as np + from sklearn.linear_model import LassoCV, LogisticRegressionCV + from doubleml.datasets import make_ssm_data + import doubleml as dml + + np.random.seed(42) + n_obs = 2000 + df = make_ssm_data(n_obs=n_obs, mar=True, return_type='DataFrame') + dml_data = dml.DoubleMLData(df, 'y', 'd', s_col='s') + + ml_g = LassoCV() + ml_m = LogisticRegressionCV(penalty='l1', solver='liblinear') + ml_pi = LogisticRegressionCV(penalty='l1', solver='liblinear') + + dml_ssm = dml.DoubleMLSSM(dml_data, ml_g, ml_m, ml_pi, score='missing-at-random') + dml_ssm.fit() + print(dml_ssm) + + .. tab-item:: R + :sync: r + + .. 
jupyter-execute:: + + library(DoubleML) + library(mlr3) + library(data.table) + + set.seed(3141) + n_obs = 2000 + df = make_ssm_data(n_obs=n_obs, mar=TRUE, return_type="data.table") + dml_data = DoubleMLData$new(df, y_col="y", d_cols="d", s_col="s") + + ml_g = lrn("regr.cv_glmnet", nfolds = 5, s = "lambda.min") + ml_m = lrn("classif.cv_glmnet", nfolds = 5, s = "lambda.min") + ml_pi = lrn("classif.cv_glmnet", nfolds = 5, s = "lambda.min") + + dml_ssm = DoubleMLSSM$new(dml_data, ml_g, ml_m, ml_pi, score="missing-at-random") + dml_ssm$fit() + print(dml_ssm) + + +.. _ssm-nr-model: + +Nonignorable Nonresponse +************************ + +When sample selection or outcome attrition is related to unobservables, identification generally requires an instrument for the selection indicator :math:`S_i`. +Consider the following additional assumptions for the instrumental variable: + +- **Cond. Correlation:** :math:`\exists Z: \mathbb{E}[Z\cdot S|D,X] \neq 0` +- **Cond. Independence:** :math:`Y_i(d,z)=Y_i(d)` and :math:`Y_i \perp Z_i|D_i=d, X_i\quad a.s.` for :math:`d=0,1` + +This requires the instrumental variable :math:`Z_i`, which must not affect :math:`Y_i` or be associated +with unobservables affecting :math:`Y_i` conditional on :math:`D_i` and :math:`X_i`. Further, the selection is determined via +an (unknown) threshold model: + +- **Threshold:** :math:`S_i = 1\{V_i \le \xi(D,X,Z)\}` where :math:`\xi` is a general function and :math:`V_i` is a scalar with a strictly monotonic cumulative distribution function conditional on :math:`X_i`. +- **Cond. Independence:** :math:`V_i \perp (Z_i, D_i)|X_i`. + +Let :math:`\Pi_i := P(S_i=1|D_i, X_i, Z_i)` denote the selection probability. +Additionally, the following assumptions are required: + +- **Common Support for Treatment:** :math:`P(D_i=1|X_i, \Pi)>0` +- **Cond. Effect Homogeneity:** :math:`\mathbb{E}[Y_i(1)-Y_i(0)|S_i=1, X_i=x, V_i=v] = \mathbb{E}[Y_i(1)-Y_i(0)|X_i=x, V_i=v]` +- **Common Support for Selection:** :math:`P(S_i=1|D_i=d, X_i=x, Z_i=z)>0\quad a.s.` for :math:`d=0,1` + +For further details, see `Bia, Huber and Lafférs (2023) `_. + +.. figure:: /guide/figures/py_ssm.svg + :width: 400 + :alt: DAG + :align: center + + Causal paths under nonignorable nonresponse + + +``DoubleMLSSM`` implements sample selection models. The score ``score='nonignorable'`` refers to the corresponding score +relying on the assumptions above. The ``DoubleMLData`` object has to be defined with the additional argument ``s_col`` for the selection indicator +and ``z_cols`` for the instrument. +Estimation is conducted via its ``fit()`` method: + +.. tab-set:: + + .. tab-item:: Python + :sync: py + + .. ipython:: python + :okwarning: + + import numpy as np + from sklearn.linear_model import LassoCV, LogisticRegressionCV + from doubleml.datasets import make_ssm_data + import doubleml as dml + + np.random.seed(42) + n_obs = 2000 + df = make_ssm_data(n_obs=n_obs, mar=False, return_type='DataFrame') + dml_data = dml.DoubleMLData(df, 'y', 'd', z_cols='z', s_col='s') + + ml_g = LassoCV() + ml_m = LogisticRegressionCV(penalty='l1', solver='liblinear') + ml_pi = LogisticRegressionCV(penalty='l1', solver='liblinear') + + dml_ssm = dml.DoubleMLSSM(dml_data, ml_g, ml_m, ml_pi, score='nonignorable') + dml_ssm.fit() + print(dml_ssm) + + .. tab-item:: R + :sync: r + + .. 
jupyter-execute:: + + library(DoubleML) + library(mlr3) + library(data.table) + + set.seed(3141) + n_obs = 2000 + df = make_ssm_data(n_obs=n_obs, mar=FALSE, return_type="data.table") + dml_data = DoubleMLData$new(df, y_col="y", d_cols="d", z_cols = "z", s_col="s") + + ml_g = lrn("regr.cv_glmnet", nfolds = 5, s = "lambda.min") + ml_m = lrn("classif.cv_glmnet", nfolds = 5, s = "lambda.min") + ml_pi = lrn("classif.cv_glmnet", nfolds = 5, s = "lambda.min") + + dml_ssm = DoubleMLSSM$new(dml_data, ml_g, ml_m, ml_pi, score="nonignorable") + dml_ssm$fit() + print(dml_ssm) \ No newline at end of file diff --git a/doc/guide/resampling.rst b/doc/guide/resampling.rst index 5aac4ffd..6eee60ac 100644 --- a/doc/guide/resampling.rst +++ b/doc/guide/resampling.rst @@ -202,10 +202,7 @@ Standard errors are obtained as described in :ref:`se_confint`. The aggregation of the estimates of the causal parameter and its standard errors is done using the median .. math:: - \tilde{\theta}_{0} &= \text{Median}\big((\tilde{\theta}_{0,m})_{m \in [M]}\big), - - \hat{\sigma} &= \sqrt{\text{Median}\big((\hat{\sigma}_m^2 + (\tilde{\theta}_{0,m} - \tilde{\theta}_{0})^2)_{m \in [M]}\big)}. - + \tilde{\theta}_{0} = \text{Median}\big((\tilde{\theta}_{0,m})_{m \in [M]}\big). The estimate of the causal parameter :math:`\tilde{\theta}_{0}` is stored in the ``coef`` attribute and the asymptotic standard error :math:`\hat{\sigma}/\sqrt{N}` in ``se``. @@ -214,6 +211,18 @@ and the asymptotic standard error :math:`\hat{\sigma}/\sqrt{N}` in ``se``. .. tab-item:: Python :sync: py + In Python, the confidence intervals and p-values are based on the :py:class:`doubleml.DoubleMLFramework` object. + This class provides methods such as ``confint``, ``bootstrap`` or ``p_adjust``. For different repetitions, + the computations are done separately and combined via the median (based on Chernozhukov et al., 2018). + + The estimate of the asymptotic standard error :math:`\hat{\sigma}/\sqrt{N}` is then based on the median-aggregated confidence intervals with critical value :math:`1.96`, i.e., + + .. math:: + + \hat{\sigma}/\sqrt{N} = (\text{Median}\big((\tilde{\theta}_{0,m} + 1.96\cdot \tilde{\sigma}_{m}/\sqrt{N})_{m \in [M]}\big) - \text{Median}\big((\tilde{\theta}_{0,m})_{m \in [M]}\big)) / 1.96. + + Remark that methods such as ``confint``, ``bootstrap`` or ``p_adjust`` do not use the estimate of the standard error. + .. ipython:: python print(dml_plr_obj.coef) @@ -222,6 +231,12 @@ and the asymptotic standard error :math:`\hat{\sigma}/\sqrt{N}` in ``se``. .. tab-item:: R :sync: r + The aggregation of the standard errors is done using the median + + .. math:: + + \hat{\sigma} = \sqrt{\text{Median}\big((\hat{\sigma}_m^2 + (\tilde{\theta}_{0,m} - \tilde{\theta}_{0})^2)_{m \in [M]}\big)}. + .. jupyter-execute:: print(dml_plr_obj$coef) @@ -249,9 +264,7 @@ The parameter estimates :math:`(\tilde{\theta}_{0,m})_{m \in [M]}` and asymptoti print(dml_plr_obj$all_coef) print(dml_plr_obj$all_se) -In python, the confidence intervals and p-values are based on the :py:class:`doubleml.DoubleMLFramework` object. -This class provides methods such as ``confint``, ``bootstrap`` or ``p_adjust``. For different repetitions, -the computations are done seperately and combined via the median (as based on Chernozhukov et al., 2018). 
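To make the median aggregation above concrete, here is a small self-contained sketch with made-up estimates (not actual DoubleML output); the arrays play the role of the ``all_coef`` and ``all_se`` attributes, where ``all_se`` is assumed to already contain :math:`\tilde{\sigma}_m/\sqrt{N}`.

.. code-block:: python

    # Median aggregation over M repeated sample splits (illustrative numbers only)
    import numpy as np

    all_coef = np.array([0.48, 0.52, 0.50, 0.55, 0.47])   # theta_{0,m}, m = 1..M
    all_se = np.array([0.071, 0.069, 0.073, 0.070, 0.072])  # sigma_m / sqrt(N)

    coef = np.median(all_coef)
    # back out the standard error from the median-aggregated upper confidence bound
    se = (np.median(all_coef + 1.96 * all_se) - coef) / 1.96
    print(round(coef, 4), round(se, 4))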
+ Externally provide a sample splitting / partition +++++++++++++++++++++++++++++++++++++++++++++++++ diff --git a/doc/guide/scores.rst b/doc/guide/scores.rst index 5cb36838..8a2679ec 100644 --- a/doc/guide/scores.rst +++ b/doc/guide/scores.rst @@ -141,83 +141,37 @@ In the attribute ``psi`` the values of the score function :math:`\psi(W_i; \tild Implemented Neyman orthogonal score functions +++++++++++++++++++++++++++++++++++++++++++++ +.. _plm-scores: + Partially linear models (PLM) ***************************** -.. _plr-score: - -Partially linear regression model (PLR) -======================================= +.. include:: scores/plm/plm_scores.inc -.. include:: ./scores/plr_score.rst -Partially linear IV regression model (PLIV) -=========================================== - -.. include:: ./scores/pliv_score.rst +.. _irm-scores: Interactive regression models (IRM) *********************************** -Binary Interactive Regression Model (IRM) -========================================== - -.. include:: ./scores/irm_score.rst - -Average Potential Outcomes (APOs) -================================= - -.. include:: ./scores/apo_score.rst - -Interactive IV model (IIVM) -=========================== - -.. include:: ./scores/iivm_score.rst - -Potential quantiles (PQs) -========================= +.. include:: scores/irm/irm_scores.inc -.. include:: ./scores/pq_score.rst -Local potential quantiles (LPQs) -================================ - -.. include:: ./scores/lpq_score.rst - -Conditional value at risk (CVaR) -================================ - -.. include:: ./scores/cvar_score.rst +.. _did-scores: Difference-in-Differences Models ******************************** -Panel Data -========== - -.. include:: ./scores/did_score.rst +.. include:: scores/did/did_scores.inc -Repeated Cross-Sectional Data -============================= -.. include:: ./scores/didcs_score.rst +.. _ssm-scores: Sample Selection Models ************************ -.. _ssm-mar-score: - -Missingness at Random -====================== - -.. include:: ./scores/mar_score.rst - -.. _ssm-nr-score: - -Nonignorable Nonresponse -========================= +.. include:: scores/ssm/ssm_scores.inc -.. include:: ./scores/nr_score.rst Specifying alternative score functions via callables ++++++++++++++++++++++++++++++++++++++++++++++++++++ diff --git a/doc/guide/scores/didcs_score.rst b/doc/guide/scores/did/did_cs_binary_score.rst similarity index 100% rename from doc/guide/scores/didcs_score.rst rename to doc/guide/scores/did/did_cs_binary_score.rst diff --git a/doc/guide/scores/did/did_cs_score.rst b/doc/guide/scores/did/did_cs_score.rst new file mode 100644 index 00000000..dee07ecb --- /dev/null +++ b/doc/guide/scores/did/did_cs_score.rst @@ -0,0 +1,2 @@ +.. note:: + Will be implemented soon. \ No newline at end of file diff --git a/doc/guide/scores/did_score.rst b/doc/guide/scores/did/did_pa_binary_score.rst similarity index 100% rename from doc/guide/scores/did_score.rst rename to doc/guide/scores/did/did_pa_binary_score.rst diff --git a/doc/guide/scores/did/did_pa_score.rst b/doc/guide/scores/did/did_pa_score.rst new file mode 100644 index 00000000..8d6602e5 --- /dev/null +++ b/doc/guide/scores/did/did_pa_score.rst @@ -0,0 +1,101 @@ +As in the description of the :ref:`DiD model `, the required nuisance elements are + +.. 
math:: + \begin{align} + g_{0, \mathrm{g}, t_\text{pre}, t_\text{eval}, \delta}(X_i) &:= \mathbb{E}[Y_{i,t_\text{eval}} - Y_{i,t_\text{pre}}|X_i, C_{i,t_\text{eval} + \delta}^{(\cdot)} = 1], \\ + m_{0, \mathrm{g}, t_\text{eval} + \delta}(X_i) &:= P(G_i^{\mathrm{g}}=1|X_i, G_i^{\mathrm{g}} + C_{i,t_\text{eval} + \delta}^{(\cdot)}=1). + \end{align} + +for a certain choice of :math:`(\mathrm{g}, t_\text{pre}, t_\text{eval})` and :math:`\delta` and control group :math:`C_{i,t_\text{eval} + \delta}^{(\cdot)}`. + +For notational purposes, we will omit the subscripts :math:`\mathrm{g}, t_\text{pre}, t_\text{eval}, \delta` in the following and use the notation + +* :math:`g_0(0, X_i)\equiv g_{0, \mathrm{g}, t_\text{pre}, t_\text{eval}, \delta}(X_i)` (population outcome regression function of the control group) +* :math:`m_0(X_i)\equiv m_{0, \mathrm{g}, t_\text{eval} + \delta}(X_i)` (generalized propensity score) + +All scores in the multi-period setting have the form + +.. math:: + + \psi(W_i,\theta, \eta) := + \begin{cases} + \tilde{\psi}(W_i,\theta, \eta) & \text{for } G_i^{\mathrm{g}} \vee C_{i,t_\text{eval} + \delta}^{(\cdot)}=1 \\ + 0 & \text{otherwise} + \end{cases} + +i.e. the score is only non-zero for units in the corresponding treatment group :math:`\mathrm{g}` and control group :math:`C_{i,t_\text{eval} + \delta}^{(\cdot)}`. + +For the difference-in-differences model implemented in ``DoubleMLDIDMulti`` one can choose between +``score='observational'`` and ``score='experimental'``. + +``score='observational'`` implements the score function (dropping the unit index :math:`i`): + +.. math:: + + \tilde{\psi}(W,\theta, \eta) + :&= -\frac{G^{\mathrm{g}}}{\mathbb{E}_n[G^{\mathrm{g}}]}\theta + \left(\frac{G^{\mathrm{g}}}{\mathbb{E}_n[G^{\mathrm{g}}]} - \frac{\frac{m(X) (1-G^{\mathrm{g}})}{1-m(X)}}{\mathbb{E}_n\left[\frac{m(X) (1-G^{\mathrm{g}})}{1-m(X)}\right]}\right) \left(Y_{t_\text{eval}} - Y_{t_\text{pre}} - g(0,X)\right) + + &= \tilde{\psi}_a(W; \eta) \theta + \tilde{\psi}_b(W; \eta) + +where the components of the final linear score :math:`\psi` are + +.. math:: + \psi_a(W; \eta) &= \tilde{\psi}_a(W; \eta) \cdot \max(G^{\mathrm{g}}, C^{(\cdot)}), + + \psi_b(W; \eta) &= \tilde{\psi}_b(W; \eta) \cdot \max(G^{\mathrm{g}}, C^{(\cdot)}) + +and the nuisance elements :math:`\eta=(g, m)`. + +.. note:: + Remark that :math:`1-G^{\mathrm{g}}=C^{(\cdot)}` if :math:`G^{\mathrm{g}} \vee C_{t_\text{eval} + \delta}^{(\cdot)}=1`. + +If ``in_sample_normalization='False'``, the score is set to + +.. math:: + + \tilde{\psi}(W,\theta,\eta) &= - \frac{G^{\mathrm{g}}}{\mathbb{E}_n[G^{\mathrm{g}}]}\theta + \frac{G^{\mathrm{g}} - m(X)}{\mathbb{E}_n[G^{\mathrm{g}}](1-m(X))}\left(Y_{t_\text{eval}} - Y_{t_\text{pre}} - g(0,X)\right) + + &= \tilde{\psi}_a(W; \eta) \theta + \tilde{\psi}_b(W; \eta) + +with :math:`\eta=(g, m)`. +Remark that this will result in the same score, but just uses slightly different normalization. + +``score='experimental'`` assumes that the treatment probability is independent of the covariates :math:`X` and does not rely on the propensity score. Instead define +the population outcome regression for treated and control group as + +* :math:`g_0(0, X_i)\equiv \mathbb{E}[Y_{i,t_\text{eval}} - Y_{i,t_\text{pre}}|X_i, C_{i,t_\text{eval} + \delta}^{(\cdot)} = 1]` (control group) +* :math:`g_0(1, X_i)\equiv \mathbb{E}[Y_{i,t_\text{eval}} - Y_{i,t_\text{pre}}|X_i, G_i^{\mathrm{g}} = 1]` (treated group) + +``score='experimental'`` implements the score function: + +.. 
math:: + + \tilde{\psi}(W,\theta, \eta) + :=\; &-\theta + \left(\frac{G^{\mathrm{g}}}{\mathbb{E}_n[G^{\mathrm{g}}]} - \frac{1-G^{\mathrm{g}}}{\mathbb{E}_n[1-G^{\mathrm{g}}]}\right)\left(Y_{t_\text{eval}} - Y_{t_\text{pre}} - g(0,X)\right) + + &+ \left(1 - \frac{G^{\mathrm{g}}}{\mathbb{E}_n[G^{\mathrm{g}}]}\right) \left(g(1,X) - g(0,X)\right) + + =\; &\tilde{\psi}_a(W; \eta) \theta + \tilde{\psi}_b(W; \eta) + +where the components of the final linear score :math:`\psi` are + +.. math:: + \psi_a(W; \eta) &= \tilde{\psi}_a(W; \eta) \cdot \max(G^{\mathrm{g}}, C^{(\cdot)}), + + \psi_b(W; \eta) &= \tilde{\psi}_b(W; \eta) \cdot \max(G^{\mathrm{g}}, C^{(\cdot)}) + +and the nuisance elements :math:`\eta=(g)`. + +Analogously, if ``in_sample_normalization='False'``, the score is set to + +.. math:: + + \tilde{\psi}(W,\theta, \eta) + :=\; &-\theta + \frac{G^{\mathrm{g}} - \mathbb{E}_n[G^{\mathrm{g}}]}{\mathbb{E}_n[G^{\mathrm{g}}](1-\mathbb{E}_n[G^{\mathrm{g}}])}\left(Y_{t_\text{eval}} - Y_{t_\text{pre}} - g(0,X)\right) + + &+ \left(1 - \frac{G^{\mathrm{g}}}{\mathbb{E}_n[G^{\mathrm{g}}]}\right) \left(g(1,X) - g(0,X)\right) + + =\; &\tilde{\psi}_a(W; \eta) \theta + \tilde{\psi}_b(W; \eta) + +with :math:`\eta=(g)`. +Remark that this will result in the same score, but just uses slightly different normalization. diff --git a/doc/guide/scores/did/did_scores.inc b/doc/guide/scores/did/did_scores.inc new file mode 100644 index 00000000..14498bde --- /dev/null +++ b/doc/guide/scores/did/did_scores.inc @@ -0,0 +1,38 @@ +The following scores for difference-in-differences models are implemented. + + +.. _did-pa-score: + +Panel Data +========== + +.. include:: /guide/scores/did/did_pa_score.rst + + +.. _did-cs-score: + +Repeated Cross-Sectional Data +============================= + +.. include:: /guide/scores/did/did_cs_score.rst + + +Two treatment periods +===================== + +.. warning:: + This documentation refers to the deprecated implementation for two time periods. + This functionality will be removed in a future version. The generalized version are :ref:`did-pa-score` and :ref:`did-cs-score`. + + +Panel Data +~~~~~~~~~~~ + +.. include:: /guide/scores/did/did_pa_binary_score.rst + + +Repeated Cross-Sectional Data +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +.. include:: /guide/scores/did/did_cs_binary_score.rst \ No newline at end of file diff --git a/doc/guide/scores/apo_score.rst b/doc/guide/scores/irm/apo_score.rst similarity index 100% rename from doc/guide/scores/apo_score.rst rename to doc/guide/scores/irm/apo_score.rst diff --git a/doc/guide/scores/cvar_score.rst b/doc/guide/scores/irm/cvar_score.rst similarity index 100% rename from doc/guide/scores/cvar_score.rst rename to doc/guide/scores/irm/cvar_score.rst diff --git a/doc/guide/scores/iivm_score.rst b/doc/guide/scores/irm/iivm_score.rst similarity index 100% rename from doc/guide/scores/iivm_score.rst rename to doc/guide/scores/irm/iivm_score.rst diff --git a/doc/guide/scores/irm_score.rst b/doc/guide/scores/irm/irm_score.rst similarity index 77% rename from doc/guide/scores/irm_score.rst rename to doc/guide/scores/irm/irm_score.rst index a17a0116..65522001 100644 --- a/doc/guide/scores/irm_score.rst +++ b/doc/guide/scores/irm/irm_score.rst @@ -39,6 +39,14 @@ whereas ``score='ATTE'`` changes weights to: \omega(Y,D,X) &= \frac{D}{\mathbb{E}_n[D]} - \omega(Y,D,X) &= \frac{m(X)}{\mathbb{E}_n[D]}. + \bar{\omega}(Y,D,X) &= \frac{m(X)}{\mathbb{E}_n[D]}. + +This score is identical to the original presentation in Section 5.1. of Chernozhukov et al. 
(2018) + +.. math:: + + \psi_a(W; \eta) &= -\frac{D}{\mathbb{E}_n[D]} + + \psi_b(W; \eta) &= \frac{D(Y-g(0,X))}{\mathbb{E}_n[D]} - \frac{m(X)(1-D)(Y-g(0,X))}{\mathbb{E}_n[D](1-m(X))}. For more details on other weight specifications, see :ref:`weighted_cates`. diff --git a/doc/guide/scores/irm/irm_scores.inc b/doc/guide/scores/irm/irm_scores.inc new file mode 100644 index 00000000..9841dcb6 --- /dev/null +++ b/doc/guide/scores/irm/irm_scores.inc @@ -0,0 +1,48 @@ +The following scores for nonparametric regression models are implemented. + +.. _irm-score: + +Binary Interactive Regression Model (IRM) +========================================== + +.. include:: /guide/scores/irm/irm_score.rst + + +.. _apo-score: + +Average Potential Outcomes (APOs) +================================= + +.. include:: /guide/scores/irm/apo_score.rst + + +.. _iivm-score: + +Interactive IV model (IIVM) +=========================== + +.. include:: /guide/scores/irm/iivm_score.rst + + +.. _pq-score: + +Potential quantiles (PQs) +========================= + +.. include:: /guide/scores/irm/pq_score.rst + + +.. _lpq-score: + +Local potential quantiles (LPQs) +================================ + +.. include:: /guide/scores/irm/lpq_score.rst + + +.. _cvar-score: + +Conditional value at risk (CVaR) +================================ + +.. include:: /guide/scores/irm/cvar_score.rst \ No newline at end of file diff --git a/doc/guide/scores/lpq_score.rst b/doc/guide/scores/irm/lpq_score.rst similarity index 100% rename from doc/guide/scores/lpq_score.rst rename to doc/guide/scores/irm/lpq_score.rst diff --git a/doc/guide/scores/pq_score.rst b/doc/guide/scores/irm/pq_score.rst similarity index 100% rename from doc/guide/scores/pq_score.rst rename to doc/guide/scores/irm/pq_score.rst diff --git a/doc/guide/scores/pliv_score.rst b/doc/guide/scores/plm/pliv_score.rst similarity index 100% rename from doc/guide/scores/pliv_score.rst rename to doc/guide/scores/plm/pliv_score.rst diff --git a/doc/guide/scores/plm/plm_scores.inc b/doc/guide/scores/plm/plm_scores.inc new file mode 100644 index 00000000..b70269aa --- /dev/null +++ b/doc/guide/scores/plm/plm_scores.inc @@ -0,0 +1,16 @@ +The following scores for partially linear models are implemented. + +.. _plr-score: + +Partially linear regression model (PLR) +======================================= + +.. include:: /guide/scores/plm/plr_score.rst + + +.. _pliv-score: + +Partially linear IV regression model (PLIV) +=========================================== + +.. include:: /guide/scores/plm/pliv_score.rst diff --git a/doc/guide/scores/plr_score.rst b/doc/guide/scores/plm/plr_score.rst similarity index 69% rename from doc/guide/scores/plr_score.rst rename to doc/guide/scores/plm/plr_score.rst index 0b4f493e..0dba9733 100644 --- a/doc/guide/scores/plr_score.rst +++ b/doc/guide/scores/plm/plr_score.rst @@ -11,7 +11,15 @@ For the PLR model implemented in ``DoubleMLPLR`` one can choose between &= \psi_a(W; \eta) \theta + \psi_b(W; \eta) -with :math:`\eta=(\ell,m)` and where the components of the linear score are +with :math:`\eta=(\ell,m)`, where + +.. math:: + + \ell_0(X) &:= \mathbb{E}[Y \mid X] = \theta_0\mathbb{E}[D \mid X] + g(X), + + m_0(X) &:= \mathbb{E}[D \mid X]. + +The components of the linear score are .. math:: @@ -29,7 +37,15 @@ with :math:`\eta=(\ell,m)` and where the components of the linear score are &= \psi_a(W; \eta) \theta + \psi_b(W; \eta) -with :math:`\eta=(g,m)` and where the components of the linear score are +with :math:`\eta=(g,m)`, where + +.. 
math:: + + g_0(X) &:= \mathbb{E}[Y - D \theta_0\mid X], + + m_0(X) &:= \mathbb{E}[D \mid X]. + +The components of the linear score are .. math:: diff --git a/doc/guide/scores/mar_score.rst b/doc/guide/scores/ssm/mar_score.rst similarity index 100% rename from doc/guide/scores/mar_score.rst rename to doc/guide/scores/ssm/mar_score.rst diff --git a/doc/guide/scores/nr_score.rst b/doc/guide/scores/ssm/nr_score.rst similarity index 100% rename from doc/guide/scores/nr_score.rst rename to doc/guide/scores/ssm/nr_score.rst diff --git a/doc/guide/scores/ssm/ssm_scores.inc b/doc/guide/scores/ssm/ssm_scores.inc new file mode 100644 index 00000000..b1ebc1ed --- /dev/null +++ b/doc/guide/scores/ssm/ssm_scores.inc @@ -0,0 +1,16 @@ +The following scores for sample selection models are implemented. + +.. _ssm-mar-score: + +Missingness at Random +====================== + +.. include:: /guide/scores/ssm/mar_score.rst + + +.. _ssm-nr-score: + +Nonignorable Nonresponse +========================= + +.. include:: /guide/scores/ssm/nr_score.rst diff --git a/doc/guide/sensitivity.rst b/doc/guide/sensitivity.rst index df9d49bc..4782f780 100644 --- a/doc/guide/sensitivity.rst +++ b/doc/guide/sensitivity.rst @@ -42,37 +42,25 @@ Model-specific implementations This section contains the implementation details for each specific model and model specific interpretations. -.. _sensitivity_plr: +.. _plm-sensitivity: -Partially linear regression model (PLR) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Partially linear models (PLM) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. include:: ./sensitivity/plr_sensitivity.rst +.. include:: sensitivity/plm/plm_sensitivity.inc -.. _sensitivity_irm: -Interactive regression model (IRM) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. _irm-sensitivity: -.. include:: ./sensitivity/irm_sensitivity.rst +Interactive regression models (IRM) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. _sensitivity_apo: +.. include:: sensitivity/irm/irm_sensitivity.inc -Average Potential Outcomes (APOs) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. include:: ./sensitivity/apo_sensitivity.rst - -.. _sensitivity_did: -Difference-in-Differences for Panel Data -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. _did-sensitivity: -.. include:: ./sensitivity/did_sensitivity.rst - -.. _sensitivity_did_cs: - -Difference-in-Differences for repeated cross-sections -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Difference-in-Differences Models +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. include:: ./sensitivity/did_cs_sensitivity.rst \ No newline at end of file +.. include:: sensitivity/did/did_sensitivity.inc diff --git a/doc/guide/sensitivity/did_cs_sensitivity.rst b/doc/guide/sensitivity/did/did_cs_binary_sensitivity.rst similarity index 100% rename from doc/guide/sensitivity/did_cs_sensitivity.rst rename to doc/guide/sensitivity/did/did_cs_binary_sensitivity.rst diff --git a/doc/guide/sensitivity/did/did_cs_sensitivity.rst b/doc/guide/sensitivity/did/did_cs_sensitivity.rst new file mode 100644 index 00000000..dee07ecb --- /dev/null +++ b/doc/guide/sensitivity/did/did_cs_sensitivity.rst @@ -0,0 +1,2 @@ +.. note:: + Will be implemented soon. 
\ No newline at end of file diff --git a/doc/guide/sensitivity/did_sensitivity.rst b/doc/guide/sensitivity/did/did_pa_binary_sensitivity.rst similarity index 100% rename from doc/guide/sensitivity/did_sensitivity.rst rename to doc/guide/sensitivity/did/did_pa_binary_sensitivity.rst diff --git a/doc/guide/sensitivity/did/did_pa_sensitivity.rst b/doc/guide/sensitivity/did/did_pa_sensitivity.rst new file mode 100644 index 00000000..7a9a665d --- /dev/null +++ b/doc/guide/sensitivity/did/did_pa_sensitivity.rst @@ -0,0 +1,28 @@ +For a detailed description of the scores and nuisance elements, see :ref:`did-pa-score`. + +In the :ref:`did-pa-model` with ``score='observational'`` and ``in_sample_normalization=True`` the score function implies the following representations + +.. math:: + + m(W,g) &= \big(g(1,X) - g(0,X)\big)\cdot \frac{G^{\mathrm{g}}}{\mathbb{E}[G^{\mathrm{g}}]} + + \alpha(W) &= \left(\frac{G^{\mathrm{g}}}{\mathbb{E}[G^{\mathrm{g}}]} - \frac{\frac{m(X)(1-G^{\mathrm{g}})}{1-m(X)}}{\mathbb{E}\left[\frac{m(X)(1-G^{\mathrm{g}})}{1-m(X)}\right]}\right) \cdot \max(G^{\mathrm{g}}, C^{(\cdot)}). + +If instead ``in_sample_normalization=False``, the Riesz representer changes to + +.. math:: + + \alpha(W) = \left(\frac{G^{\mathrm{g}}}{\mathbb{E}[G^{\mathrm{g}}]} - \frac{m(X)(1-G^{\mathrm{g}})}{\mathbb{E}[G^{\mathrm{g}}](1-m(X))}\right) \cdot \max(G^{\mathrm{g}}, C^{(\cdot)}). + +For ``score='experimental'`` the score function implies the following representations + +.. math:: + + m(W,g) &= \big(g(1,X) - g(0,X)\big)\cdot \max(G^{\mathrm{g}}, C^{(\cdot)}) + + \alpha(W) &= \left(\frac{G^{\mathrm{g}}}{\mathbb{E}[G^{\mathrm{g}}]} - \frac{1-G^{\mathrm{g}}}{1-\mathbb{E}[G^{\mathrm{g}}]}\right) \cdot \max(G^{\mathrm{g}}, C^{(\cdot)}). + +The ``nuisance_elements`` are then computed with plug-in versions according to the general :ref:`sensitivity_implementation`. + +.. note:: + Remark that the elements are only non-zero for units in the corresponding treatment group :math:`\mathrm{g}` and control group :math:`C^{(\cdot)}`, as :math:`1-G^{\mathrm{g}}=C^{(\cdot)}` if :math:`G^{\mathrm{g}} \vee C_{t_\text{eval} + \delta}^{(\cdot)}=1`. diff --git a/doc/guide/sensitivity/did/did_sensitivity.inc b/doc/guide/sensitivity/did/did_sensitivity.inc new file mode 100644 index 00000000..9ae884a8 --- /dev/null +++ b/doc/guide/sensitivity/did/did_sensitivity.inc @@ -0,0 +1,48 @@ +The following difference-in-differences models are implemented. + +.. note:: + Remark that :ref:`sensitivity_benchmark` is only relevant for ``score='observational'``, since ``score='experimental'`` assumes that treatment assignment is independent of :math:`X`. + Generally, we recommend ``score='observational'``, if unobserved confounding seems plausible. + + +.. _sensitivity-did-pa: + +Difference-in-Differences for Panel Data +======================================== + +.. include:: /guide/sensitivity/did/did_pa_sensitivity.rst + + +.. _sensitivity-did-cs: + +Difference-in-Differences for repeated cross-sections +===================================================== + +.. include:: /guide/sensitivity/did/did_cs_sensitivity.rst + + +.. _sensitivity-did-binary: + +Two treatment periods +====================== + + +.. warning:: + This documentation refers to the deprecated implementation for two time periods. + This functionality will be removed in a future version. The generalized versions are :ref:`sensitivity-did-pa` and :ref:`sensitivity-did-cs`. + + +.. _sensitivity-did-pa-binary: + +Panel Data +"""""""""" + +.. 
include:: /guide/sensitivity/did/did_pa_binary_sensitivity.rst + + +.. _sensitivity-did-cs-binary: + +Repeated Cross-Sectional Data +""""""""""""""""""""""""""""" + +.. include:: /guide/sensitivity/did/did_cs_binary_sensitivity.rst \ No newline at end of file diff --git a/doc/guide/sensitivity/apo_sensitivity.rst b/doc/guide/sensitivity/irm/apo_sensitivity.rst similarity index 100% rename from doc/guide/sensitivity/apo_sensitivity.rst rename to doc/guide/sensitivity/irm/apo_sensitivity.rst diff --git a/doc/guide/sensitivity/irm/irm_sensitivity.inc b/doc/guide/sensitivity/irm/irm_sensitivity.inc new file mode 100644 index 00000000..a461f52a --- /dev/null +++ b/doc/guide/sensitivity/irm/irm_sensitivity.inc @@ -0,0 +1,18 @@ +The following nonparametric regression models implemented. + + +.. _sensitivity_irm: + +Interactive regression model (IRM) +======================================= + +.. include:: /guide/sensitivity/irm/irm_sensitivity.rst + + +.. _sensitivity_apo: + +Average Potential Outcomes (APOs) +======================================= + +.. include:: /guide/sensitivity/irm/apo_sensitivity.rst + diff --git a/doc/guide/sensitivity/irm_sensitivity.rst b/doc/guide/sensitivity/irm/irm_sensitivity.rst similarity index 100% rename from doc/guide/sensitivity/irm_sensitivity.rst rename to doc/guide/sensitivity/irm/irm_sensitivity.rst diff --git a/doc/guide/sensitivity/plm/plm_sensitivity.inc b/doc/guide/sensitivity/plm/plm_sensitivity.inc new file mode 100644 index 00000000..347da4d2 --- /dev/null +++ b/doc/guide/sensitivity/plm/plm_sensitivity.inc @@ -0,0 +1,8 @@ +The following partially linear models are implemented. + +.. _sensitivity_plr: + +Partially linear regression model (PLR) +======================================= + +.. include:: /guide/sensitivity/plm/plr_sensitivity.rst diff --git a/doc/guide/sensitivity/plr_sensitivity.rst b/doc/guide/sensitivity/plm/plr_sensitivity.rst similarity index 100% rename from doc/guide/sensitivity/plr_sensitivity.rst rename to doc/guide/sensitivity/plm/plr_sensitivity.rst diff --git a/doc/index.rst b/doc/index.rst index c8ff3a7f..28b0be27 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -186,8 +186,7 @@ Source code and maintenance Documentation and website: `https://docs.doubleml.org/ `_ -DoubleML is currently maintained by -`@MalteKurz `_, `@PhilippBach `_ and +DoubleML is currently maintained by `@PhilippBach `_ and `@SvenKlaassen `_. 
The source code is available on GitHub: `Python source `_ and diff --git a/doc/literature/literature.rst b/doc/literature/literature.rst index 03c18ec1..e9fb61a1 100644 --- a/doc/literature/literature.rst +++ b/doc/literature/literature.rst @@ -104,12 +104,6 @@ Double Machine Learning Literature * Journal of Business & Economic Statistics, 1-12., 2023* |br| :octicon:`link` :bdg-link-dark:`URL ` |hr| - - - Neng-Chieh Chang |br| - **Double/debiased machine learning for difference-in-differences models** |br| - *The Econometrics Journal, 23(2), Pages 177–191, 2020* |br| - :octicon:`link` :bdg-link-dark:`URL ` - |hr| - Chernozhukov, Victor and Demirer, Mert and Duflo, Esther and Fernández-Val, Iván |br| **Generic Machine Learning Inference on Heterogeneous Treatment Effects in Randomized Experiments, with an Application to Immunization in India** |br| @@ -150,12 +144,6 @@ Double Machine Learning Literature :bdg-link-dark:`arXiv ` |hr| - - Pedro HC Sant'Anna, Jun Zhao |br| - **Doubly robust difference-in-differences estimators** |br| - *Journal of Econometrics, 219(1), Pages 101-122, 2020* |br| - :octicon:`link` :bdg-link-dark:`URL ` - |hr| - - Victor Chernozhukov, Carlos Cinelli, Whitney Newey, Amit Sharma, Vasilis Syrgkanis |br| **Long Story Short: Omitted Variable Bias in Causal Machine Learning** |br| *No. w30302. National Bureau of Economic Research, 2022* |br| @@ -174,12 +162,6 @@ Double Machine Learning Literature :octicon:`link` :bdg-link-dark:`arXiv ` |hr| - - Michael Zimmert |br| - **Efficient Difference-in-Differences Estimation with High-Dimensional Common Trend Confounding** |br| - *arXiv preprint arXiv:1809.01643 [econ.EM], 2018* |br| - :octicon:`link` :bdg-link-dark:`arXiv ` - |hr| - - Claudia Noack, Tomasz Olma, Christoph Rothe |br| **Flexible Covariate Adjustments in Regression Discontinuity Designs** |br| *arXiv preprint arXiv:2107.07942v3 [econ.EM], 2024* |br| @@ -239,6 +221,40 @@ Double Machine Learning Literature :octicon:`link` :bdg-link-dark:`URL ` |hr| + .. dropdown:: Difference-in-Differences + :class-title: sd-bg-primary sd-font-weight-bold + + - Brantly Callaway, Pedro HC Sant'Anna |br| + **Difference-in-Differences with multiple time periods** |br| + *Journal of Econometrics, 225(2), Pages 200-230, 2021* |br| + :octicon:`link` :bdg-link-dark:`URL ` + |hr| + + - Neng-Chieh Chang |br| + **Double/debiased machine learning for difference-in-differences models** |br| + *The Econometrics Journal, 23(2), Pages 177–191, 2020* |br| + :octicon:`link` :bdg-link-dark:`URL ` + |hr| + + - Jonathan Roth, Pedro HC Sant'Anna, Alyssa Bilinski, John Poe |br| + **What’s trending in difference-in-differences? A synthesis of the recent econometrics literature** |br| + *Journal of Econometrics, 235(2), Pages 2218-2244, 2023* |br| + :octicon:`link` :bdg-link-dark:`URL ` + |hr| + + - Pedro HC Sant'Anna, Jun Zhao |br| + **Doubly robust difference-in-differences estimators** |br| + *Journal of Econometrics, 219(1), Pages 101-122, 2020* |br| + :octicon:`link` :bdg-link-dark:`URL ` + |hr| + + - Michael Zimmert |br| + **Efficient Difference-in-Differences Estimation with High-Dimensional Common Trend Confounding** |br| + *arXiv preprint arXiv:1809.01643 [econ.EM], 2018* |br| + :octicon:`link` :bdg-link-dark:`arXiv ` + |hr| + + .. grid:: 1 .. grid-item-card:: Want to add or update a reference in the literature overview? 
diff --git a/doc/release/release.rst b/doc/release/release.rst index 5123ebb9..a5febb76 100644 --- a/doc/release/release.rst +++ b/doc/release/release.rst @@ -7,16 +7,82 @@ Release Notes .. tab-item:: Python - .. dropdown:: DoubleML 0.9.3 + .. dropdown:: DoubleML 0.10.0 :class-title: sd-bg-primary sd-font-weight-bold :open: + - **Release highlight:** Multi-Period Difference-in-Differences for Panel Data + + - Implementation via ``DoubleMLDIDMulti`` class + `Py #292 `_ + `Py #315 `_ + - New ``doubleml.data`` submodule including ``DoubleMLData`` and ``DoubleMLPanelData`` classes + `Py #292 `_ + - Extended User Guide and Example Gallery + `Docs #224 `_ + `Docs #233 `_ + `Docs #237 `_ + + - Added Confidence sets which are robust to weak IVs: ``robust_confset()`` method for ``DoubleMLIIVM`` + (added by `Ezequiel Smucler `_ and `David Masip `_) + `Py #318 `_ + `Docs #234 `_ + + - Update sensitivity operations to improve sensitivity bounds + `Py #295 `_ + + - Improve ``DoubleMLAPO`` nuisance estimation and update weighted score elements. + Added example to compare ``DoubleMLIRM`` and ``DoubleMLAPO``. + `Py #295 `_ + `Py #297 `_ + `Docs #220 `_ + + - Updated variance aggregation over repetitions via confidence intervals + `Py #324 `_ + `Docs #236 `_ + + - Added a separate package citation using `CITATION.cff` + `Py #321 `_ + + - Update package formatting, linting and add pre-commit hooks + `Py #288 `_ + `Py #289 `_ + `Py #294 `_ + `Py #316 `_ + + - Maintenance package + `Py #287 `_ + `Py #288 `_ + `Py #291 `_ + `Py #319 `_ + + - Maintenance documentation + `Docs #211 `_ + `Docs #213 `_ + `Docs #214 `_ + `Docs #215 `_ + `Docs #216 `_ + `Docs #217 `_ + `Docs #218 `_ + `Docs #219 `_ + `Docs #221 `_ + `Docs #225 `_ + `Docs #227 `_ + `Docs #228 `_ + `Docs #229 `_ + `Docs #230 `_ + `Docs #232 `_ + `Docs #238 `_ + `Docs #239 `_ + + .. dropdown:: DoubleML 0.9.3 + :class-title: sd-bg-primary sd-font-weight-bold + - Fix / adapted unit tests which failed in the release of 0.9.2 to conda-forge `Docs #208 `_ .. dropdown:: DoubleML 0.9.2 :class-title: sd-bg-primary sd-font-weight-bold - :open: - Make `rdrobust` optional for conda. Create `pyproject.toml` and remove `setup.py` for packaging `Py #285 `_ @@ -25,14 +91,13 @@ Release Notes - Maintenance package `Py #284 `_ - - Maintenance doccumentation + - Maintenance documentation `Docs #205 `_ `Docs #206 `_ `Docs #207 `_ .. dropdown:: DoubleML 0.9.1 :class-title: sd-bg-primary sd-font-weight-bold - :open: - **Release highlight:** Regression Discontinuity Designs with Flexible Covariate Adjustment via ``RDFlex`` class (in cooperation with `Claudia Noack `_ @@ -499,17 +564,27 @@ Release Notes .. tab-item:: R - .. dropdown:: DoubleML 1.0.1 + .. dropdown:: DoubleML 1.0.2 :class-title: sd-bg-primary sd-font-weight-bold :open: + - Add sample selection models, thanks to new contributor Petra Jasenakova `@petronelaj `_ + `R #213 `_ + `Docs #223 `_ + - Maintenance including updates to GitHub workflows + `R #205 `_ + `R #220 `_ + `Docs #226 `_ + + .. dropdown:: DoubleML 1.0.1 + :class-title: sd-bg-primary sd-font-weight-bold + - Maintenance (upcoming breaking changes from ``paradox`` package), thanks to new contributor Martin Binder `@mb706 `_ `R #195 `_ `R #198 `_ .. 
    .. dropdown:: DoubleML 1.0.0
        :class-title: sd-bg-primary sd-font-weight-bold
-        :open:
 
        - Update citation info to publication in Journal of Statistical Software, rename helper function and fix links and GH actions
          `R #191 `_
diff --git a/doc/shared/models/did.rst b/doc/shared/models/did.rst
deleted file mode 100644
index 38cea0f9..00000000
--- a/doc/shared/models/did.rst
+++ /dev/null
@@ -1,24 +0,0 @@
-**Difference-in-Differences Models (DID)** implemented in the package focus on the the binary treatment case with
-with two treatment periods.
-
-Adopting the notation from `Sant'Anna and Zhao (2020) `_,
-let :math:`Y_{it}` be the outcome of interest for unit :math:`i` at time :math:`t`. Further, let :math:`D_{it}=1` indicate
-if unit :math:`i` is treated before time :math:`t` (otherwise :math:`D_{it}=0`). Since all units start as untreated (:math:`D_{i0}=0`), define
-:math:`D_{i}=D_{i1}.` Relying on the potential outcome notation, denote :math:`Y_{it}(0)` as the outcome of unit :math:`i` at time :math:`t` if the unit did not receive
-treatment up until time :math:`t` and analogously for :math:`Y_{it}(1)` with treatment. Consequently, the observed outcome
-for unit is :math:`i` at time :math:`t` is :math:`Y_{it}=D_{it} Y_{it}(1) + (1-D_{it}) Y_{it}(0)`. Further, let
-:math:`X_i` be a vector of pre-treatment covariates.
-
-Target parameter of interest is the average treatment effect on the treated (ATTE)
-
-.. math::
-
-    \theta_0 = \mathbb{E}[Y_{i1}(1)- Y_{i1}(0)|D_i=1].
-
-The corresponding identifying assumptions are
-
-- **(Cond.) Parallel Trends:** :math:`\mathbb{E}[Y_{i1}(0) - Y_{i0}(0)|X_i, D_i=1] = \mathbb{E}[Y_{i1}(0) - Y_{i0}(0)|X_i, D_i=0]\quad a.s.`
-- **Overlap:** :math:`\exists\epsilon > 0`: :math:`P(D_i=1) > \epsilon` and :math:`P(D_i=1|X_i) \le 1-\epsilon\quad a.s.`
-
-.. note::
-    For a more detailed introduction and recent developments of the difference-in-differences literature see e.g. `Roth et al. (2022) `_.
diff --git a/doc/workflow/workflow.rst b/doc/workflow/workflow.rst
index d0615b42..8e201e02 100644
--- a/doc/workflow/workflow.rst
+++ b/doc/workflow/workflow.rst
@@ -62,15 +62,15 @@ transform the confounding variables in the regression model.
 However, machine learning techniques offer greater flexibility in terms of a more data-driven specification
 of the main regression equation and the first stage.
 
-1. Data-Backend
+1. Data Backend
 ---------------
 
-In Step 1., we initialize the data-backend and thereby declare the role of the outcome, the treatment, and the confounding variables.
+In Step 1., we initialize the data backend and thereby declare the role of the outcome, the treatment, and the confounding variables.
 
 We use data from the 1991 Survey of Income and Program Participation which is available via the function
 `fetch_401K (Python) `_ or `fetch_401k (R) `_.
 
-The data-backend can be initialized from various data frame objects in Python and R. To estimate the intent-to-treat effect in the
+The data backend can be initialized from various data frame objects in Python and R. To estimate the intent-to-treat effect in the
 401(k) example, we use eligibility (``e401``) as the treatment variable of interest. The outcome variable is ``net_tfa`` and
 we control for confounding variables ``['age', 'inc', 'educ', 'fsize', 'marr', 'twoearn', 'db', 'pira', 'hown']``.
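For illustration, Step 1 (data backend) from the workflow page above could look roughly as sketched below for the 401(k) example. This is a minimal sketch, not part of the documented changes; it assumes the standard ``DoubleMLData`` and ``fetch_401K`` interfaces of the Python package and uses the column roles named in the diff.

.. code-block:: python

    # Minimal sketch of Step 1 (data backend) for the 401(k) example.
    # Assumes the standard DoubleML Python API (DoubleMLData, fetch_401K);
    # check the package documentation for the exact signatures.
    from doubleml import DoubleMLData
    from doubleml.datasets import fetch_401K

    df = fetch_401K(return_type="DataFrame")

    dml_data = DoubleMLData(
        df,
        y_col="net_tfa",  # outcome: net total financial assets
        d_cols="e401",    # treatment: 401(k) eligibility (intent-to-treat)
        x_cols=["age", "inc", "educ", "fsize", "marr",
                "twoearn", "db", "pira", "hown"],  # confounding variables
    )
    print(dml_data)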
diff --git a/requirements.txt b/requirements.txt
index 3e6af923..543443a5 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,5 @@
 DoubleML[rdd]
+scikit-learn<1.6
 
 # test
 pytest
@@ -21,3 +22,7 @@ seaborn
 xgboost
 lightgbm
 flaml
+
+# notebooks
+ipykernel
+pyreadr
\ No newline at end of file
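As a usage note on the 0.10.0 release highlight above (multi-period difference-in-differences for panel data), the new interface could be exercised roughly as follows. The class names ``DoubleMLPanelData`` and ``DoubleMLDIDMulti`` come from the release notes; the data generator ``make_did_CS2021`` and the keyword arguments shown are assumptions and may differ from the released API, so please consult the user guide.

.. code-block:: python

    # Hedged sketch of the multi-period DiD interface announced for DoubleML 0.10.0.
    # make_did_CS2021 and the keyword names below are assumptions; the released
    # signatures may differ.
    from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor

    from doubleml.data import DoubleMLPanelData
    from doubleml.did import DoubleMLDIDMulti
    from doubleml.did.datasets import make_did_CS2021  # assumed example data generator

    df = make_did_CS2021(n_obs=500)  # panel data with columns y, d, id, t, Z1-Z4 (assumed)

    dml_data = DoubleMLPanelData(
        df,
        y_col="y",
        d_cols="d",
        id_col="id",
        t_col="t",
        x_cols=["Z1", "Z2", "Z3", "Z4"],
    )

    dml_did = DoubleMLDIDMulti(
        dml_data,
        ml_g=RandomForestRegressor(),   # learner for the outcome regression
        ml_m=RandomForestClassifier(),  # learner for the treatment-group propensity
        gt_combinations="standard",     # ATT(g, t) for the standard group-time combinations
    )
    dml_did.fit()
    print(dml_did.summary)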