diff --git a/.circleci/.dockerignore b/.circleci/.dockerignore deleted file mode 100644 index dbe9a91d7..000000000 --- a/.circleci/.dockerignore +++ /dev/null @@ -1,3 +0,0 @@ -* -!requirements-conda.txt -!fix-permissions diff --git a/.circleci/Dockerfile b/.circleci/Dockerfile deleted file mode 100644 index f4629597a..000000000 --- a/.circleci/Dockerfile +++ /dev/null @@ -1,35 +0,0 @@ -FROM circleci/openjdk:11-jdk -#LABEL org.opencontainers.image.source=https://github.com/locationtech/rasterframes - -USER root - -# See: https://docs.conda.io/projects/conda/en/latest/user-guide/install/rpm-debian.html -RUN \ - curl -s https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list - -RUN \ - apt-get update && \ - apt-get install -yq --no-install-recommends conda && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* - -ENV CONDA_DIR=/opt/conda -ENV PATH=$CONDA_DIR/bin:$PATH - -COPY requirements-conda.txt fix-permissions /tmp -RUN \ - conda install --quiet --yes --channel=conda-forge --file=/tmp/requirements-conda.txt && \ - echo "$CONDA_DIR/lib" > /etc/ld.so.conf.d/conda.conf && \ - ldconfig && \ - conda clean --all --force-pkgs-dirs --yes --quiet && \ - sh /tmp/fix-permissions $CONDA_DIR - - -# Work-around for pyproj issue https://github.com/pyproj4/pyproj/issues/415 -ENV PROJ_LIB=/opt/conda/share/proj - -USER 3434 -WORKDIR /home/circleci diff --git a/.circleci/Makefile b/.circleci/Makefile deleted file mode 100644 index 578140c4e..000000000 --- a/.circleci/Makefile +++ /dev/null @@ -1,27 +0,0 @@ -IMAGE_NAME=circleci-openjdk-conda-gdal -SHA=$(shell git log -n1 --format=format:"%H" | cut -c 1-7) -VERSION?=$(SHA) -HOST=docker.io -REPO=$(HOST)/s22s -FULL_NAME=$(REPO)/$(IMAGE_NAME):$(VERSION) - -.DEFAULT_GOAL := help -help: -# http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html - @echo "Usage: make [target]" - @echo "Targets: " - @grep -E '^[a-zA-Z0-9_%/-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\t\033[36m%-20s\033[0m %s\n", $$1, $$2}' - -all: build push ## Build and then push image - -build: ## Build the docker image - docker build . 
-t ${FULL_NAME} - -login: ## Login to the docker registry - docker login - -push: login ## Push docker image to registry - docker push ${FULL_NAME} - -run: build ## Build image and launch shell - docker run --rm -it ${FULL_NAME} bash diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 5b832beb6..000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,249 +0,0 @@ -version: 2.1 - -orbs: - sbt: - description: SBT build/test runtime - executors: - default: - docker: - - image: s22s/circleci-openjdk-conda-gdal:b8e30ee - working_directory: ~/repo - environment: - SBT_OPTS: "-Xms32M -Xmx2G -XX:+UseStringDeduplication -XX:+UseCompressedOops -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp" - commands: - setup: - description: Setup for sbt build - steps: - - run: - name: Setup sbt - command: 'true' # NOOP - - compile: - description: Do just the compilation stage to minimize sbt memory footprint - steps: - - run: - name: "Compile Scala via sbt" - command: sbt -v -batch compile test:compile it:compile - - python: - commands: - setup: - description: Ensure a minimal python environment is avalable and ready - steps: - - run: - name: Install Python and PIP - command: |- - python -m pip install --user 'setuptools>=45.2' - - requirements: - description: Install packages identified in requirements file - steps: - - run: - name: Install requirements - command: /opt/conda/bin/conda install -c conda-forge --yes --file pyrasterframes/src/main/python/requirements-condaforge.txt - - - rasterframes: - commands: - setup: - steps: - - run: - name: Enable saving core files - command: ulimit -c unlimited -S - - save-artifacts: - steps: - - run: - command: | - mkdir -p /tmp/core_dumps - ls -lh /tmp - cp core.* *.hs /tmp/core_dumps 2> /dev/null || true - cp core/* /tmp/core_dumps/ 2> /dev/null || true - cp -r /tmp/hsperfdata* /tmp/*.hprof /tmp/core_dumps 2> /dev/null || true - when: on_fail - - - store_artifacts: - path: /tmp/core_dumps - - - store_test_results: - path: core/target/test-reports - - - store_test_results: - path: datasource/target/test-reports - - - store_test_results: - path: experimental/target/test-reports - - save-doc-artifacts: - steps: - - run: - command: | - mkdir -p /tmp/core_dumps - cp core.* *.hs /tmp/core_dumps 2> /dev/null || true - mkdir -p /tmp/markdown - cp /home/circleci/repo/pyrasterframes/target/python/docs/*.md /tmp/markdown 2> /dev/null || true - when: on_fail - - - store_artifacts: - path: /tmp/core_dumps - - - store_artifacts: - path: /tmp/markdown - - - store_artifacts: - path: docs/target/site - destination: rf-site - - save-cache: - steps: - - save_cache: - key: v4-dependencies--{{ checksum "build.sbt" }} - paths: - - ~/.ivy2/cache - - ~/.sbt - - ~/.cache/coursier - - ~/.local - - restore-cache: - steps: - - restore_cache: - keys: - - v4-dependencies-{{ checksum "build.sbt" }} - -jobs: - test: - executor: sbt/default - steps: - - checkout - - sbt/setup - - python/setup - - python/requirements - - rasterframes/setup - - rasterframes/restore-cache - - sbt/compile - - - run: - name: "Scala Tests: core" - command: sbt -v -batch core/test - - - run: - name: "Scala Tests: datasource" - command: sbt -v -batch datasource/test - - - run: - name: "Scala Tests: experimental" - command: sbt -v -batch experimental/test - - - run: - name: "Create PyRasterFrames package" - command: sbt -v -batch pyrasterframes/package - - - run: - name: "Python Tests" - command: sbt -v -batch pyrasterframes/test - - - rasterframes/save-artifacts - - 
rasterframes/save-cache - - docs: - executor: sbt/default - steps: - - checkout - - sbt/setup - - python/setup - - python/requirements - - rasterframes/setup - - rasterframes/restore-cache - - sbt/compile - - - run: - name: Build documentation - command: sbt makeSite - no_output_timeout: 30m - - - rasterframes/save-doc-artifacts - - rasterframes/save-cache - - it: - executor: sbt/default - steps: - - checkout - - sbt/setup - - rasterframes/setup - - rasterframes/restore-cache - - sbt/compile - - - run: - name: Integration tests - command: sbt it:test - no_output_timeout: 30m - - - rasterframes/save-artifacts - - rasterframes/save-cache - - it-no-gdal: - executor: sbt/default - steps: - - checkout - - sbt/setup - - rasterframes/setup - - rasterframes/restore-cache - - - run: - name: Uninstall GDAL - command: conda remove gdal -q -y --offline - - - sbt/compile - - - run: - name: Integration tests - command: sbt it:test - no_output_timeout: 30m - - - rasterframes/save-artifacts - - rasterframes/save-cache - -workflows: - version: 2 - all: - jobs: - - test - - - it: - requires: - - test - filters: - branches: - only: - - /feature\/.*-it.*/ - - /it\/.*/ - - - it-no-gdal: - requires: - - test - filters: - branches: - only: - - /feature\/.*-it.*/ - - /it\/.*/ - - - docs: - filters: - branches: - only: - - /feature\/.*docs.*/ - - /fix\/.*docs.*/ - - /docs\/.*/ - - weekly: - triggers: - - schedule: - cron: "0 8 4 * *" - filters: - branches: - only: - - develop - jobs: - - test - - it - - it-no-gdal diff --git a/.circleci/fix-permissions b/.circleci/fix-permissions deleted file mode 100755 index d8e14920f..000000000 --- a/.circleci/fix-permissions +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash - -set -e - -GID=3434 # circleci - -for d in "$@"; do - find "$d" \ - ! \( \ - -group $GID \ - -a -perm -g+rwX \ - \) \ - -exec chgrp $GID {} \; \ - -exec chmod g+rwX {} \; - # setuid,setgid *on directories only* - find "$d" \ - \( \ - -type d \ - -a ! 
-perm -6000 \ - \) \ - -exec chmod +6000 {} \; -done diff --git a/.circleci/requirements-conda.txt b/.circleci/requirements-conda.txt deleted file mode 100644 index a8ebfd56b..000000000 --- a/.circleci/requirements-conda.txt +++ /dev/null @@ -1,5 +0,0 @@ -python==3.8 -gdal==3.1.2 -libspatialindex -rasterio[s3] -rtree \ No newline at end of file diff --git a/.github/actions/collect_artefacts/action.yml b/.github/actions/collect_artefacts/action.yml new file mode 100644 index 000000000..27575e34f --- /dev/null +++ b/.github/actions/collect_artefacts/action.yml @@ -0,0 +1,10 @@ +name: upload rasterframes artefacts +description: upload rasterframes artefacts +runs: + using: "composite" + steps: + - name: upload core dumps + uses: actions/upload-artifact@v3 + with: + name: core-dumps + path: /tmp/core_dumps \ No newline at end of file diff --git a/.github/actions/init-python-env/action.yaml b/.github/actions/init-python-env/action.yaml new file mode 100644 index 000000000..89f45cfec --- /dev/null +++ b/.github/actions/init-python-env/action.yaml @@ -0,0 +1,40 @@ +name: Setup Python Environment + +description: Install Python, Poetry and project dependencies + +inputs: + python_version: + description: 'Version of Python to configure' + default: '3.8' + poetry_version: + description: 'Version of Poetry to configure' + default: '1.3.2' + +runs: + using: "composite" + steps: + - name: Load cached Poetry installation + id: cached-poetry + uses: actions/cache@v3 + with: + path: ~/.local # the path depends on the OS, this is linux + key: poetry-${{inputs.poetry_version}}-0 # increment to reset cache + + - name: Install Poetry + if: steps.cached-poetry.outputs.cache-hit != 'true' + uses: snok/install-poetry@v1 + with: + version: ${{ inputs.poetry_version }} + virtualenvs-create: true + virtualenvs-in-project: true + + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: ${{ inputs.python_version }} + cache: 'poetry' + + - name: Install Poetry project dependencies + # if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + shell: bash + run: make init-python \ No newline at end of file diff --git a/.github/actions/init-scala-env/action.yaml b/.github/actions/init-scala-env/action.yaml new file mode 100644 index 000000000..902f8de40 --- /dev/null +++ b/.github/actions/init-scala-env/action.yaml @@ -0,0 +1,10 @@ +name: setup scala +description: setup scala environment +runs: + using: "composite" + steps: + - uses: coursier/cache-action@v6 + - uses: coursier/setup-action@v1 + with: + jvm: zulu:8.0.362 + apps: sbt diff --git a/.github/image/.dockerignore b/.github/image/.dockerignore deleted file mode 100644 index dbe9a91d7..000000000 --- a/.github/image/.dockerignore +++ /dev/null @@ -1,3 +0,0 @@ -* -!requirements-conda.txt -!fix-permissions diff --git a/.github/image/Dockerfile b/.github/image/Dockerfile deleted file mode 100644 index 27cd7a1aa..000000000 --- a/.github/image/Dockerfile +++ /dev/null @@ -1,28 +0,0 @@ -FROM adoptopenjdk/openjdk11:debian-slim - -# See: https://docs.conda.io/projects/conda/en/latest/user-guide/install/rpm-debian.html -RUN \ - apt-get update && \ - apt-get install -yq gpg && \ - curl -s https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - echo "deb [arch=amd64 
signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list && \ - apt-get update && \ - apt-get install -yq --no-install-recommends conda && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* - -ENV CONDA_DIR=/opt/conda -ENV PATH=$CONDA_DIR/bin:$PATH - -COPY requirements-conda.txt /tmp -RUN \ - conda install --quiet --yes --channel=conda-forge --file=/tmp/requirements-conda.txt && \ - echo "$CONDA_DIR/lib" > /etc/ld.so.conf.d/conda.conf && \ - ldconfig && \ - conda clean --all --force-pkgs-dirs --yes --quiet - -# Work-around for pyproj issue https://github.com/pyproj4/pyproj/issues/415 -ENV PROJ_LIB=/opt/conda/share/proj - diff --git a/.github/image/Makefile b/.github/image/Makefile deleted file mode 100644 index 1dab66b65..000000000 --- a/.github/image/Makefile +++ /dev/null @@ -1,27 +0,0 @@ -IMAGE_NAME=debian-openjdk-conda-gdal -SHA=$(shell git log -n1 --format=format:"%H" | cut -c 1-7) -VERSION?=$(SHA) -HOST=docker.io -REPO=$(HOST)/s22s -FULL_NAME=$(REPO)/$(IMAGE_NAME):$(VERSION) - -.DEFAULT_GOAL := help -help: -# http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html - @echo "Usage: make [target]" - @echo "Targets: " - @grep -E '^[a-zA-Z0-9_%/-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\t\033[36m%-20s\033[0m %s\n", $$1, $$2}' - -all: build push ## Build and then push image - -build: ## Build the docker image - docker build . -t ${FULL_NAME} - -login: ## Login to the docker registry - docker login - -push: login ## Push docker image to registry - docker push ${FULL_NAME} - -run: build ## Build image and launch shell - docker run --rm -it ${FULL_NAME} bash diff --git a/.github/image/requirements-conda.txt b/.github/image/requirements-conda.txt deleted file mode 100644 index a8ebfd56b..000000000 --- a/.github/image/requirements-conda.txt +++ /dev/null @@ -1,5 +0,0 @@ -python==3.8 -gdal==3.1.2 -libspatialindex -rasterio[s3] -rtree \ No newline at end of file diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml deleted file mode 100644 index 97afa087b..000000000 --- a/.github/workflows/build-test.yml +++ /dev/null @@ -1,66 +0,0 @@ -name: Build and Test - -on: - pull_request: - branches: ['**'] - push: - branches: ['master', 'develop', 'release/*', 'spark-3.2'] - tags: [v*] - release: - types: [published] - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - with: - fetch-depth: 0 - - uses: coursier/cache-action@v6 - - name: Setup JDK - uses: actions/setup-java@v3 - with: - distribution: temurin - java-version: 8 - cache: sbt - - # Do just the compilation stage to minimize sbt memory footprint - - name: Compile - run: sbt -v -batch compile test:compile it:compile - - - name: Core tests - run: sbt -batch core/test - - - name: Datasource tests - run: sbt -batch datasource/test - - - name: Experimental tests - run: sbt -batch experimental/test - - ## TODO: Update python build to be PEP 517 compatible - # - name: Install Conda dependencies - # run: | - # # $CONDA_DIR is an environment variable pointing to the root of the miniconda directory - # $CONDA_DIR/bin/conda install -c conda-forge --yes --file pyrasterframes/src/main/python/requirements-condaforge.txt - # - name: Create PyRasterFrames package - # run: sbt -v -batch pyrasterframes/package - # - name: Python tests - # run: sbt -batch pyrasterframes/test - - - name: Collect artifacts - if: ${{ failure() }} - run: | - mkdir -p /tmp/core_dumps - 
ls -lh /tmp - cp core.* *.hs /tmp/core_dumps/ 2> /dev/null || true - cp ./core/*.log /tmp/core_dumps/ 2> /dev/null || true - cp -r /tmp/hsperfdata* /tmp/*.hprof /tmp/core_dumps/ 2> /dev/null || true - cp repo/core/core/* /tmp/core_dumps/ 2> /dev/null || true - - name: Upload core dumps - if: ${{ failure() }} - uses: actions/upload-artifact@v2 - with: - name: core-dumps - path: /tmp/core_dumps \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 000000000..668b34546 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,122 @@ +name: Continuous Integration + +on: + pull_request: + branches: + - '**' + push: + branches: + - '**' + tags: + - 'v*' + +jobs: + + build-scala: + runs-on: ubuntu-20.04 + + steps: + - name: Checkout Repository + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Setup Scala Build Tools + uses: ./.github/actions/init-scala-env + + - name: Compile Scala Project + run: make compile-scala + + - name: Test Scala Project + # python/* branches are not supposed to change scala code, trust them + if: ${{ !startsWith(github.head_ref || github.ref_name, 'python/') }} + run: make test-scala + + - name: Build Spark Assembly + shell: bash + run: make build-scala + + - name: Cache Spark Assembly + uses: actions/cache@v3 + with: + path: ./dist/* + key: dist-${{ github.sha }} + + build-python: + # scala/* branches are not supposed to change python code, trust them + if: ${{ !startsWith(github.head_ref || github.ref_name, 'scala/') }} + runs-on: ubuntu-20.04 + needs: build-scala + + strategy: + matrix: + python: [ "3.8" ] + + steps: + - name: Checkout Repository + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - uses: ./.github/actions/init-python-env + with: + python_version: ${{ matrix.python }} + + - name: Static checks + shell: bash + run: make lint-python + + - uses: actions/cache@v3 + with: + path: ./dist/* + key: dist-${{ github.sha }} + + - name: Run tests + shell: bash + run: make test-python-quick + + publish: + name: Publish Artifacts + needs: [ build-scala, build-python ] + runs-on: ubuntu-20.04 + if: (github.event_name != 'pull_request') && startsWith(github.ref, 'refs/tags/v') + + steps: + - name: Checkout Repository + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Setup Scala Build Tools + uses: ./.github/actions/init-scala-env + + - name: Publish JARs to GitHub Packages + shell: bash + env: + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + run: make publish-scala + + - uses: ./.github/actions/init-python-env + with: + python_version: "3.8" + + - name: Build Python whl + shell: bash + run: make build-python + + +# TODO: Where does this go, do we need it? +# - name: upload artefacts +# uses: ./.github/actions/upload_artefacts + +# TODO: Where does this go, do we need it? +# - uses: actions/cache@v3 +# with: +# path: ./dist/* +# key: dist-${{ github.sha }} + +# TODO: Where does this go? 
+# - name: upload wheel +# working-directory: dist +# shell: bash +# run: diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 100b78d4f..ddf7b107d 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -1,3 +1,4 @@ +# TODO: This needs a refactor name: Compile documentation on: diff --git a/.gitignore b/.gitignore index ac5807ecd..b8ce6ce00 100644 --- a/.gitignore +++ b/.gitignore @@ -48,3 +48,16 @@ rf-notebook/src/main/notebooks/.ipython .bloop metals.sbt *.parquet/ + +# Python + +.coverage +.venv +htmlcov +dist/ +docs/*.md +docs/*.ipynb +__pycache__ +*.pipe/ +.coverage* +*.jar diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..9142d0b3c --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,24 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks + +files: ^python/ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: local + hooks: + - id: black + name: black formatting + language: system + types: [python] + entry: poetry run black + + - id: isort + name: isort import sorting + language: system + types: [python] + entry: poetry run isort + args: ["--profile", "black"] diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..486335119 --- /dev/null +++ b/Makefile @@ -0,0 +1,90 @@ +SHELL := /usr/bin/env bash + +.PHONY: init test lint build docs notebooks help + +help: + @echo "init - Setup the repository" + @echo "clean - clean all compiled python files, build artifacts and virtual envs. Run \`make init\` anew afterwards." + @echo "test - run unit tests" + @echo "lint - run linter and checks" + @echo "build - build wheel" + @echo "docs - build documentation" + @echo "help - this command" + +test: test-scala test-python + +############### +# SCALA +############### + +compile-scala: + sbt -v -batch compile test:compile it:compile + +test-scala: test-core-scala test-datasource-scala test-experimental-scala + +test-core-scala: + sbt -batch core/test + +test-datasource-scala: + sbt -batch datasource/test + +test-experimental-scala: + sbt -batch experimental/test + +build-scala: + sbt "pyrasterframes/assembly" + +clean-scala: + sbt clean + +publish-scala: + sbt publish + +################ +# PYTHON +################ + +init-python: + python -m venv ./.venv + ./.venv/bin/python -m pip install --upgrade pip + poetry self add "poetry-dynamic-versioning[plugin]" + poetry install + poetry run pre-commit install + +test-python: build-scala + poetry run pytest -vv python/tests --cov=python/pyrasterframes --cov=python/geomesa_pyspark --cov-report=term-missing + +test-python-quick: + poetry run pytest -vv python/tests --cov=python/pyrasterframes --cov=python/geomesa_pyspark --cov-report=term-missing + +lint-python: + poetry run pre-commit run --all-files + +build-python: clean-build-python + poetry build + +docs-python: clean-docs-python + poetry run python python/docs/build_docs.py + +notebooks-python: clean-notebooks-python + poetry run python python/docs/build_docs.py --format notebook + +clean-python: clean-build-python clean-test-python clean-venv-python clean-docs-python clean-notebooks-python + +clean-build-python: + find ./dist -name 'pyrasterframes*.whl' -exec rm -fr {} + + find ./dist -name 'pyrasterframes*.tar.gz' -exec rm -fr {} + + +clean-test-python: + rm -f .coverage + rm -fr htmlcov/ + rm -fr test*.pipe + +clean-venv-python: + rm -fr 
.venv/ + +clean-docs-python: + find docs -name '*.md' -exec rm -f {} + + +clean-notebooks-python: + find docs -name '*.ipynb' -exec rm -f {} + diff --git a/datasource/src/main/resources/slippy.html b/datasource/src/main/resources/slippy.html index 96cf2d168..83bd67357 100644 --- a/datasource/src/main/resources/slippy.html +++ b/datasource/src/main/resources/slippy.html @@ -23,7 +23,7 @@ RasterFrames Rendering - + @@ -74,4 +74,4 @@ map.on('click', showPos); - + \ No newline at end of file diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 000000000..e825fd0f7 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,2669 @@ +[[package]] +name = "affine" +version = "2.4.0" +description = "Matrices describing affine transformation of the plane" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +dev = ["coveralls", "flake8", "pydocstyle"] +test = ["pytest (>=4.6)", "pytest-cov"] + +[[package]] +name = "appnope" +version = "0.1.3" +description = "Disable App Nap on macOS >= 10.9" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "asttokens" +version = "2.2.1" +description = "Annotate AST trees with source code positions" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + +[package.extras] +test = ["astroid", "pytest"] + +[[package]] +name = "attrs" +version = "22.2.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "beautifulsoup4" +version = "4.12.0" +description = "Screen-scraping library" +category = "dev" +optional = false +python-versions = ">=3.6.0" + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "black" +version = "22.12.0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "bleach" +version = "6.0.0" +description = "An easy safelist-based HTML-sanitizing tool." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.2)"] + +[[package]] +name = "boto3" +version = "1.26.96" +description = "The AWS SDK for Python" +category = "dev" +optional = false +python-versions = ">= 3.7" + +[package.dependencies] +botocore = ">=1.29.96,<1.30.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.6.0,<0.7.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.29.96" +description = "Low-level, data-driven core of boto 3." +category = "dev" +optional = false +python-versions = ">= 3.7" + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = ">=1.25.4,<1.27" + +[package.extras] +crt = ["awscrt (==0.16.9)"] + +[[package]] +name = "certifi" +version = "2022.12.7" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "cfgv" +version = "3.3.1" +description = "Validate configuration and produce human readable error messages." +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click-plugins" +version = "1.1.1" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] + +[[package]] +name = "cligj" +version = "0.7.2" +description = "Click params for commmand line interfaces to GeoJSON" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, <4" + +[package.dependencies] +click = ">=4.0" + +[package.extras] +test = ["pytest-cov"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" + +[[package]] +name = "comm" +version = "0.1.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +traitlets = ">=5.3" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "contourpy" +version = "1.0.7" +description = "Python library for calculating contours of 2D quadrilateral grids" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +numpy = ">=1.16" + +[package.extras] +bokeh = ["bokeh", "chromedriver", "selenium"] +docs = ["furo", "sphinx-copybutton"] +mypy = ["contourpy[bokeh]", "docutils-stubs", "mypy (==0.991)", "types-Pillow"] +test = ["Pillow", "matplotlib", "pytest"] +test-no-images = ["pytest"] + +[[package]] +name = "coverage" +version = "7.2.2" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cycler" +version = "0.11.0" +description = "Composable style cycles" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "debugpy" +version = "1.6.6" +description = "An implementation of the Debug Adapter Protocol for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +packaging = "*" + +[[package]] +name = "distlib" +version = "0.3.6" +description = "Distribution utilities" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "exceptiongroup" +version = "1.1.1" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "1.2.0" +description = "Get the currently executing AST node of a frame, and other information" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +tests = ["asttokens", "littleutils", "pytest", "rich"] + +[[package]] +name = "fastjsonschema" +version = "2.16.3" +description = "Fastest Python implementation of JSON schema" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + +[[package]] +name = "filelock" +version = "3.10.0" +description = "A platform independent file lock." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.1)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "fiona" +version = "1.9.2" +description = "Fiona reads and writes spatial data files" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=19.2.0" +certifi = "*" +click = ">=8.0,<9.0" +click-plugins = ">=1.0" +cligj = ">=0.5" +importlib-metadata = {version = "*", markers = "python_version < \"3.10\""} +munch = ">=2.3.2" + +[package.extras] +all = ["Fiona[calc,s3,test]"] +calc = ["shapely"] +s3 = ["boto3 (>=1.3.1)"] +test = ["Fiona[s3]", "pytest (>=7)", "pytest-cov", "pytz"] + +[[package]] +name = "fonttools" +version = "4.39.2" +description = "Tools to manipulate font files" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.0.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "scipy"] +lxml = ["lxml (>=4.0,<5)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.0.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "geopandas" +version = "0.12.2" +description = "Geographic pandas extensions" +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +fiona = ">=1.8" +packaging = "*" +pandas = ">=1.0.0" +pyproj = ">=2.6.1.post1" +shapely = ">=1.7" + +[[package]] +name = "identify" +version = "2.5.21" +description = "File identification library for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "importlib-metadata" +version = "6.1.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] + +[[package]] +name = "importlib-resources" +version = "5.12.0" +description = "Read resources from Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "ipykernel" +version = "6.22.0" +description = "IPython Kernel for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=20" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "ipython" +version = "8.11.0" +description = "IPython: Productive Interactive Computing" +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" + +[package.extras] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] + +[[package]] +name = "ipython-genutils" +version = "0.2.0" +description = "Vestigial utilities from IPython" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.8.0" + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "jedi" +version = "0.18.2" +description = "An autocompletion tool for Python that can be used for text editors." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +parso = ">=0.8.0,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "jsonschema" +version = "4.17.3" +description = "An implementation of JSON Schema validation for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=17.4.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jupyter-client" +version = "8.1.0" +description = "Jupyter protocol implementation and client libraries" +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["codecov", "coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-core" +version = "5.3.0" +description = "Jupyter core package. A base package on which Jupyter projects rely." 
+category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.2.2" +description = "Pygments theme using JupyterLab CSS variables" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "kiwisolver" +version = "1.4.4" +description = "A fast implementation of the Cassowary constraint solver" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "markdown" +version = "3.4.1" +description = "Python implementation of Markdown." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.2" +description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "matplotlib" +version = "3.7.1" +description = "Python plotting package" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} +kiwisolver = ">=1.0.1" +numpy = ">=1.20" +packaging = ">=20.0" +pillow = ">=6.2.0" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" +setuptools_scm = ">=7" + +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mistune" +version = "2.0.5" +description = "A sane Markdown parser with useful plugins and renderers" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "munch" +version = "2.5.0" +description = "A dot-accessible dictionary (a la JavaScript objects)" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + +[package.extras] +testing = ["astroid (>=1.5.3,<1.6.0)", "astroid (>=2.0)", "coverage", "pylint (>=1.7.2,<1.8.0)", "pylint (>=2.3.1,<2.4.0)", "pytest"] +yaml = ["PyYAML (>=5.1.0)"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "nbclient" +version = "0.7.2" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
+category = "dev" +optional = false +python-versions = ">=3.7.0" + +[package.dependencies] +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +nbformat = ">=5.1" +traitlets = ">=5.3" + +[package.extras] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme"] +test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] + +[[package]] +name = "nbconvert" +version = "7.2.10" +description = "Converting Jupyter Notebooks" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +beautifulsoup4 = "*" +bleach = "*" +defusedxml = "*" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0" +jupyter-core = ">=4.7" +jupyterlab-pygments = "*" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<3" +nbclient = ">=0.5.0" +nbformat = ">=5.1" +packaging = "*" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +tinycss2 = "*" +traitlets = ">=5.0" + +[package.extras] +all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] +docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] +qtpdf = ["nbconvert[qtpng]"] +qtpng = ["pyqtwebengine (>=5.15)"] +serve = ["tornado (>=6.1)"] +test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pytest", "pytest-dependency"] +webpdf = ["pyppeteer (>=1,<1.1)"] + +[[package]] +name = "nbformat" +version = "5.8.0" +description = "The Jupyter Notebook format" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +fastjsonschema = "*" +jsonschema = ">=2.6" +jupyter-core = "*" +traitlets = ">=5.1" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["pep440", "pre-commit", "pytest", "testpath"] + +[[package]] +name = "nest-asyncio" +version = "1.5.6" +description = "Patch asyncio to allow nested event loops" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "nodeenv" +version = "1.7.0" +description = "Node.js virtual environment builder" +category = "dev" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "numpy" +version = "1.24.2" +description = "Fundamental package for array computing in Python" +category = "main" +optional = false +python-versions = ">=3.8" + +[[package]] +name = "packaging" +version = "23.0" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pandas" +version = "1.5.3" +description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +numpy = [ + {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, +] +python-dateutil = ">=2.8.1" +pytz = ">=2020.1" + +[package.extras] +test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] + +[[package]] +name = "pandocfilters" +version = "1.5.0" +description = "Utilities for writing pandoc filters in python" +category = "dev" +optional = false +python-versions = ">=2.7, 
!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pathspec" +version = "0.11.1" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pillow" +version = "9.4.0" +description = "Python Imaging Library (Fork)" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "platformdirs" +version = "3.1.1" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "2.21.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "prompt-toolkit" +version = "3.0.38" +description = "Library for building powerful interactive command lines in Python" +category = "dev" +optional = false +python-versions = ">=3.7.0" + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psutil" +version = "5.9.4" +description = "Cross-platform lib for process and system monitoring in Python." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pweave" +version = "0.30.3" +description = "Scientific reports with embedded python computations with reST, LaTeX or markdown" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +ipykernel = "*" +ipython = ">=6.0" +jupyter-client = "*" +markdown = "*" +nbconvert = "*" +nbformat = "*" +pygments = "*" + +[package.extras] +doc = ["sphinx", "sphinx-rtd-theme"] +test = ["coverage", "ipython", "matplotlib", "nose", "notebook", "scipy"] + +[[package]] +name = "py4j" +version = "0.10.9.5" +description = "Enables Python programs to dynamically access arbitrary Java objects" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pygments" +version = "2.14.0" +description = "Pygments is a syntax highlighting package written in Python." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyproj" +version = "3.4.1" +description = "Python interface to PROJ (cartographic projections and coordinate transformations library)" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +certifi = "*" + +[[package]] +name = "pyrsistent" +version = "0.19.3" +description = "Persistent/Functional/Immutable data structures" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pyspark" +version = "3.3.2" +description = "Apache Spark Python API" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +py4j = "0.10.9.5" + +[package.extras] +ml = ["numpy (>=1.15)"] +mllib = ["numpy (>=1.15)"] +pandas-on-spark = ["numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=1.0.0)"] +sql = ["pandas (>=1.0.5)", "pyarrow (>=1.0.0)"] + +[[package]] +name = "pytest" +version = "7.2.2" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.0.0" +description = "Pytest plugin for measuring 
coverage." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2022.7.1" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pywin32" +version = "305" +description = "Python for Window Extensions" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pyzmq" +version = "25.0.2" +description = "Python bindings for 0MQ" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "rasterio" +version = "1.3.6" +description = "Fast and direct raster I/O for use with Numpy and SciPy" +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +affine = "*" +attrs = "*" +boto3 = {version = ">=1.2.4", optional = true, markers = "extra == \"s3\""} +certifi = "*" +click = ">=4.0" +click-plugins = "*" +cligj = ">=0.5" +numpy = ">=1.18" +setuptools = "*" +snuggs = ">=1.4.1" + +[package.extras] +all = ["boto3 (>=1.2.4)", "ghp-import", "hypothesis", "ipython (>=2.0)", "matplotlib", "numpydoc", "packaging", "pytest (>=2.8.2)", "pytest-cov (>=2.2.0)", "shapely", "sphinx", "sphinx-rtd-theme"] +docs = ["ghp-import", "numpydoc", "sphinx", "sphinx-rtd-theme"] +ipython = ["ipython (>=2.0)"] +plot = ["matplotlib"] +s3 = ["boto3 (>=1.2.4)"] +test = ["boto3 (>=1.2.4)", "hypothesis", "packaging", "pytest (>=2.8.2)", "pytest-cov (>=2.2.0)", "shapely"] + +[[package]] +name = "s3transfer" +version = "0.6.0" +description = "An Amazon S3 Transfer Manager" +category = "dev" +optional = false +python-versions = ">= 3.7" + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] + +[[package]] +name = "setuptools" +version = "67.6.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", 
"jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "setuptools-scm" +version = "7.1.0" +description = "the blessed package to manage your versions by scm tags" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = ">=20.0" +setuptools = "*" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +typing-extensions = "*" + +[package.extras] +test = ["pytest (>=6.2)", "virtualenv (>20)"] +toml = ["setuptools (>=42)"] + +[[package]] +name = "shapely" +version = "2.0.1" +description = "Manipulation and analysis of geometric objects" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +numpy = ">=1.14" + +[package.extras] +docs = ["matplotlib", "numpydoc (>=1.1.0,<1.2.0)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "snuggs" +version = "1.4.7" +description = "Snuggs are s-expressions for Numpy" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +numpy = "*" +pyparsing = ">=2.1.6" + +[package.extras] +test = ["hypothesis", "pytest"] + +[[package]] +name = "soupsieve" +version = "2.4" +description = "A modern CSS selector implementation for Beautiful Soup." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "stack-data" +version = "0.6.2" +description = "Extract data from python stack frames and tracebacks for informative displays" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "tinycss2" +version = "1.2.1" +description = "A tiny CSS parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tornado" +version = "6.2" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +category = "dev" +optional = false +python-versions = ">= 3.7" + +[[package]] +name = "traitlets" +version = "5.9.0" +description = "Traitlets Python configuration system" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] + +[[package]] +name = "typer" +version = "0.7.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +click = ">=7.1.1,<9.0.0" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] + +[[package]] +name = "typing-extensions" +version = "4.5.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "urllib3" +version = "1.26.15" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "virtualenv" +version = "20.21.0" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +distlib = ">=0.3.6,<1" +filelock = ">=3.4.1,<4" +platformdirs = ">=2.4,<4" + +[package.extras] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "wcwidth" +version = "0.2.6" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "wheel" +version = "0.38.4" +description = "A built-package format for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=3.0.0)"] + +[[package]] +name = "zipp" +version = "3.15.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "1.1" +python-versions = ">=3.8,<4" +content-hash = 
"c2a940b4b2c499c69f0913bcc074966afabd8b531e9ed8f2d7c13e18349bdec9" + +[metadata.files] +affine = [ + {file = "affine-2.4.0-py3-none-any.whl", hash = "sha256:8a3df80e2b2378aef598a83c1392efd47967afec4242021a0b06b4c7cbc61a92"}, + {file = "affine-2.4.0.tar.gz", hash = "sha256:a24d818d6a836c131976d22f8c27b8d3ca32d0af64c1d8d29deb7bafa4da1eea"}, +] +appnope = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] +asttokens = [ + {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, + {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, +] +attrs = [ + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, +] +backcall = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.12.0-py3-none-any.whl", hash = "sha256:2130a5ad7f513200fae61a17abb5e338ca980fa28c439c0571014bc0217e9591"}, + {file = "beautifulsoup4-4.12.0.tar.gz", hash = "sha256:c5fceeaec29d09c84970e47c65f2f0efe57872f7cff494c9691a26ec0ff13234"}, +] +black = [ + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, +] +bleach = [ + {file = "bleach-6.0.0-py3-none-any.whl", hash = 
"sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4"}, + {file = "bleach-6.0.0.tar.gz", hash = "sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414"}, +] +boto3 = [ + {file = "boto3-1.26.96-py3-none-any.whl", hash = "sha256:f961aa704bd7aeefc186ede52cabc3ef4c336979bb4098d3aad7ca922d55fc27"}, + {file = "boto3-1.26.96.tar.gz", hash = "sha256:7017102c58b9984749bef3b9f476940593c311504354b9ee9dd7bb0b4657a77d"}, +] +botocore = [ + {file = "botocore-1.29.96-py3-none-any.whl", hash = "sha256:c449d7050e9bc4a8b8a62ae492cbdc931b786bf5752b792867f1276967fadaed"}, + {file = "botocore-1.29.96.tar.gz", hash = "sha256:b9781108810e33f8406942c3e3aab748650c59d5cddb7c9d323f4e2682e7b0b6"}, +] +certifi = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] +cffi = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = 
"cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] +cfgv = [ + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, +] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +click-plugins = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] +cligj = [ + {file = "cligj-0.7.2-py3-none-any.whl", hash = "sha256:c1ca117dbce1fe20a5809dc96f01e1c2840f6dcc939b3ddbb1111bf330ba82df"}, + {file = "cligj-0.7.2.tar.gz", hash = "sha256:a4bc13d623356b373c2c27c53dbd9c68cae5d526270bfa71f6c6fa69669c6b27"}, +] +colorama = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +comm = [ + {file = "comm-0.1.2-py3-none-any.whl", hash = "sha256:9f3abf3515112fa7c55a42a6a5ab358735c9dccc8b5910a9d8e3ef5998130666"}, + {file = "comm-0.1.2.tar.gz", hash = "sha256:3e2f5826578e683999b93716285b3b1f344f157bf75fa9ce0a797564e742f062"}, +] +contourpy = [ + {file = "contourpy-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:95c3acddf921944f241b6773b767f1cbce71d03307270e2d769fd584d5d1092d"}, + {file = "contourpy-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc1464c97579da9f3ab16763c32e5c5d5bb5fa1ec7ce509a4ca6108b61b84fab"}, + {file = "contourpy-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8acf74b5d383414401926c1598ed77825cd530ac7b463ebc2e4f46638f56cce6"}, + {file = "contourpy-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c71fdd8f1c0f84ffd58fca37d00ca4ebaa9e502fb49825484da075ac0b0b803"}, + {file = "contourpy-1.0.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f99e9486bf1bb979d95d5cffed40689cb595abb2b841f2991fc894b3452290e8"}, + {file = "contourpy-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87f4d8941a9564cda3f7fa6a6cd9b32ec575830780677932abdec7bcb61717b0"}, + {file = "contourpy-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9e20e5a1908e18aaa60d9077a6d8753090e3f85ca25da6e25d30dc0a9e84c2c6"}, + {file = "contourpy-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a877ada905f7d69b2a31796c4b66e31a8068b37aa9b78832d41c82fc3e056ddd"}, + {file = "contourpy-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6381fa66866b0ea35e15d197fc06ac3840a9b2643a6475c8fff267db8b9f1e69"}, + {file = "contourpy-1.0.7-cp310-cp310-win32.whl", hash = "sha256:3c184ad2433635f216645fdf0493011a4667e8d46b34082f5a3de702b6ec42e3"}, + {file = "contourpy-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:3caea6365b13119626ee996711ab63e0c9d7496f65641f4459c60a009a1f3e80"}, + {file = "contourpy-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:ed33433fc3820263a6368e532f19ddb4c5990855e4886088ad84fd7c4e561c71"}, + {file = "contourpy-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:38e2e577f0f092b8e6774459317c05a69935a1755ecfb621c0a98f0e3c09c9a5"}, + {file = "contourpy-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ae90d5a8590e5310c32a7630b4b8618cef7563cebf649011da80874d0aa8f414"}, + {file = "contourpy-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130230b7e49825c98edf0b428b7aa1125503d91732735ef897786fe5452b1ec2"}, + {file = "contourpy-1.0.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58569c491e7f7e874f11519ef46737cea1d6eda1b514e4eb5ac7dab6aa864d02"}, + {file = "contourpy-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54d43960d809c4c12508a60b66cb936e7ed57d51fb5e30b513934a4a23874fae"}, + {file = "contourpy-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:152fd8f730c31fd67fe0ffebe1df38ab6a669403da93df218801a893645c6ccc"}, + {file = "contourpy-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9056c5310eb1daa33fc234ef39ebfb8c8e2533f088bbf0bc7350f70a29bde1ac"}, + {file = "contourpy-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a9d7587d2fdc820cc9177139b56795c39fb8560f540bba9ceea215f1f66e1566"}, + {file = "contourpy-1.0.7-cp311-cp311-win32.whl", hash = "sha256:4ee3ee247f795a69e53cd91d927146fb16c4e803c7ac86c84104940c7d2cabf0"}, + {file = "contourpy-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:5caeacc68642e5f19d707471890f037a13007feba8427eb7f2a60811a1fc1350"}, + {file = "contourpy-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd7dc0e6812b799a34f6d12fcb1000539098c249c8da54f3566c6a6461d0dbad"}, + {file = "contourpy-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0f9d350b639db6c2c233d92c7f213d94d2e444d8e8fc5ca44c9706cf72193772"}, + {file = "contourpy-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e96a08b62bb8de960d3a6afbc5ed8421bf1a2d9c85cc4ea73f4bc81b4910500f"}, + {file = "contourpy-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:031154ed61f7328ad7f97662e48660a150ef84ee1bc8876b6472af88bf5a9b98"}, + {file = "contourpy-1.0.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e9ebb4425fc1b658e13bace354c48a933b842d53c458f02c86f371cecbedecc"}, + {file = "contourpy-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efb8f6d08ca7998cf59eaf50c9d60717f29a1a0a09caa46460d33b2924839dbd"}, + {file = "contourpy-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6c180d89a28787e4b73b07e9b0e2dac7741261dbdca95f2b489c4f8f887dd810"}, + {file = "contourpy-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b8d587cc39057d0afd4166083d289bdeff221ac6d3ee5046aef2d480dc4b503c"}, + {file = "contourpy-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:769eef00437edf115e24d87f8926955f00f7704bede656ce605097584f9966dc"}, + {file = "contourpy-1.0.7-cp38-cp38-win32.whl", hash = "sha256:62398c80ef57589bdbe1eb8537127321c1abcfdf8c5f14f479dbbe27d0322e66"}, + {file = "contourpy-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:57119b0116e3f408acbdccf9eb6ef19d7fe7baf0d1e9aaa5381489bc1aa56556"}, + {file = "contourpy-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:30676ca45084ee61e9c3da589042c24a57592e375d4b138bd84d8709893a1ba4"}, + {file = "contourpy-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e927b3868bd1e12acee7cc8f3747d815b4ab3e445a28d2e5373a7f4a6e76ba1"}, + {file = 
"contourpy-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:366a0cf0fc079af5204801786ad7a1c007714ee3909e364dbac1729f5b0849e5"}, + {file = "contourpy-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ba9bb365446a22411f0673abf6ee1fea3b2cf47b37533b970904880ceb72f3"}, + {file = "contourpy-1.0.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71b0bf0c30d432278793d2141362ac853859e87de0a7dee24a1cea35231f0d50"}, + {file = "contourpy-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7281244c99fd7c6f27c1c6bfafba878517b0b62925a09b586d88ce750a016d2"}, + {file = "contourpy-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b6d0f9e1d39dbfb3977f9dd79f156c86eb03e57a7face96f199e02b18e58d32a"}, + {file = "contourpy-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7f6979d20ee5693a1057ab53e043adffa1e7418d734c1532e2d9e915b08d8ec2"}, + {file = "contourpy-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5dd34c1ae752515318224cba7fc62b53130c45ac6a1040c8b7c1a223c46e8967"}, + {file = "contourpy-1.0.7-cp39-cp39-win32.whl", hash = "sha256:c5210e5d5117e9aec8c47d9156d1d3835570dd909a899171b9535cb4a3f32693"}, + {file = "contourpy-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:60835badb5ed5f4e194a6f21c09283dd6e007664a86101431bf870d9e86266c4"}, + {file = "contourpy-1.0.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ce41676b3d0dd16dbcfabcc1dc46090aaf4688fd6e819ef343dbda5a57ef0161"}, + {file = "contourpy-1.0.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a011cf354107b47c58ea932d13b04d93c6d1d69b8b6dce885e642531f847566"}, + {file = "contourpy-1.0.7-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31a55dccc8426e71817e3fe09b37d6d48ae40aae4ecbc8c7ad59d6893569c436"}, + {file = "contourpy-1.0.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69f8ff4db108815addd900a74df665e135dbbd6547a8a69333a68e1f6e368ac2"}, + {file = "contourpy-1.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efe99298ba37e37787f6a2ea868265465410822f7bea163edcc1bd3903354ea9"}, + {file = "contourpy-1.0.7-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a1e97b86f73715e8670ef45292d7cc033548266f07d54e2183ecb3c87598888f"}, + {file = "contourpy-1.0.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc331c13902d0f50845099434cd936d49d7a2ca76cb654b39691974cb1e4812d"}, + {file = "contourpy-1.0.7-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24847601071f740837aefb730e01bd169fbcaa610209779a78db7ebb6e6a7051"}, + {file = "contourpy-1.0.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abf298af1e7ad44eeb93501e40eb5a67abbf93b5d90e468d01fc0c4451971afa"}, + {file = "contourpy-1.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:64757f6460fc55d7e16ed4f1de193f362104285c667c112b50a804d482777edd"}, + {file = "contourpy-1.0.7.tar.gz", hash = "sha256:d8165a088d31798b59e91117d1f5fc3df8168d8b48c4acc10fc0df0d0bdbcc5e"}, +] +coverage = [ + {file = "coverage-7.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7"}, + {file = "coverage-7.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d"}, + {file = "coverage-7.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5"}, + {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169"}, + {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6"}, + {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137"}, + {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90"}, + {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2"}, + {file = "coverage-7.2.2-cp310-cp310-win32.whl", hash = "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292"}, + {file = "coverage-7.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab"}, + {file = "coverage-7.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b"}, + {file = "coverage-7.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5"}, + {file = "coverage-7.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731"}, + {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd"}, + {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d"}, + {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212"}, + {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54"}, + {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57"}, + {file = "coverage-7.2.2-cp311-cp311-win32.whl", hash = "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d"}, + {file = "coverage-7.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512"}, + {file = "coverage-7.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9"}, + {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e"}, + {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69"}, + {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0"}, + {file = 
"coverage-7.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f"}, + {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67"}, + {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9"}, + {file = "coverage-7.2.2-cp37-cp37m-win32.whl", hash = "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8"}, + {file = "coverage-7.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25"}, + {file = "coverage-7.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6"}, + {file = "coverage-7.2.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5"}, + {file = "coverage-7.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4"}, + {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd"}, + {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84"}, + {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540"}, + {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88"}, + {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2"}, + {file = "coverage-7.2.2-cp38-cp38-win32.whl", hash = "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3"}, + {file = "coverage-7.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8"}, + {file = "coverage-7.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d"}, + {file = "coverage-7.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005"}, + {file = "coverage-7.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988"}, + {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149"}, + {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8"}, + {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140"}, + {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016"}, + {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be"}, + {file = "coverage-7.2.2-cp39-cp39-win32.whl", hash = "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc"}, + {file = "coverage-7.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef"}, + {file = "coverage-7.2.2-pp37.pp38.pp39-none-any.whl", hash = "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968"}, + {file = "coverage-7.2.2.tar.gz", hash = "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2"}, +] +cycler = [ + {file = "cycler-0.11.0-py3-none-any.whl", hash = "sha256:3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3"}, + {file = "cycler-0.11.0.tar.gz", hash = "sha256:9c87405839a19696e837b3b818fed3f5f69f16f1eec1a1ad77e043dcea9c772f"}, +] +debugpy = [ + {file = "debugpy-1.6.6-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0ea1011e94416e90fb3598cc3ef5e08b0a4dd6ce6b9b33ccd436c1dffc8cd664"}, + {file = "debugpy-1.6.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dff595686178b0e75580c24d316aa45a8f4d56e2418063865c114eef651a982e"}, + {file = "debugpy-1.6.6-cp310-cp310-win32.whl", hash = "sha256:87755e173fcf2ec45f584bb9d61aa7686bb665d861b81faa366d59808bbd3494"}, + {file = "debugpy-1.6.6-cp310-cp310-win_amd64.whl", hash = "sha256:72687b62a54d9d9e3fb85e7a37ea67f0e803aaa31be700e61d2f3742a5683917"}, + {file = "debugpy-1.6.6-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:78739f77c58048ec006e2b3eb2e0cd5a06d5f48c915e2fc7911a337354508110"}, + {file = "debugpy-1.6.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23c29e40e39ad7d869d408ded414f6d46d82f8a93b5857ac3ac1e915893139ca"}, + {file = "debugpy-1.6.6-cp37-cp37m-win32.whl", hash = "sha256:7aa7e103610e5867d19a7d069e02e72eb2b3045b124d051cfd1538f1d8832d1b"}, + {file = "debugpy-1.6.6-cp37-cp37m-win_amd64.whl", hash = "sha256:f6383c29e796203a0bba74a250615ad262c4279d398e89d895a69d3069498305"}, + {file = "debugpy-1.6.6-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:23363e6d2a04d726bbc1400bd4e9898d54419b36b2cdf7020e3e215e1dcd0f8e"}, + {file = "debugpy-1.6.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b5d1b13d7c7bf5d7cf700e33c0b8ddb7baf030fcf502f76fc061ddd9405d16c"}, + {file = "debugpy-1.6.6-cp38-cp38-win32.whl", hash = "sha256:70ab53918fd907a3ade01909b3ed783287ede362c80c75f41e79596d5ccacd32"}, + {file = "debugpy-1.6.6-cp38-cp38-win_amd64.whl", hash = "sha256:c05349890804d846eca32ce0623ab66c06f8800db881af7a876dc073ac1c2225"}, + {file = "debugpy-1.6.6-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:11a0f3a106f69901e4a9a5683ce943a7a5605696024134b522aa1bfda25b5fec"}, + {file = "debugpy-1.6.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a771739902b1ae22a120dbbb6bd91b2cae6696c0e318b5007c5348519a4211c6"}, + {file = "debugpy-1.6.6-cp39-cp39-win32.whl", hash = "sha256:549ae0cb2d34fc09d1675f9b01942499751d174381b6082279cf19cdb3c47cbe"}, + {file = "debugpy-1.6.6-cp39-cp39-win_amd64.whl", hash = "sha256:de4a045fbf388e120bb6ec66501458d3134f4729faed26ff95de52a754abddb1"}, + {file = "debugpy-1.6.6-py2.py3-none-any.whl", hash = "sha256:be596b44448aac14eb3614248c91586e2bc1728e020e82ef3197189aae556115"}, + {file = "debugpy-1.6.6.zip", hash = "sha256:b9c2130e1c632540fbf9c2c88341493797ddf58016e7cba02e311de9b0a96b67"}, +] +decorator = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = 
"sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] +defusedxml = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] +deprecation = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] +distlib = [ + {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, + {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, +] +exceptiongroup = [ + {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, + {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, +] +executing = [ + {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, + {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, +] +fastjsonschema = [ + {file = "fastjsonschema-2.16.3-py3-none-any.whl", hash = "sha256:04fbecc94300436f628517b05741b7ea009506ce8f946d40996567c669318490"}, + {file = "fastjsonschema-2.16.3.tar.gz", hash = "sha256:4a30d6315a68c253cfa8f963b9697246315aa3db89f98b97235e345dedfb0b8e"}, +] +filelock = [ + {file = "filelock-3.10.0-py3-none-any.whl", hash = "sha256:e90b34656470756edf8b19656785c5fea73afa1953f3e1b0d645cef11cab3182"}, + {file = "filelock-3.10.0.tar.gz", hash = "sha256:3199fd0d3faea8b911be52b663dfccceb84c95949dd13179aa21436d1a79c4ce"}, +] +fiona = [ + {file = "Fiona-1.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:c14a39d6a57eaa50cbf6553e7e464960d9dc7773cf4058409a53cc26034ad947"}, + {file = "Fiona-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16576ca4f21f21c19c4306c2ebb503db408eae4e6690972b62acb897ceab0a8d"}, + {file = "Fiona-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:d2ba52ac172193d452cfcecd71fa69212056eb7e5747174d28838c9b95ba47c3"}, + {file = "Fiona-1.9.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:6a7f8830659532b3900ea202b8bb82043c4305fc61f78ffc4ffccd86c079472f"}, + {file = "Fiona-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eb7ac43c4e6633d6262cd3d6b46db3fc925de872626b10e162bbefe7fa7157e"}, + {file = "Fiona-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:509b3bd7e38a041f5ad9dd25f4ecf2ea6d736879b8abb54d987a00138beeb7a1"}, + {file = "Fiona-1.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:72f332c394e63b70800a04b92e9eb6daafaee4f5f467f8f4b4780aa249da3c37"}, + {file = "Fiona-1.9.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:5630e29cf25a4381f54a1060df0368d63da833d14fabc5ce4a3650138ba519a5"}, + {file = "Fiona-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8b80d739447e9408abb1abadf198decab01baf266e163705b93bd51f5172be8d"}, + {file = "Fiona-1.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = 
"sha256:07c9144c1056d38bfef6b071d9cb25b1ec1c3f40facc55738574ea3f704bbfec"}, + {file = "Fiona-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348e2241360863b2e9c476c1444ecc499a9f8a1d499f28568bd4f1e5fd533d1f"}, + {file = "Fiona-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:11174b13abce333929fb609e1f5c4872226398d4e4fb1bfc866ed6a11035a13d"}, + {file = "Fiona-1.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:656373f74d10300f472321b0bd96bc0be553bf64bd409b420a2ca02e4fc616f8"}, + {file = "Fiona-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2effb6a21ad3ecc4d3c8e39208cf443f3fe42300492226057f2eaccf827bc3b2"}, + {file = "Fiona-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e4bae3ca74c225d5ab8c99e5c76b55def0132b62bf2447c67a14025de428115"}, + {file = "Fiona-1.9.2.tar.gz", hash = "sha256:f9263c5f97206bf2eb2c010d52e8ffc54e96886b0e698badde25ff109b32952a"}, +] +fonttools = [ + {file = "fonttools-4.39.2-py3-none-any.whl", hash = "sha256:85245aa2fd4cf502a643c9a9a2b5a393703e150a6eaacc3e0e84bb448053f061"}, + {file = "fonttools-4.39.2.zip", hash = "sha256:e2d9f10337c9e3b17f9bce17a60a16a885a7d23b59b7f45ce07ea643e5580439"}, +] +geopandas = [ + {file = "geopandas-0.12.2-py3-none-any.whl", hash = "sha256:0a470e4bf6f5367e6fd83ab6b40405e0b805c8174665bbcb7c4077ed90202912"}, + {file = "geopandas-0.12.2.tar.gz", hash = "sha256:0acdacddefa176525e4da6d9aeeece225da26055c4becdc6e97cf40fa97c27f4"}, +] +identify = [ + {file = "identify-2.5.21-py2.py3-none-any.whl", hash = "sha256:69edcaffa8e91ae0f77d397af60f148b6b45a8044b2cc6d99cafa5b04793ff00"}, + {file = "identify-2.5.21.tar.gz", hash = "sha256:7671a05ef9cfaf8ff63b15d45a91a1147a03aaccb2976d4e9bd047cbbc508471"}, +] +importlib-metadata = [ + {file = "importlib_metadata-6.1.0-py3-none-any.whl", hash = "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09"}, + {file = "importlib_metadata-6.1.0.tar.gz", hash = "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20"}, +] +importlib-resources = [ + {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"}, + {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"}, +] +iniconfig = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] +ipykernel = [ + {file = "ipykernel-6.22.0-py3-none-any.whl", hash = "sha256:1ae6047c1277508933078163721bbb479c3e7292778a04b4bacf0874550977d6"}, + {file = "ipykernel-6.22.0.tar.gz", hash = "sha256:302558b81f1bc22dc259fb2a0c5c7cf2f4c0bdb21b50484348f7bafe7fb71421"}, +] +ipython = [ + {file = "ipython-8.11.0-py3-none-any.whl", hash = "sha256:5b54478e459155a326bf5f42ee4f29df76258c0279c36f21d71ddb560f88b156"}, + {file = "ipython-8.11.0.tar.gz", hash = "sha256:735cede4099dbc903ee540307b9171fbfef4aa75cfcacc5a273b2cda2f02be04"}, +] +ipython-genutils = [ + {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, + {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, +] +isort = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file 
= "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] +jedi = [ + {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, + {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, +] +jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +jmespath = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] +jsonschema = [ + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, +] +jupyter-client = [ + {file = "jupyter_client-8.1.0-py3-none-any.whl", hash = "sha256:d5b8e739d7816944be50f81121a109788a3d92732ecf1ad1e4dadebc948818fe"}, + {file = "jupyter_client-8.1.0.tar.gz", hash = "sha256:3fbab64100a0dcac7701b1e0f1a4412f1ccb45546ff2ad9bc4fcbe4e19804811"}, +] +jupyter-core = [ + {file = "jupyter_core-5.3.0-py3-none-any.whl", hash = "sha256:d4201af84559bc8c70cead287e1ab94aeef3c512848dde077b7684b54d67730d"}, + {file = "jupyter_core-5.3.0.tar.gz", hash = "sha256:6db75be0c83edbf1b7c9f91ec266a9a24ef945da630f3120e1a0046dc13713fc"}, +] +jupyterlab-pygments = [ + {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, + {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, +] +kiwisolver = [ + {file = "kiwisolver-1.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f5e60fabb7343a836360c4f0919b8cd0d6dbf08ad2ca6b9cf90bf0c76a3c4f6"}, + {file = "kiwisolver-1.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:10ee06759482c78bdb864f4109886dff7b8a56529bc1609d4f1112b93fe6423c"}, + {file = "kiwisolver-1.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c79ebe8f3676a4c6630fd3f777f3cfecf9289666c84e775a67d1d358578dc2e3"}, + {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abbe9fa13da955feb8202e215c4018f4bb57469b1b78c7a4c5c7b93001699938"}, + {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7577c1987baa3adc4b3c62c33bd1118c3ef5c8ddef36f0f2c950ae0b199e100d"}, + {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ad8285b01b0d4695102546b342b493b3ccc6781fc28c8c6a1bb63e95d22f09"}, + {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed58b8acf29798b036d347791141767ccf65eee7f26bde03a71c944449e53de"}, + {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a68b62a02953b9841730db7797422f983935aeefceb1679f0fc85cbfbd311c32"}, + {file = "kiwisolver-1.4.4-cp310-cp310-win32.whl", hash = "sha256:e92a513161077b53447160b9bd8f522edfbed4bd9759e4c18ab05d7ef7e49408"}, + {file = "kiwisolver-1.4.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:3fe20f63c9ecee44560d0e7f116b3a747a5d7203376abeea292ab3152334d004"}, + {file = "kiwisolver-1.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ea21f66820452a3f5d1655f8704a60d66ba1191359b96541eaf457710a5fc6"}, + {file = "kiwisolver-1.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc9db8a3efb3e403e4ecc6cd9489ea2bac94244f80c78e27c31dcc00d2790ac2"}, + {file = "kiwisolver-1.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5b61785a9ce44e5a4b880272baa7cf6c8f48a5180c3e81c59553ba0cb0821ca"}, + {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2dbb44c3f7e6c4d3487b31037b1bdbf424d97687c1747ce4ff2895795c9bf69"}, + {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6295ecd49304dcf3bfbfa45d9a081c96509e95f4b9d0eb7ee4ec0530c4a96514"}, + {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bd472dbe5e136f96a4b18f295d159d7f26fd399136f5b17b08c4e5f498cd494"}, + {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf7d9fce9bcc4752ca4a1b80aabd38f6d19009ea5cbda0e0856983cf6d0023f5"}, + {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d6601aed50c74e0ef02f4204da1816147a6d3fbdc8b3872d263338a9052c51"}, + {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:877272cf6b4b7e94c9614f9b10140e198d2186363728ed0f701c6eee1baec1da"}, + {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:db608a6757adabb32f1cfe6066e39b3706d8c3aa69bbc353a5b61edad36a5cb4"}, + {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5853eb494c71e267912275e5586fe281444eb5e722de4e131cddf9d442615626"}, + {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f0a1dbdb5ecbef0d34eb77e56fcb3e95bbd7e50835d9782a45df81cc46949750"}, + {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:283dffbf061a4ec60391d51e6155e372a1f7a4f5b15d59c8505339454f8989e4"}, + {file = "kiwisolver-1.4.4-cp311-cp311-win32.whl", hash = "sha256:d06adcfa62a4431d404c31216f0f8ac97397d799cd53800e9d3efc2fbb3cf14e"}, + {file = "kiwisolver-1.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e7da3fec7408813a7cebc9e4ec55afed2d0fd65c4754bc376bf03498d4e92686"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:62ac9cc684da4cf1778d07a89bf5f81b35834cb96ca523d3a7fb32509380cbf6"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41dae968a94b1ef1897cb322b39360a0812661dba7c682aa45098eb8e193dbdf"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02f79693ec433cb4b5f51694e8477ae83b3205768a6fb48ffba60549080e295b"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0611a0a2a518464c05ddd5a3a1a0e856ccc10e67079bb17f265ad19ab3c7597"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:db5283d90da4174865d520e7366801a93777201e91e79bacbac6e6927cbceede"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1041feb4cda8708ce73bb4dcb9ce1ccf49d553bf87c3954bdfa46f0c3f77252c"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-win32.whl", hash = 
"sha256:a553dadda40fef6bfa1456dc4be49b113aa92c2a9a9e8711e955618cd69622e3"}, + {file = "kiwisolver-1.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:03baab2d6b4a54ddbb43bba1a3a2d1627e82d205c5cf8f4c924dc49284b87166"}, + {file = "kiwisolver-1.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:841293b17ad704d70c578f1f0013c890e219952169ce8a24ebc063eecf775454"}, + {file = "kiwisolver-1.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f4f270de01dd3e129a72efad823da90cc4d6aafb64c410c9033aba70db9f1ff0"}, + {file = "kiwisolver-1.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f9f39e2f049db33a908319cf46624a569b36983c7c78318e9726a4cb8923b26c"}, + {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97528e64cb9ebeff9701e7938653a9951922f2a38bd847787d4a8e498cc83ae"}, + {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d1573129aa0fd901076e2bfb4275a35f5b7aa60fbfb984499d661ec950320b0"}, + {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad881edc7ccb9d65b0224f4e4d05a1e85cf62d73aab798943df6d48ab0cd79a1"}, + {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b428ef021242344340460fa4c9185d0b1f66fbdbfecc6c63eff4b7c29fad429d"}, + {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e407cb4bd5a13984a6c2c0fe1845e4e41e96f183e5e5cd4d77a857d9693494c"}, + {file = "kiwisolver-1.4.4-cp38-cp38-win32.whl", hash = "sha256:75facbe9606748f43428fc91a43edb46c7ff68889b91fa31f53b58894503a191"}, + {file = "kiwisolver-1.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:5bce61af018b0cb2055e0e72e7d65290d822d3feee430b7b8203d8a855e78766"}, + {file = "kiwisolver-1.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8c808594c88a025d4e322d5bb549282c93c8e1ba71b790f539567932722d7bd8"}, + {file = "kiwisolver-1.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0a71d85ecdd570ded8ac3d1c0f480842f49a40beb423bb8014539a9f32a5897"}, + {file = "kiwisolver-1.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b533558eae785e33e8c148a8d9921692a9fe5aa516efbdff8606e7d87b9d5824"}, + {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:efda5fc8cc1c61e4f639b8067d118e742b812c930f708e6667a5ce0d13499e29"}, + {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7c43e1e1206cd421cd92e6b3280d4385d41d7166b3ed577ac20444b6995a445f"}, + {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc8d3bd6c72b2dd9decf16ce70e20abcb3274ba01b4e1c96031e0c4067d1e7cd"}, + {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ea39b0ccc4f5d803e3337dd46bcce60b702be4d86fd0b3d7531ef10fd99a1ac"}, + {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968f44fdbf6dd757d12920d63b566eeb4d5b395fd2d00d29d7ef00a00582aac9"}, + {file = "kiwisolver-1.4.4-cp39-cp39-win32.whl", hash = "sha256:da7e547706e69e45d95e116e6939488d62174e033b763ab1496b4c29b76fabea"}, + {file = "kiwisolver-1.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:ba59c92039ec0a66103b1d5fe588fa546373587a7d68f5c96f743c3396afc04b"}, + {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:91672bacaa030f92fc2f43b620d7b337fd9a5af28b0d6ed3f77afc43c4a64b5a"}, + {file = 
"kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:787518a6789009c159453da4d6b683f468ef7a65bbde796bcea803ccf191058d"}, + {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da152d8cdcab0e56e4f45eb08b9aea6455845ec83172092f09b0e077ece2cf7a"}, + {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ecb1fa0db7bf4cff9dac752abb19505a233c7f16684c5826d1f11ebd9472b871"}, + {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:28bc5b299f48150b5f822ce68624e445040595a4ac3d59251703779836eceff9"}, + {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:81e38381b782cc7e1e46c4e14cd997ee6040768101aefc8fa3c24a4cc58e98f8"}, + {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2a66fdfb34e05b705620dd567f5a03f239a088d5a3f321e7b6ac3239d22aa286"}, + {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:872b8ca05c40d309ed13eb2e582cab0c5a05e81e987ab9c521bf05ad1d5cf5cb"}, + {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:70e7c2e7b750585569564e2e5ca9845acfaa5da56ac46df68414f29fea97be9f"}, + {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9f85003f5dfa867e86d53fac6f7e6f30c045673fa27b603c397753bebadc3008"}, + {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e307eb9bd99801f82789b44bb45e9f541961831c7311521b13a6c85afc09767"}, + {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1792d939ec70abe76f5054d3f36ed5656021dcad1322d1cc996d4e54165cef9"}, + {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cb459eea32a4e2cf18ba5fcece2dbdf496384413bc1bae15583f19e567f3b2"}, + {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36dafec3d6d6088d34e2de6b85f9d8e2324eb734162fba59d2ba9ed7a2043d5b"}, + {file = "kiwisolver-1.4.4.tar.gz", hash = "sha256:d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955"}, +] +markdown = [ + {file = "Markdown-3.4.1-py3-none-any.whl", hash = "sha256:08fb8465cffd03d10b9dd34a5c3fea908e20391a2a90b88d66362cb05beed186"}, + {file = "Markdown-3.4.1.tar.gz", hash = "sha256:3b809086bb6efad416156e00a0da66fe47618a5d6918dd688f53f40c8e4cfeff"}, +] +markupsafe = [ + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = 
"sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, +] +matplotlib = [ + {file = "matplotlib-3.7.1-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:95cbc13c1fc6844ab8812a525bbc237fa1470863ff3dace7352e910519e194b1"}, + {file = "matplotlib-3.7.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:08308bae9e91aca1ec6fd6dda66237eef9f6294ddb17f0d0b3c863169bf82353"}, + {file = "matplotlib-3.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:544764ba51900da4639c0f983b323d288f94f65f4024dc40ecb1542d74dc0500"}, + {file = "matplotlib-3.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56d94989191de3fcc4e002f93f7f1be5da476385dde410ddafbb70686acf00ea"}, + {file = "matplotlib-3.7.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99bc9e65901bb9a7ce5e7bb24af03675cbd7c70b30ac670aa263240635999a4"}, + {file = "matplotlib-3.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb7d248c34a341cd4c31a06fd34d64306624c8cd8d0def7abb08792a5abfd556"}, + {file = "matplotlib-3.7.1-cp310-cp310-win32.whl", hash = "sha256:ce463ce590f3825b52e9fe5c19a3c6a69fd7675a39d589e8b5fbe772272b3a24"}, + {file = "matplotlib-3.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:3d7bc90727351fb841e4d8ae620d2d86d8ed92b50473cd2b42ce9186104ecbba"}, + {file = "matplotlib-3.7.1-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:770a205966d641627fd5cf9d3cb4b6280a716522cd36b8b284a8eb1581310f61"}, + {file = "matplotlib-3.7.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f67bfdb83a8232cb7a92b869f9355d677bce24485c460b19d01970b64b2ed476"}, + {file = "matplotlib-3.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2bf092f9210e105f414a043b92af583c98f50050559616930d884387d0772aba"}, + {file = "matplotlib-3.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89768d84187f31717349c6bfadc0e0d8c321e8eb34522acec8a67b1236a66332"}, + {file = "matplotlib-3.7.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83111e6388dec67822e2534e13b243cc644c7494a4bb60584edbff91585a83c6"}, + {file = "matplotlib-3.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a867bf73a7eb808ef2afbca03bcdb785dae09595fbe550e1bab0cd023eba3de0"}, + {file = "matplotlib-3.7.1-cp311-cp311-win32.whl", hash = "sha256:fbdeeb58c0cf0595efe89c05c224e0a502d1aa6a8696e68a73c3efc6bc354304"}, + {file = "matplotlib-3.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:c0bd19c72ae53e6ab979f0ac6a3fafceb02d2ecafa023c5cca47acd934d10be7"}, + {file = "matplotlib-3.7.1-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:6eb88d87cb2c49af00d3bbc33a003f89fd9f78d318848da029383bfc08ecfbfb"}, + {file = "matplotlib-3.7.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:cf0e4f727534b7b1457898c4f4ae838af1ef87c359b76dcd5330fa31893a3ac7"}, + {file = "matplotlib-3.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:46a561d23b91f30bccfd25429c3c706afe7d73a5cc64ef2dfaf2b2ac47c1a5dc"}, + {file = "matplotlib-3.7.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8704726d33e9aa8a6d5215044b8d00804561971163563e6e6591f9dcf64340cc"}, + {file = "matplotlib-3.7.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4cf327e98ecf08fcbb82685acaf1939d3338548620ab8dfa02828706402c34de"}, + {file = "matplotlib-3.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:617f14ae9d53292ece33f45cba8503494ee199a75b44de7717964f70637a36aa"}, + {file = "matplotlib-3.7.1-cp38-cp38-win32.whl", hash = "sha256:7c9a4b2da6fac77bcc41b1ea95fadb314e92508bf5493ceff058e727e7ecf5b0"}, + {file = "matplotlib-3.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:14645aad967684e92fc349493fa10c08a6da514b3d03a5931a1bac26e6792bd1"}, + {file = "matplotlib-3.7.1-cp39-cp39-macosx_10_12_universal2.whl", hash = 
"sha256:81a6b377ea444336538638d31fdb39af6be1a043ca5e343fe18d0f17e098770b"}, + {file = "matplotlib-3.7.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:28506a03bd7f3fe59cd3cd4ceb2a8d8a2b1db41afede01f66c42561b9be7b4b7"}, + {file = "matplotlib-3.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8c587963b85ce41e0a8af53b9b2de8dddbf5ece4c34553f7bd9d066148dc719c"}, + {file = "matplotlib-3.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8bf26ade3ff0f27668989d98c8435ce9327d24cffb7f07d24ef609e33d582439"}, + {file = "matplotlib-3.7.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:def58098f96a05f90af7e92fd127d21a287068202aa43b2a93476170ebd99e87"}, + {file = "matplotlib-3.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f883a22a56a84dba3b588696a2b8a1ab0d2c3d41be53264115c71b0a942d8fdb"}, + {file = "matplotlib-3.7.1-cp39-cp39-win32.whl", hash = "sha256:4f99e1b234c30c1e9714610eb0c6d2f11809c9c78c984a613ae539ea2ad2eb4b"}, + {file = "matplotlib-3.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:3ba2af245e36990facf67fde840a760128ddd71210b2ab6406e640188d69d136"}, + {file = "matplotlib-3.7.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3032884084f541163f295db8a6536e0abb0db464008fadca6c98aaf84ccf4717"}, + {file = "matplotlib-3.7.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a2cb34336110e0ed8bb4f650e817eed61fa064acbefeb3591f1b33e3a84fd96"}, + {file = "matplotlib-3.7.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b867e2f952ed592237a1828f027d332d8ee219ad722345b79a001f49df0936eb"}, + {file = "matplotlib-3.7.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:57bfb8c8ea253be947ccb2bc2d1bb3862c2bccc662ad1b4626e1f5e004557042"}, + {file = "matplotlib-3.7.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:438196cdf5dc8d39b50a45cb6e3f6274edbcf2254f85fa9b895bf85851c3a613"}, + {file = "matplotlib-3.7.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21e9cff1a58d42e74d01153360de92b326708fb205250150018a52c70f43c290"}, + {file = "matplotlib-3.7.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75d4725d70b7c03e082bbb8a34639ede17f333d7247f56caceb3801cb6ff703d"}, + {file = "matplotlib-3.7.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:97cc368a7268141afb5690760921765ed34867ffb9655dd325ed207af85c7529"}, + {file = "matplotlib-3.7.1.tar.gz", hash = "sha256:7b73305f25eab4541bd7ee0b96d87e53ae9c9f1823be5659b806cd85786fe882"}, +] +matplotlib-inline = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] +mistune = [ + {file = "mistune-2.0.5-py2.py3-none-any.whl", hash = "sha256:bad7f5d431886fcbaf5f758118ecff70d31f75231b34024a1341120340a65ce8"}, + {file = "mistune-2.0.5.tar.gz", hash = "sha256:0246113cb2492db875c6be56974a7c893333bf26cd92891c85f63151cee09d34"}, +] +munch = [ + {file = "munch-2.5.0-py2.py3-none-any.whl", hash = "sha256:6f44af89a2ce4ed04ff8de41f70b226b984db10a91dcc7b9ac2efc1c77022fdd"}, + {file = "munch-2.5.0.tar.gz", hash = "sha256:2d735f6f24d4dba3417fa448cae40c6e896ec1fdab6cdb5e6510999758a4dbd2"}, +] +mypy-extensions = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = 
"mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] +nbclient = [ + {file = "nbclient-0.7.2-py3-none-any.whl", hash = "sha256:d97ac6257de2794f5397609df754fcbca1a603e94e924eb9b99787c031ae2e7c"}, + {file = "nbclient-0.7.2.tar.gz", hash = "sha256:884a3f4a8c4fc24bb9302f263e0af47d97f0d01fe11ba714171b320c8ac09547"}, +] +nbconvert = [ + {file = "nbconvert-7.2.10-py3-none-any.whl", hash = "sha256:e41118f81698d3d59b3c7c2887937446048f741aba6c367c1c1a77810b3e2d08"}, + {file = "nbconvert-7.2.10.tar.gz", hash = "sha256:8eed67bd8314f3ec87c4351c2f674af3a04e5890ab905d6bd927c05aec1cf27d"}, +] +nbformat = [ + {file = "nbformat-5.8.0-py3-none-any.whl", hash = "sha256:d910082bd3e0bffcf07eabf3683ed7dda0727a326c446eeb2922abe102e65162"}, + {file = "nbformat-5.8.0.tar.gz", hash = "sha256:46dac64c781f1c34dfd8acba16547024110348f9fc7eab0f31981c2a3dc48d1f"}, +] +nest-asyncio = [ + {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, + {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, +] +nodeenv = [ + {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, + {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, +] +numpy = [ + {file = "numpy-1.24.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eef70b4fc1e872ebddc38cddacc87c19a3709c0e3e5d20bf3954c147b1dd941d"}, + {file = "numpy-1.24.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d2859428712785e8a8b7d2b3ef0a1d1565892367b32f915c4a4df44d0e64f5"}, + {file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6524630f71631be2dabe0c541e7675db82651eb998496bbe16bc4f77f0772253"}, + {file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a51725a815a6188c662fb66fb32077709a9ca38053f0274640293a14fdd22978"}, + {file = "numpy-1.24.2-cp310-cp310-win32.whl", hash = "sha256:2620e8592136e073bd12ee4536149380695fbe9ebeae845b81237f986479ffc9"}, + {file = "numpy-1.24.2-cp310-cp310-win_amd64.whl", hash = "sha256:97cf27e51fa078078c649a51d7ade3c92d9e709ba2bfb97493007103c741f1d0"}, + {file = "numpy-1.24.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7de8fdde0003f4294655aa5d5f0a89c26b9f22c0a58790c38fae1ed392d44a5a"}, + {file = "numpy-1.24.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4173bde9fa2a005c2c6e2ea8ac1618e2ed2c1c6ec8a7657237854d42094123a0"}, + {file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cecaed30dc14123020f77b03601559fff3e6cd0c048f8b5289f4eeabb0eb281"}, + {file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a23f8440561a633204a67fb44617ce2a299beecf3295f0d13c495518908e910"}, + {file = "numpy-1.24.2-cp311-cp311-win32.whl", hash = "sha256:e428c4fbfa085f947b536706a2fc349245d7baa8334f0c5723c56a10595f9b95"}, + {file = "numpy-1.24.2-cp311-cp311-win_amd64.whl", hash = "sha256:557d42778a6869c2162deb40ad82612645e21d79e11c1dc62c6e82a2220ffb04"}, + {file = "numpy-1.24.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d0a2db9d20117bf523dde15858398e7c0858aadca7c0f088ac0d6edd360e9ad2"}, + {file = "numpy-1.24.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c72a6b2f4af1adfe193f7beb91ddf708ff867a3f977ef2ec53c0ffb8283ab9f5"}, + {file = 
"numpy-1.24.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e6bd0ec49a44d7690ecb623a8eac5ab8a923bce0bea6293953992edf3a76a"}, + {file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eabd64ddb96a1239791da78fa5f4e1693ae2dadc82a76bc76a14cbb2b966e96"}, + {file = "numpy-1.24.2-cp38-cp38-win32.whl", hash = "sha256:e3ab5d32784e843fc0dd3ab6dcafc67ef806e6b6828dc6af2f689be0eb4d781d"}, + {file = "numpy-1.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:76807b4063f0002c8532cfeac47a3068a69561e9c8715efdad3c642eb27c0756"}, + {file = "numpy-1.24.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4199e7cfc307a778f72d293372736223e39ec9ac096ff0a2e64853b866a8e18a"}, + {file = "numpy-1.24.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:adbdce121896fd3a17a77ab0b0b5eedf05a9834a18699db6829a64e1dfccca7f"}, + {file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:889b2cc88b837d86eda1b17008ebeb679d82875022200c6e8e4ce6cf549b7acb"}, + {file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f64bb98ac59b3ea3bf74b02f13836eb2e24e48e0ab0145bbda646295769bd780"}, + {file = "numpy-1.24.2-cp39-cp39-win32.whl", hash = "sha256:63e45511ee4d9d976637d11e6c9864eae50e12dc9598f531c035265991910468"}, + {file = "numpy-1.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:a77d3e1163a7770164404607b7ba3967fb49b24782a6ef85d9b5f54126cc39e5"}, + {file = "numpy-1.24.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92011118955724465fb6853def593cf397b4a1367495e0b59a7e69d40c4eb71d"}, + {file = "numpy-1.24.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9006288bcf4895917d02583cf3411f98631275bc67cce355a7f39f8c14338fa"}, + {file = "numpy-1.24.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:150947adbdfeceec4e5926d956a06865c1c690f2fd902efede4ca6fe2e657c3f"}, + {file = "numpy-1.24.2.tar.gz", hash = "sha256:003a9f530e880cb2cd177cba1af7220b9aa42def9c4afc2a2fc3ee6be7eb2b22"}, +] +packaging = [ + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, +] +pandas = [ + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, + {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, + {file = 
"pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, + {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, + {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, + {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, + {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, + {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, + {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, +] +pandocfilters = [ + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, +] +parso = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] +pathspec = [ + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = 
"sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, +] +pexpect = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] +pickleshare = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] +pillow = [ + {file = "Pillow-9.4.0-1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1"}, + {file = "Pillow-9.4.0-1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12"}, + {file = "Pillow-9.4.0-1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd"}, + {file = "Pillow-9.4.0-1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9"}, + {file = "Pillow-9.4.0-1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858"}, + {file = "Pillow-9.4.0-1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab"}, + {file = "Pillow-9.4.0-1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9"}, + {file = "Pillow-9.4.0-2-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:9d9a62576b68cd90f7075876f4e8444487db5eeea0e4df3ba298ee38a8d067b0"}, + {file = "Pillow-9.4.0-2-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:87708d78a14d56a990fbf4f9cb350b7d89ee8988705e58e39bdf4d82c149210f"}, + {file = "Pillow-9.4.0-2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:8a2b5874d17e72dfb80d917213abd55d7e1ed2479f38f001f264f7ce7bae757c"}, + {file = "Pillow-9.4.0-2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:83125753a60cfc8c412de5896d10a0a405e0bd88d0470ad82e0869ddf0cb3848"}, + {file = "Pillow-9.4.0-2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9e5f94742033898bfe84c93c831a6f552bb629448d4072dd312306bab3bd96f1"}, + {file = "Pillow-9.4.0-2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:013016af6b3a12a2f40b704677f8b51f72cb007dac785a9933d5c86a72a7fe33"}, + {file = "Pillow-9.4.0-2-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:99d92d148dd03fd19d16175b6d355cc1b01faf80dae93c6c3eb4163709edc0a9"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", 
hash = "sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35"}, + {file = "Pillow-9.4.0-cp310-cp310-win32.whl", hash = "sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a"}, + {file = "Pillow-9.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a"}, + {file = "Pillow-9.4.0-cp311-cp311-win32.whl", hash = "sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c"}, + {file = "Pillow-9.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee"}, + {file = "Pillow-9.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5"}, + {file = "Pillow-9.4.0-cp37-cp37m-win32.whl", hash = "sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e"}, + {file = "Pillow-9.4.0-cp37-cp37m-win_amd64.whl", hash 
= "sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628"}, + {file = "Pillow-9.4.0-cp38-cp38-win32.whl", hash = "sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d"}, + {file = "Pillow-9.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153"}, + {file = "Pillow-9.4.0-cp39-cp39-win32.whl", hash = "sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c"}, + {file = "Pillow-9.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5"}, + {file = 
"Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9"}, + {file = "Pillow-9.4.0.tar.gz", hash = "sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e"}, +] +pkgutil-resolve-name = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] +platformdirs = [ + {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, + {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +pre-commit = [ + {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, + {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, +] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, + {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, +] +psutil = [ + {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, + {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, + {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = 
"sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, + {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, + {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, + {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, + {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, + {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, +] +ptyprocess = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] +pure-eval = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] +pweave = [ + {file = "Pweave-0.30.3-py2.py3-none-any.whl", hash = "sha256:60cf8de680084b5423caa3a2131d4ff981c236f12f84f9d969a41f6632a44165"}, + {file = "Pweave-0.30.3.tar.gz", hash = "sha256:5e5298d90e06414a01f48e0d6aa4c36a70c5f223d929f2a9c7e2d388451c7357"}, +] +py4j = [ + {file = "py4j-0.10.9.5-py2.py3-none-any.whl", hash = "sha256:52d171a6a2b031d8a5d1de6efe451cf4f5baff1a2819aabc3741c8406539ba04"}, + {file = "py4j-0.10.9.5.tar.gz", hash = "sha256:276a4a3c5a2154df1860ef3303a927460e02e97b047dc0a47c1c3fb8cce34db6"}, +] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] +pygments = [ + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, +] +pyparsing = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] +pyproj = [ + {file = "pyproj-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e463c687007861a9949909211986850cfc2e72930deda0d06449ef2e315db534"}, + {file = "pyproj-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:2f87f16b902c8b2af007295c63a435f043db9e40bd45e6f96962c7b8cd08fdb5"}, + {file = "pyproj-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c60d112d8f1621a606b7f2adb0b1582f80498e663413d2ba9f5df1c93d99f432"}, + {file = "pyproj-3.4.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f38dea459e22e86326b1c7d47718a3e10c7a27910cf5eb86ea2679b8084d0c4e"}, + {file = "pyproj-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a53acbde511a7a9e1873c7f93c68f35b8c3653467b77195fe18e847555dcb7a"}, + {file = "pyproj-3.4.1-cp310-cp310-win32.whl", hash = "sha256:0c7b32382ae22a9bf5b690d24c7b4c0fb89ba313c3a91ef1a8c54b50baf10954"}, + {file = "pyproj-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:6bdac3bc1899fcc4021be06d303b342923fb8311fe06f8d862c348a1a0e78b41"}, + {file = "pyproj-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cd9f9c409f465834988ce0aa8c1ed496081c6957f2e5ef40ed28de04397d3c0b"}, + {file = "pyproj-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0406f64ff59eb3342efb102c9f31536430aa5cde5ef0bfabd5aaccb73dd8cd5a"}, + {file = "pyproj-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a98fe3e53be428e67ae6a9ee9affff92346622e0e3ea0cbc15dce939b318d395"}, + {file = "pyproj-3.4.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0189fdd7aa789542a7a623010dfff066c5849b24397f81f860ec3ee085cbf55c"}, + {file = "pyproj-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f3f75b030cf811f040c90a8758a20115e8746063e4cad0d0e941a4954d1219b"}, + {file = "pyproj-3.4.1-cp311-cp311-win32.whl", hash = "sha256:ef8c30c62fe4e386e523e14e1e83bd460f745bd2c8dfd0d0c327f9460c4d3c0c"}, + {file = "pyproj-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d1e7f42da205e0534831ae9aa9cee0353ab8c1aab2c369474adbb060294d98a"}, + {file = "pyproj-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a5eada965e8ac24e783f2493d1d9bcd11c5c93959bd43558224dd31d9faebd1c"}, + {file = "pyproj-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:19f5de1a7c3b81b676d846350d4bdf2ae6af13b9a450d1881706f088ecad0e2c"}, + {file = "pyproj-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57ec7d2b7f2773d877927abc72e2229ef8530c09181be0e28217742bae1bc4f5"}, + {file = "pyproj-3.4.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a30d78e619dae5cd1bb69addae2f1e5f8ee1b4a8ab4f3d954e9eaf41948db506"}, + {file = "pyproj-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a32e1d12340ad93232b7ea4dc1a4f4b21fa9fa9efa4b293adad45be7af6b51ec"}, + {file = "pyproj-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ce50126dad7cd4749ab86fc4c8b54ec0898149ce6710ab5c93c76a54a4afa249"}, + {file = "pyproj-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:129234afa179c8293b010ea4f73655ff7b20b5afdf7fac170f223bcf0ed6defd"}, + {file = "pyproj-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:231c038c6b65395c41ae3362320f03ce8054cb54dc63556e605695e5d461a27e"}, + {file = "pyproj-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e9d82df555cf19001bac40e1de0e40fb762dec785685b77edd6993286c01b7f7"}, + {file = "pyproj-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c0d1ac9ef5a4d2e6501a4b30136c55f1e1db049d1626cc313855c4f97d196d"}, + {file = "pyproj-3.4.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97065fe82e80f7e2740e7897a0e36e8defc0a3614927f0276b4f1d1ea1ef66fa"}, + 
{file = "pyproj-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bd633f3b8ca6eb09135dfaf06f09e2869deb139985aab26d728e8a60c9938b9"}, + {file = "pyproj-3.4.1-cp39-cp39-win32.whl", hash = "sha256:da96319b137cfd66f0bae0e300cdc77dd17af4785b9360a9bdddb1d7176a0bbb"}, + {file = "pyproj-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:7aef19d5a0a3b2d6b17f7dc9a87af722e71139cd1eea7eb82ed062a8a4b0e272"}, + {file = "pyproj-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8078c90cea07d53e3406c7c84cbf76a2ac0ffc580c365f13801575486b9d558c"}, + {file = "pyproj-3.4.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:321b82210dc5271558573d0874b9967c5a25872a28d0168049ddabe8bfecffce"}, + {file = "pyproj-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25a5425cd2a0b16f5f944d49165196eebaa60b898a08c404a644c29e6a7a04b3"}, + {file = "pyproj-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3d70ca5933cddbe6f51396006fb9fc78bc2b1f9d28775922453c4b04625a7efb"}, + {file = "pyproj-3.4.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c240fe6bcb5c325b50fc967d5458d708412633f4f05fefc7fb14c14254ebf421"}, + {file = "pyproj-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef76abfee1a0676ef973470abe11e22998750f2bd944afaf76d44ad70b538c06"}, + {file = "pyproj-3.4.1.tar.gz", hash = "sha256:261eb29b1d55b1eb7f336127344d9b31284d950a9446d1e0d1c2411f7dd8e3ac"}, +] +pyrsistent = [ + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = 
"pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, +] +pyspark = [ + {file = "pyspark-3.3.2.tar.gz", hash = "sha256:0dfd5db4300c1f6cc9c16d8dbdfb82d881b4b172984da71344ede1a9d4893da8"}, +] +pytest = [ + {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"}, + {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"}, +] +pytest-cov = [ + {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, + {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +pytz = [ + {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, + {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, +] +pywin32 = [ + {file = 
"pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"}, + {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"}, + {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"}, + {file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"}, + {file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"}, + {file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = "sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"}, + {file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"}, + {file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"}, + {file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"}, + {file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"}, + {file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"}, + {file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"}, + {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, + {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, +] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = 
"PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = 
"PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +pyzmq = [ + {file = "pyzmq-25.0.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ac178e666c097c8d3deb5097b58cd1316092fc43e8ef5b5fdb259b51da7e7315"}, + {file = "pyzmq-25.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:659e62e1cbb063151c52f5b01a38e1df6b54feccfa3e2509d44c35ca6d7962ee"}, + {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8280ada89010735a12b968ec3ea9a468ac2e04fddcc1cede59cb7f5178783b9c"}, + {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9b5eeb5278a8a636bb0abdd9ff5076bcbb836cd2302565df53ff1fa7d106d54"}, + {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a2e5fe42dfe6b73ca120b97ac9f34bfa8414feb15e00e37415dbd51cf227ef6"}, + {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:827bf60e749e78acb408a6c5af6688efbc9993e44ecc792b036ec2f4b4acf485"}, + {file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7b504ae43d37e282301da586529e2ded8b36d4ee2cd5e6db4386724ddeaa6bbc"}, + {file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb1f69a0a2a2b1aae8412979dd6293cc6bcddd4439bf07e4758d864ddb112354"}, + {file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b9c9cc965cdf28381e36da525dcb89fc1571d9c54800fdcd73e3f73a2fc29bd"}, + {file = "pyzmq-25.0.2-cp310-cp310-win32.whl", hash = "sha256:24abbfdbb75ac5039205e72d6c75f10fc39d925f2df8ff21ebc74179488ebfca"}, + {file = "pyzmq-25.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6a821a506822fac55d2df2085a52530f68ab15ceed12d63539adc32bd4410f6e"}, + {file = "pyzmq-25.0.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:9af0bb0277e92f41af35e991c242c9c71920169d6aa53ade7e444f338f4c8128"}, + {file = "pyzmq-25.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:54a96cf77684a3a537b76acfa7237b1e79a8f8d14e7f00e0171a94b346c5293e"}, + {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88649b19ede1cab03b96b66c364cbbf17c953615cdbc844f7f6e5f14c5e5261c"}, + {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:715cff7644a80a7795953c11b067a75f16eb9fc695a5a53316891ebee7f3c9d5"}, + {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:312b3f0f066b4f1d17383aae509bacf833ccaf591184a1f3c7a1661c085063ae"}, + {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d488c5c8630f7e782e800869f82744c3aca4aca62c63232e5d8c490d3d66956a"}, + {file = 
"pyzmq-25.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:38d9f78d69bcdeec0c11e0feb3bc70f36f9b8c44fc06e5d06d91dc0a21b453c7"}, + {file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3059a6a534c910e1d5d068df42f60d434f79e6cc6285aa469b384fa921f78cf8"}, + {file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6526d097b75192f228c09d48420854d53dfbc7abbb41b0e26f363ccb26fbc177"}, + {file = "pyzmq-25.0.2-cp311-cp311-win32.whl", hash = "sha256:5c5fbb229e40a89a2fe73d0c1181916f31e30f253cb2d6d91bea7927c2e18413"}, + {file = "pyzmq-25.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:ed15e3a2c3c2398e6ae5ce86d6a31b452dfd6ad4cd5d312596b30929c4b6e182"}, + {file = "pyzmq-25.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:032f5c8483c85bf9c9ca0593a11c7c749d734ce68d435e38c3f72e759b98b3c9"}, + {file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:374b55516393bfd4d7a7daa6c3b36d6dd6a31ff9d2adad0838cd6a203125e714"}, + {file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:08bfcc21b5997a9be4fefa405341320d8e7f19b4d684fb9c0580255c5bd6d695"}, + {file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1a843d26a8da1b752c74bc019c7b20e6791ee813cd6877449e6a1415589d22ff"}, + {file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b48616a09d7df9dbae2f45a0256eee7b794b903ddc6d8657a9948669b345f220"}, + {file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d4427b4a136e3b7f85516c76dd2e0756c22eec4026afb76ca1397152b0ca8145"}, + {file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:26b0358e8933990502f4513c991c9935b6c06af01787a36d133b7c39b1df37fa"}, + {file = "pyzmq-25.0.2-cp36-cp36m-win32.whl", hash = "sha256:c8fedc3ccd62c6b77dfe6f43802057a803a411ee96f14e946f4a76ec4ed0e117"}, + {file = "pyzmq-25.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:2da6813b7995b6b1d1307329c73d3e3be2fd2d78e19acfc4eff2e27262732388"}, + {file = "pyzmq-25.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a35960c8b2f63e4ef67fd6731851030df68e4b617a6715dd11b4b10312d19fef"}, + {file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2a0b880ab40aca5a878933376cb6c1ec483fba72f7f34e015c0f675c90b20"}, + {file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:85762712b74c7bd18e340c3639d1bf2f23735a998d63f46bb6584d904b5e401d"}, + {file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64812f29d6eee565e129ca14b0c785744bfff679a4727137484101b34602d1a7"}, + {file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:510d8e55b3a7cd13f8d3e9121edf0a8730b87d925d25298bace29a7e7bc82810"}, + {file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b164cc3c8acb3d102e311f2eb6f3c305865ecb377e56adc015cb51f721f1dda6"}, + {file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:28fdb9224a258134784a9cf009b59265a9dde79582fb750d4e88a6bcbc6fa3dc"}, + {file = "pyzmq-25.0.2-cp37-cp37m-win32.whl", hash = "sha256:dd771a440effa1c36d3523bc6ba4e54ff5d2e54b4adcc1e060d8f3ca3721d228"}, + {file = "pyzmq-25.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:9bdc40efb679b9dcc39c06d25629e55581e4c4f7870a5e88db4f1c51ce25e20d"}, + {file = "pyzmq-25.0.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:1f82906a2d8e4ee310f30487b165e7cc8ed09c009e4502da67178b03083c4ce0"}, + {file = 
"pyzmq-25.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:21ec0bf4831988af43c8d66ba3ccd81af2c5e793e1bf6790eb2d50e27b3c570a"}, + {file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abbce982a17c88d2312ec2cf7673985d444f1beaac6e8189424e0a0e0448dbb3"}, + {file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9e1d2f2d86fc75ed7f8845a992c5f6f1ab5db99747fb0d78b5e4046d041164d2"}, + {file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e92ff20ad5d13266bc999a29ed29a3b5b101c21fdf4b2cf420c09db9fb690e"}, + {file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edbbf06cc2719889470a8d2bf5072bb00f423e12de0eb9ffec946c2c9748e149"}, + {file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:77942243ff4d14d90c11b2afd8ee6c039b45a0be4e53fb6fa7f5e4fd0b59da39"}, + {file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ab046e9cb902d1f62c9cc0eca055b1d11108bdc271caf7c2171487298f229b56"}, + {file = "pyzmq-25.0.2-cp38-cp38-win32.whl", hash = "sha256:ad761cfbe477236802a7ab2c080d268c95e784fe30cafa7e055aacd1ca877eb0"}, + {file = "pyzmq-25.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8560756318ec7c4c49d2c341012167e704b5a46d9034905853c3d1ade4f55bee"}, + {file = "pyzmq-25.0.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:ab2c056ac503f25a63f6c8c6771373e2a711b98b304614151dfb552d3d6c81f6"}, + {file = "pyzmq-25.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cca8524b61c0eaaa3505382dc9b9a3bc8165f1d6c010fdd1452c224225a26689"}, + {file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb9f7eae02d3ac42fbedad30006b7407c984a0eb4189a1322241a20944d61e5"}, + {file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5eaeae038c68748082137d6896d5c4db7927e9349237ded08ee1bbd94f7361c9"}, + {file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a31992a8f8d51663ebf79df0df6a04ffb905063083d682d4380ab8d2c67257c"}, + {file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6a979e59d2184a0c8f2ede4b0810cbdd86b64d99d9cc8a023929e40dce7c86cc"}, + {file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1f124cb73f1aa6654d31b183810febc8505fd0c597afa127c4f40076be4574e0"}, + {file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:65c19a63b4a83ae45d62178b70223adeee5f12f3032726b897431b6553aa25af"}, + {file = "pyzmq-25.0.2-cp39-cp39-win32.whl", hash = "sha256:83d822e8687621bed87404afc1c03d83fa2ce39733d54c2fd52d8829edb8a7ff"}, + {file = "pyzmq-25.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:24683285cc6b7bf18ad37d75b9db0e0fefe58404e7001f1d82bf9e721806daa7"}, + {file = "pyzmq-25.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a4b4261eb8f9ed71f63b9eb0198dd7c934aa3b3972dac586d0ef502ba9ab08b"}, + {file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:62ec8d979f56c0053a92b2b6a10ff54b9ec8a4f187db2b6ec31ee3dd6d3ca6e2"}, + {file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:affec1470351178e892121b3414c8ef7803269f207bf9bef85f9a6dd11cde264"}, + {file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffc71111433bd6ec8607a37b9211f4ef42e3d3b271c6d76c813669834764b248"}, + {file = "pyzmq-25.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:6fadc60970714d86eff27821f8fb01f8328dd36bebd496b0564a500fe4a9e354"}, + {file = "pyzmq-25.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:269968f2a76c0513490aeb3ba0dc3c77b7c7a11daa894f9d1da88d4a0db09835"}, + {file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f7c8b8368e84381ae7c57f1f5283b029c888504aaf4949c32e6e6fb256ec9bf0"}, + {file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25e6873a70ad5aa31e4a7c41e5e8c709296edef4a92313e1cd5fc87bbd1874e2"}, + {file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b733076ff46e7db5504c5e7284f04a9852c63214c74688bdb6135808531755a3"}, + {file = "pyzmq-25.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a6f6ae12478fdc26a6d5fdb21f806b08fa5403cd02fd312e4cb5f72df078f96f"}, + {file = "pyzmq-25.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:67da1c213fbd208906ab3470cfff1ee0048838365135a9bddc7b40b11e6d6c89"}, + {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531e36d9fcd66f18de27434a25b51d137eb546931033f392e85674c7a7cea853"}, + {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34a6fddd159ff38aa9497b2e342a559f142ab365576284bc8f77cb3ead1f79c5"}, + {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b491998ef886662c1f3d49ea2198055a9a536ddf7430b051b21054f2a5831800"}, + {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5d496815074e3e3d183fe2c7fcea2109ad67b74084c254481f87b64e04e9a471"}, + {file = "pyzmq-25.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:56a94ab1d12af982b55ca96c6853db6ac85505e820d9458ac76364c1998972f4"}, + {file = "pyzmq-25.0.2.tar.gz", hash = "sha256:6b8c1bbb70e868dc88801aa532cae6bd4e3b5233784692b786f17ad2962e5149"}, +] +rasterio = [ + {file = "rasterio-1.3.6-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:23a8d10ba17301029962a5667915381a8b4711ed80b712eb71cf68834cb5f946"}, + {file = "rasterio-1.3.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76b6bd4b566cd733f0ddd05ba88bea3f96705ff74e2e5fab73ead2a26cbc5979"}, + {file = "rasterio-1.3.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50785004d7adf66cf96c9c3498cf530ec91292e9349e66e8d1f1183085ee93b1"}, + {file = "rasterio-1.3.6-cp310-cp310-win_amd64.whl", hash = "sha256:9f3f901097c3f306f1143d6fdc503440596c66a2c39054e25604bdf3f4eaaff3"}, + {file = "rasterio-1.3.6-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:a732f8d314b7d9cb532b1969e968d08bf208886f04309662a5d16884af39bb4a"}, + {file = "rasterio-1.3.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d03e2fcd8f3aafb0ea1fa27a021fecc385655630a46c70d6ba693675c6cc3830"}, + {file = "rasterio-1.3.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69fdc712e9c79e82d00d783d23034bb16ca8faa18856e83e297bb7e4d7e3e277"}, + {file = "rasterio-1.3.6-cp311-cp311-win_amd64.whl", hash = "sha256:83f764c2b30e3d07bea5626392f1ce5481e61d5583256ab66f3a610a2f40dec7"}, + {file = "rasterio-1.3.6-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:1321372c653a36928b4e5e11cbe7f851903fb76608b8e48a860168b248d5f8e6"}, + {file = "rasterio-1.3.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8a584fedd92953a0580e8de3f41ce9f33a3205ba79ea58fff8f90ba5d14a0c04"}, + {file = "rasterio-1.3.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:92f0f92254fcce57d25d5f60ef2cf649297f8a1e1fa279b32795bde20f11ff41"}, + {file = "rasterio-1.3.6-cp38-cp38-win_amd64.whl", hash = "sha256:e73339e8fb9b9091a4a0ffd9f84725b2d1f118cf51c35fb0d03b94e82e1736a3"}, + {file = "rasterio-1.3.6-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:eaaeb2e661d1ffc07a7ae4fd997bb326d3561f641178126102842d608a010cc3"}, + {file = "rasterio-1.3.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0883a38bd32e6a3d8d85bac67e3b75a2f04f7de265803585516883223ddbb8d1"}, + {file = "rasterio-1.3.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b72fc032ddca55d73de87ef3872530b7384989378a1bc66d77c69cedafe7feaf"}, + {file = "rasterio-1.3.6-cp39-cp39-win_amd64.whl", hash = "sha256:cb3288add5d55248f5d48815f9d509819ba8985cd0302d2e8dd743f83c5ec96d"}, + {file = "rasterio-1.3.6.tar.gz", hash = "sha256:c8b90eb10e16102d1ab0334a7436185f295de1c07f0d197e206d1c005fc33905"}, +] +s3transfer = [ + {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, + {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, +] +setuptools = [ + {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, + {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, +] +setuptools-scm = [ + {file = "setuptools_scm-7.1.0-py3-none-any.whl", hash = "sha256:73988b6d848709e2af142aa48c986ea29592bbcfca5375678064708205253d8e"}, + {file = "setuptools_scm-7.1.0.tar.gz", hash = "sha256:6c508345a771aad7d56ebff0e70628bf2b0ec7573762be9960214730de278f27"}, +] +shapely = [ + {file = "shapely-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b06d031bc64149e340448fea25eee01360a58936c89985cf584134171e05863f"}, + {file = "shapely-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9a6ac34c16f4d5d3c174c76c9d7614ec8fe735f8f82b6cc97a46b54f386a86bf"}, + {file = "shapely-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:865bc3d7cc0ea63189d11a0b1120d1307ed7a64720a8bfa5be2fde5fc6d0d33f"}, + {file = "shapely-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45b4833235b90bc87ee26c6537438fa77559d994d2d3be5190dd2e54d31b2820"}, + {file = "shapely-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce88ec79df55430e37178a191ad8df45cae90b0f6972d46d867bf6ebbb58cc4d"}, + {file = "shapely-2.0.1-cp310-cp310-win32.whl", hash = "sha256:01224899ff692a62929ef1a3f5fe389043e262698a708ab7569f43a99a48ae82"}, + {file = "shapely-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:da71de5bf552d83dcc21b78cc0020e86f8d0feea43e202110973987ffa781c21"}, + {file = "shapely-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:502e0a607f1dcc6dee0125aeee886379be5242c854500ea5fd2e7ac076b9ce6d"}, + {file = "shapely-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7d3bbeefd8a6a1a1017265d2d36f8ff2d79d0162d8c141aa0d37a87063525656"}, + {file = "shapely-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f470a130d6ddb05b810fc1776d918659407f8d025b7f56d2742a596b6dffa6c7"}, + {file = "shapely-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4641325e065fd3e07d55677849c9ddfd0cf3ee98f96475126942e746d55b17c8"}, + {file = "shapely-2.0.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:90cfa4144ff189a3c3de62e2f3669283c98fb760cfa2e82ff70df40f11cadb39"}, + {file = "shapely-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70a18fc7d6418e5aea76ac55dce33f98e75bd413c6eb39cfed6a1ba36469d7d4"}, + {file = "shapely-2.0.1-cp311-cp311-win32.whl", hash = "sha256:09d6c7763b1bee0d0a2b84bb32a4c25c6359ad1ac582a62d8b211e89de986154"}, + {file = "shapely-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:d8f55f355be7821dade839df785a49dc9f16d1af363134d07eb11e9207e0b189"}, + {file = "shapely-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:83a8ec0ee0192b6e3feee9f6a499d1377e9c295af74d7f81ecba5a42a6b195b7"}, + {file = "shapely-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a529218e72a3dbdc83676198e610485fdfa31178f4be5b519a8ae12ea688db14"}, + {file = "shapely-2.0.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91575d97fd67391b85686573d758896ed2fc7476321c9d2e2b0c398b628b961c"}, + {file = "shapely-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8b0d834b11be97d5ab2b4dceada20ae8e07bcccbc0f55d71df6729965f406ad"}, + {file = "shapely-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:b4f0711cc83734c6fad94fc8d4ec30f3d52c1787b17d9dca261dc841d4731c64"}, + {file = "shapely-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:05c51a29336e604c084fb43ae5dbbfa2c0ef9bd6fedeae0a0d02c7b57a56ba46"}, + {file = "shapely-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b519cf3726ddb6c67f6a951d1bb1d29691111eaa67ea19ddca4d454fbe35949c"}, + {file = "shapely-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:193a398d81c97a62fc3634a1a33798a58fd1dcf4aead254d080b273efbb7e3ff"}, + {file = "shapely-2.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e55698e0ed95a70fe9ff9a23c763acfe0bf335b02df12142f74e4543095e9a9b"}, + {file = "shapely-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f32a748703e7bf6e92dfa3d2936b2fbfe76f8ce5f756e24f49ef72d17d26ad02"}, + {file = "shapely-2.0.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a34a23d6266ca162499e4a22b79159dc0052f4973d16f16f990baa4d29e58b6"}, + {file = "shapely-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d173d24e85e51510e658fb108513d5bc11e3fd2820db6b1bd0522266ddd11f51"}, + {file = "shapely-2.0.1-cp38-cp38-win32.whl", hash = "sha256:3cb256ae0c01b17f7bc68ee2ffdd45aebf42af8992484ea55c29a6151abe4386"}, + {file = "shapely-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c7eed1fb3008a8a4a56425334b7eb82651a51f9e9a9c2f72844a2fb394f38a6c"}, + {file = "shapely-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ac1dfc397475d1de485e76de0c3c91cc9d79bd39012a84bb0f5e8a199fc17bef"}, + {file = "shapely-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33403b8896e1d98aaa3a52110d828b18985d740cc9f34f198922018b1e0f8afe"}, + {file = "shapely-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2569a4b91caeef54dd5ae9091ae6f63526d8ca0b376b5bb9fd1a3195d047d7d4"}, + {file = "shapely-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a70a614791ff65f5e283feed747e1cc3d9e6c6ba91556e640636bbb0a1e32a71"}, + {file = "shapely-2.0.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c43755d2c46b75a7b74ac6226d2cc9fa2a76c3263c5ae70c195c6fb4e7b08e79"}, + {file = "shapely-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad81f292fffbd568ae71828e6c387da7eb5384a79db9b4fde14dd9fdeffca9a"}, + 
{file = "shapely-2.0.1-cp39-cp39-win32.whl", hash = "sha256:b50c401b64883e61556a90b89948297f1714dbac29243d17ed9284a47e6dd731"}, + {file = "shapely-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:bca57b683e3d94d0919e2f31e4d70fdfbb7059650ef1b431d9f4e045690edcd5"}, + {file = "shapely-2.0.1.tar.gz", hash = "sha256:66a6b1a3e72ece97fc85536a281476f9b7794de2e646ca8a4517e2e3c1446893"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +snuggs = [ + {file = "snuggs-1.4.7-py3-none-any.whl", hash = "sha256:988dde5d4db88e9d71c99457404773dabcc7a1c45971bfbe81900999942d9f07"}, + {file = "snuggs-1.4.7.tar.gz", hash = "sha256:501cf113fe3892e14e2fee76da5cd0606b7e149c411c271898e6259ebde2617b"}, +] +soupsieve = [ + {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, + {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, +] +stack-data = [ + {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, + {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, +] +tinycss2 = [ + {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, + {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +tornado = [ + {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, + {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, + {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, + {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, + {file = "tornado-6.2.tar.gz", hash = 
"sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, +] +traitlets = [ + {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, + {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, +] +typer = [ + {file = "typer-0.7.0-py3-none-any.whl", hash = "sha256:b5e704f4e48ec263de1c0b3a2387cd405a13767d2f907f44c1a08cbad96f606d"}, + {file = "typer-0.7.0.tar.gz", hash = "sha256:ff797846578a9f2a201b53442aedeb543319466870fbe1c701eab66dd7681165"}, +] +typing-extensions = [ + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, +] +urllib3 = [ + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, +] +virtualenv = [ + {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, + {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, +] +wcwidth = [ + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, +] +webencodings = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] +wheel = [ + {file = "wheel-0.38.4-py3-none-any.whl", hash = "sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8"}, + {file = "wheel-0.38.4.tar.gz", hash = "sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac"}, +] +zipp = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] diff --git a/project/RFAssemblyPlugin.scala b/project/RFAssemblyPlugin.scala index a3bb2038c..c45a59cb2 100644 --- a/project/RFAssemblyPlugin.scala +++ b/project/RFAssemblyPlugin.scala @@ -27,8 +27,8 @@ import sbtassembly.AssemblyPlugin.autoImport.{ShadeRule, _} import scala.util.matching.Regex /** - * Standard support for creating assembly jars. - */ + * Standard support for creating assembly jars. 
+ */ object RFAssemblyPlugin extends AutoPlugin { override def requires = AssemblyPlugin && RFDependenciesPlugin @@ -48,7 +48,7 @@ object RFAssemblyPlugin extends AutoPlugin { "scalatest.*".r, "junit.*".r ), - assembly / assemblyShadeRules:= { + assembly / assemblyShadeRules := { val shadePrefixes = Seq( "shapeless", "com.github.ben-manes.caffeine", @@ -69,7 +69,7 @@ object RFAssemblyPlugin extends AutoPlugin { }, assembly / assemblyOption := (assembly / assemblyOption).value.withIncludeScala(false), - assembly / assemblyJarName := s"${normalizedName.value}-assembly-${version.value}.jar", + assembly / assemblyOutputPath := (ThisBuild / baseDirectory).value / "dist" / s"${normalizedName.value}-assembly-${version.value}.jar", assembly / assemblyExcludedJars := { val cp = (assembly / fullClasspath).value val excludedJarPatterns = autoImport.assemblyExcludedJarPatterns.value @@ -85,15 +85,17 @@ object RFAssemblyPlugin extends AutoPlugin { // org.threeten % threeten-extra % 1.6.0 case "module-info.class" => MergeStrategy.discard case x if Assembly.isConfigFile(x) => MergeStrategy.concat - case PathList(ps @ _*) if Assembly.isReadme(ps.last) || Assembly.isLicenseFile(ps.last) => + case PathList(ps@_*) if Assembly.isReadme(ps.last) || Assembly.isLicenseFile(ps.last) => MergeStrategy.rename - case PathList("META-INF", xs @ _*) => - xs map { _.toLowerCase } match { + case PathList("META-INF", xs@_*) => + xs map { + _.toLowerCase + } match { case "manifest.mf" :: Nil | "index.list" :: Nil | "dependencies" :: Nil => MergeStrategy.discard case "io.netty.versions.properties" :: Nil => MergeStrategy.concat - case ps @ x :: _ if ps.last.endsWith(".sf") || ps.last.endsWith(".dsa") => + case ps@x :: _ if ps.last.endsWith(".sf") || ps.last.endsWith(".dsa") => MergeStrategy.discard case "plexus" :: _ => MergeStrategy.discard diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..d2eca0547 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,76 @@ +[tool.poetry] +name = "pyrasterframes" +version = "0.0.0" # versioning is handled by poetry-dynamic-versioning +authors = ["Astraea, Inc. 
"] +description = "Access and process geospatial raster data in PySpark DataFrames" +homepage = "https://rasterframes.io" +license = "Apache-2.0" +readme = "python/README.md" +classifiers = [ + "Development Status :: 4 - Beta", + "Environment :: Other Environment", + "License :: OSI Approved :: Apache Software License", + "Natural Language :: English", + "Operating System :: Unix", + "Programming Language :: Python :: 3", + "Topic :: Software Development :: Libraries", + "Topic :: Scientific/Engineering :: GIS", + "Topic :: Multimedia :: Graphics :: Graphics Conversion", +] +packages = [ + { include = "geomesa_pyspark", from = "python" }, + { include = "pyrasterframes", from = "python"}, +] + +[tool.poetry-dynamic-versioning] +enable = true +vcs = "git" +pattern = "^((?P\\d+)!)?(?P\\d+(\\.\\d+)*)" + +[tool.poetry-dynamic-versioning.substitution] +files = ["python/pyrasterframes/version.py"] + +[tool.poetry.dependencies] +python = ">=3.8,<4" +shapely = "^2.0.0" +pyproj = "^3.4.1" +deprecation = "^2.1.0" +matplotlib = "^3.6.3" +pandas = "^1.5.3" +py4j = "^0.10.9.3" +pyspark = "3.3.2" +numpy = "^1.24.1" + + +[tool.poetry.group.dev.dependencies] +pre-commit = "^2.21.0" +rasterio = {extras = ["s3"], version = "^1.3.5"} +wheel = "^0.38.4" +ipython = "^8.7.0" +pweave = "^0.30.3" +ipython-genutils = "^0.2.0" +typer = "^0.7.0" +pytest = "^7.2.1" +pytest-cov = "^4.0.0" +geopandas = "^0.12.2" +isort = "^5.11.4" +black = "^22.12.0" + + +[tool.pytest.ini_options] +addopts = "--verbose" +testpaths = ["tests"] +python_files = "*.py" + + +[tool.black] +line-length = 100 +target-version = ["py38"] + +[tool.isort] +profile = "black" +line_length = 100 + +[build-system] +requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"] +build-backend = "poetry_dynamic_versioning.backend" diff --git a/pyrasterframes/src/main/python/.gitignore b/pyrasterframes/src/main/python/.gitignore deleted file mode 100644 index d43a8f7ce..000000000 --- a/pyrasterframes/src/main/python/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -.coverage -htmlcov - diff --git a/pyrasterframes/src/main/python/MANIFEST.in b/pyrasterframes/src/main/python/MANIFEST.in deleted file mode 100644 index 88f63e05a..000000000 --- a/pyrasterframes/src/main/python/MANIFEST.in +++ /dev/null @@ -1,3 +0,0 @@ - -global-exclude *.py[cod] __pycache__ .DS_Store -recursive-include pyrasterframes/jars *.jar diff --git a/pyrasterframes/src/main/python/docs/__init__.py b/pyrasterframes/src/main/python/docs/__init__.py deleted file mode 100644 index 0fa3d800b..000000000 --- a/pyrasterframes/src/main/python/docs/__init__.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# This software is licensed under the Apache 2 license, quoted below. -# -# Copyright 2019 Astraea, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# [http://www.apache.org/licenses/LICENSE-2.0] -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. -# -# SPDX-License-Identifier: Apache-2.0 -# - -from pweave import PwebPandocFormatter - -# Setuptools/easy_install doesn't properly set the execute bit on the Spark scripts, -# So this preemptively attempts to do it. 
-def _chmodit():
-    try:
-        from importlib.util import find_spec
-        import os
-        module_home = find_spec("pyspark").origin
-        print(module_home)
-        bin_dir = os.path.join(os.path.dirname(module_home), 'bin')
-        for filename in os.listdir(bin_dir):
-            try:
-                os.chmod(os.path.join(bin_dir, filename), mode=0o555, follow_symlinks=True)
-            except OSError:
-                pass
-    except ImportError:
-        pass
-
-_chmodit()
-
-
-class PegdownMarkdownFormatter(PwebPandocFormatter):
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-
-    # Pegdown doesn't support the width and label options.
-    def make_figure_string(self, figname, width, label, caption=""):
-        return "![%s](%s)" % (caption, figname)
diff --git a/pyrasterframes/src/main/python/pyrasterframes/utils.py b/pyrasterframes/src/main/python/pyrasterframes/utils.py
deleted file mode 100644
index e88bde590..000000000
--- a/pyrasterframes/src/main/python/pyrasterframes/utils.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#
-# This software is licensed under the Apache 2 license, quoted below.
-#
-# Copyright 2019 Astraea, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-# [http://www.apache.org/licenses/LICENSE-2.0]
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-import glob
-from pyspark.sql import SparkSession
-from pyspark import SparkConf
-import os
-from . import RFContext
-from typing import Union, Dict, Optional
-
-__all__ = ["create_rf_spark_session", "find_pyrasterframes_jar_dir", "find_pyrasterframes_assembly", "gdal_version", 'build_info', 'quiet_logs']
-
-
-def find_pyrasterframes_jar_dir() -> str:
-    """
-    Locates the directory where JVM libraries for Spark are stored.
-    :return: path to jar directory as a string
-    """
-    jar_dir = None
-
-    from importlib.util import find_spec
-    try:
-        module_home = find_spec("pyrasterframes").origin
-        jar_dir = os.path.join(os.path.dirname(module_home), 'jars')
-    except ImportError as e:
-        import logging
-        logging.critical("Error finding runtime JAR directory", exc_info=e)
-        raise e
-
-    return os.path.realpath(jar_dir)
-
-
-def find_pyrasterframes_assembly() -> Union[bytes, str]:
-    jar_dir = find_pyrasterframes_jar_dir()
-    jarpath = glob.glob(os.path.join(jar_dir, 'pyrasterframes-assembly*.jar'))
-
-    if not len(jarpath) == 1:
-        raise RuntimeError(f"""
-Expected to find exactly one assembly in '{jar_dir}'.
-Found '{jarpath}' instead.""")
-    return jarpath[0]
-
-
-def quiet_logs(sc):
-    logger = sc._jvm.org.apache.log4j
-    logger.LogManager.getLogger("geotrellis.raster.gdal").setLevel(logger.Level.ERROR)
-    logger.LogManager.getLogger("akka").setLevel(logger.Level.ERROR)
-
-
-def create_rf_spark_session(master="local[*]", **kwargs: str) -> Optional[SparkSession]:
-    """ Create a SparkSession with pyrasterframes enabled and configured.
""" - jar_path = find_pyrasterframes_assembly() - - if 'spark.jars' in kwargs.keys(): - if 'pyrasterframes' not in kwargs['spark.jars']: - raise UserWarning("spark.jars config is set, but it seems to be missing the pyrasterframes assembly jar.") - - conf = SparkConf().setAll([(k, kwargs[k]) for k in kwargs]) - - spark = (SparkSession.builder - .master(master) - .appName("RasterFrames") - .config('spark.jars', jar_path) - .withKryoSerialization() - .config(conf=conf) # user can override the defaults - .getOrCreate()) - - quiet_logs(spark) - - try: - spark.withRasterFrames() - return spark - except TypeError as te: - print("Error setting up SparkSession; cannot find the pyrasterframes assembly jar\n", te) - return None - - -def gdal_version() -> str: - fcn = RFContext.active().lookup("buildInfo") - return fcn()["GDAL"] - - -def build_info() -> Dict[str, str]: - fcn = RFContext.active().lookup("buildInfo") - return fcn() diff --git a/pyrasterframes/src/main/python/requirements-condaforge.txt b/pyrasterframes/src/main/python/requirements-condaforge.txt deleted file mode 100644 index 827a7f431..000000000 --- a/pyrasterframes/src/main/python/requirements-condaforge.txt +++ /dev/null @@ -1,4 +0,0 @@ -# These packages should be installed from conda-forge, given their complex binary components. -gdal -rasterio[s3] -rtree diff --git a/pyrasterframes/src/main/python/setup.cfg b/pyrasterframes/src/main/python/setup.cfg deleted file mode 100644 index 4d9369ec4..000000000 --- a/pyrasterframes/src/main/python/setup.cfg +++ /dev/null @@ -1,13 +0,0 @@ -[metadata] -license_files = LICENSE.txt - -[bdist_wheel] -universal = 0 - -[aliases] -test = pytest - -[tool:pytest] -addopts = --verbose -testpaths = tests -python_files = *.py diff --git a/pyrasterframes/src/main/python/setup.py b/pyrasterframes/src/main/python/setup.py deleted file mode 100644 index 8f70b36b0..000000000 --- a/pyrasterframes/src/main/python/setup.py +++ /dev/null @@ -1,256 +0,0 @@ -# -# This software is licensed under the Apache 2 license, quoted below. -# -# Copyright 2019 Astraea, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# [http://www.apache.org/licenses/LICENSE-2.0] -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. 
-# -# SPDX-License-Identifier: Apache-2.0 -# - -# Always prefer setuptools over distutils -from setuptools import setup -from os import path, environ, mkdir -import sys -from glob import glob -from io import open -import distutils.cmd - -try: - enver = environ.get('RASTERFRAMES_VERSION') - if enver is not None: - open('pyrasterframes/version.py', mode="w").write(f"__version__: str = '{enver}'\n") - exec(open('pyrasterframes/version.py').read()) # executable python script contains __version__; credit pyspark -except IOError as e: - print(e) - print("Try running setup via `sbt 'pySetup arg1 arg2'` to ensure correct access to all source files and binaries.") - sys.exit(-1) - -VERSION = __version__ -print(f"setup.py sees the version as {VERSION}") - -here = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(here, 'README.md'), encoding='utf-8') as f: - readme = f.read() - - -def _divided(msg): - divider = ('-' * 50) - return divider + '\n' + msg + '\n' + divider - - -class PweaveDocs(distutils.cmd.Command): - """A custom command to run documentation scripts through pweave.""" - description = 'Pweave PyRasterFrames documentation scripts' - user_options = [ - # The format is (long option, short option, description). - ('files=', 's', 'Specific files to pweave. Defaults to all in `docs` directory.'), - ('format=', 'f', 'Output format type. Defaults to `markdown`'), - ('quick=', 'q', 'Check to see if the source file is newer than existing output before building. Defaults to `False`.') - ] - - def initialize_options(self): - """Set default values for options.""" - # Each user option must be listed here with their default value. - self.files = filter( - lambda x: not path.basename(x)[:1] == '_', - glob(path.join(here, 'docs', '*.pymd')) - ) - self.format = 'markdown' - self.quick = False - - def finalize_options(self): - """Post-process options.""" - import re - if isinstance(self.files, str): - self.files = filter(lambda s: len(s) > 0, re.split(',', self.files)) - # `html` doesn't do quite what one expects... only replaces code blocks, leaving markdown in place - print("format.....", self.format) - if self.format.strip() == 'html': - self.format = 'pandoc2html' - if isinstance(self.quick, str): - self.quick = self.quick == 'True' or self.quick == 'true' - - def dest_file(self, src_file): - return path.splitext(src_file)[0] + '.md' - - def run(self): - """Run pweave.""" - import traceback - import pweave - from docs import PegdownMarkdownFormatter - - bad_words = ["Error"] - pweave.rcParams["chunk"]["defaultoptions"].update({'wrap': False, 'dpi': 175}) - if self.format == 'markdown': - pweave.PwebFormats.formats['markdown'] = { - 'class': PegdownMarkdownFormatter, - 'description': 'Pegdown compatible markdown' - } - if self.format == 'notebook': - # Just convert to an unevaluated notebook. 
- pweave.rcParams["chunk"]["defaultoptions"].update({'evaluate': False}) - - for file in sorted(self.files, reverse=False): - name = path.splitext(path.basename(file))[0] - dest = self.dest_file(file) - - if (not self.quick) or (not path.exists(dest)) or (path.getmtime(dest) < path.getmtime(file)): - print(_divided('Running %s' % name)) - try: - pweave.weave(file=str(file), doctype=self.format) - if self.format == 'markdown': - if not path.exists(dest): - raise FileNotFoundError("Markdown file '%s' didn't get created as expected" % dest) - with open(dest, "r") as result: - for (n, line) in enumerate(result): - for word in bad_words: - if word in line: - raise ChildProcessError("Error detected on line %s in %s:\n%s" % (n + 1, dest, line)) - - except Exception: - print(_divided('%s Failed:' % file)) - print(traceback.format_exc()) - exit(1) - else: - print(_divided('Skipping %s' % name)) - - -class PweaveNotebooks(PweaveDocs): - def initialize_options(self): - super().initialize_options() - self.format = 'notebook' - - def dest_file(self, src_file): - return path.splitext(src_file)[0] + '.ipynb' - -# WARNING: Changing this version bounding will result in branca's use of jinja2 -# to throw a `NotImplementedError: Can't perform this operation for unregistered loader type` -pytest = 'pytest>=4.0.0,<5.0.0' - -pyspark = 'pyspark==3.2.1' -boto3 = 'boto3' -deprecation = 'deprecation' -descartes = 'descartes' -matplotlib = 'matplotlib' -fiona = 'fiona' -folium = 'folium' -gdal = 'gdal' -geopandas = 'geopandas' -ipykernel = 'ipykernel' -ipython = 'ipython' -numpy = 'numpy' -pandas = 'pandas' -pypandoc = 'pypandoc' -pyproj = 'pyproj' -pytest_runner = 'pytest-runner' -pytz = 'pytz' -rasterio = 'rasterio' -requests = 'requests' -setuptools = 'setuptools' -shapely = 'Shapely' -tabulate = 'tabulate' -tqdm = 'tqdm' -utm = 'utm' - -# Documentation build stuff. 
Until we can replace pweave, these pins are necessary -pweave = 'pweave==0.30.3' -jupyter_client = 'jupyter-client<6.0' # v6 breaks pweave -nbclient = 'nbclient==0.1.0' # compatible with our pweave => jupyter_client restrictions -nbconvert = 'nbconvert==5.5.0' - -setup( - name='pyrasterframes', - description='Access and process geospatial raster data in PySpark DataFrames', - long_description=readme, - long_description_content_type='text/markdown', - version=VERSION, - author='Astraea, Inc.', - author_email='info@astraea.earth', - license='Apache 2', - url='https://rasterframes.io', - project_urls={ - 'Bug Reports': 'https://github.com/locationtech/rasterframes/issues', - 'Source': 'https://github.com/locationtech/rasterframes', - }, - python_requires=">=3.7", - install_requires=[ - gdal, - pytz, - shapely, - pyspark, - numpy, - pandas, - pyproj, - tabulate, - deprecation, - ], - setup_requires=[ - pytz, - shapely, - pyspark, - numpy, - matplotlib, - pandas, - geopandas, - requests, - pytest_runner, - setuptools, - ipython, - pweave, - jupyter_client, - nbclient, - nbconvert, - fiona, - rasterio, - folium, - ], - tests_require=[ - pytest, - pypandoc, - numpy, - shapely, - pandas, - rasterio, - boto3, - pweave - ], - packages=[ - 'pyrasterframes', - 'geomesa_pyspark', - 'pyrasterframes.jars', - ], - package_data={ - 'pyrasterframes.jars': ['*.jar'] - }, - include_package_data=True, - classifiers=[ - 'Development Status :: 4 - Beta', - 'Environment :: Other Environment', - 'License :: OSI Approved :: Apache Software License', - 'Natural Language :: English', - 'Operating System :: Unix', - 'Programming Language :: Python :: 3', - 'Topic :: Software Development :: Libraries', - 'Topic :: Scientific/Engineering :: GIS', - 'Topic :: Multimedia :: Graphics :: Graphics Conversion', - ], - zip_safe=False, - test_suite="pytest-runner", - cmdclass={ - 'pweave': PweaveDocs, - 'notebooks': PweaveNotebooks - } -) diff --git a/pyrasterframes/src/main/python/tests/ExploderTests.py b/pyrasterframes/src/main/python/tests/ExploderTests.py deleted file mode 100644 index 4b24f2f6b..000000000 --- a/pyrasterframes/src/main/python/tests/ExploderTests.py +++ /dev/null @@ -1,71 +0,0 @@ -# -# This software is licensed under the Apache 2 license, quoted below. -# -# Copyright 2019 Astraea, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# [http://www.apache.org/licenses/LICENSE-2.0] -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. -# -# SPDX-License-Identifier: Apache-2.0 -# - -from . 
import TestEnvironment
-
-from pyrasterframes.rasterfunctions import *
-from pyrasterframes.rf_types import *
-from pyrasterframes import TileExploder
-
-from pyspark.ml.feature import VectorAssembler
-from pyspark.ml import Pipeline, PipelineModel
-from pyspark.sql.functions import *
-
-import unittest
-
-
-class ExploderTests(TestEnvironment):
-
-    def test_tile_exploder_pipeline_for_prt(self):
-        # NB the tile is a Projected Raster Tile
-        df = self.spark.read.raster(self.img_uri)
-        t_col = 'proj_raster'
-        self.assertTrue(t_col in df.columns)
-
-        assembler = VectorAssembler().setInputCols([t_col])
-        pipe = Pipeline().setStages([TileExploder(), assembler])
-        pipe_model = pipe.fit(df)
-        transformed_df = pipe_model.transform(df)
-        self.assertTrue(transformed_df.count() > df.count())
-
-    def test_tile_exploder_pipeline_for_tile(self):
-        t_col = 'tile'
-        df = self.spark.read.raster(self.img_uri) \
-            .withColumn(t_col, rf_tile('proj_raster')) \
-            .drop('proj_raster')
-
-        assembler = VectorAssembler().setInputCols([t_col])
-        pipe = Pipeline().setStages([TileExploder(), assembler])
-        pipe_model = pipe.fit(df)
-        transformed_df = pipe_model.transform(df)
-        self.assertTrue(transformed_df.count() > df.count())
-
-    def test_tile_exploder_read_write(self):
-        path = 'test_tile_exploder_read_write.pipe'
-        df = self.spark.read.raster(self.img_uri)
-
-        assembler = VectorAssembler().setInputCols(['proj_raster'])
-        pipe = Pipeline().setStages([TileExploder(), assembler])
-
-        pipe.fit(df).write().overwrite().save(path)
-
-        read_pipe = PipelineModel.load(path)
-        self.assertEqual(len(read_pipe.stages), 2)
-        self.assertTrue(isinstance(read_pipe.stages[0], TileExploder))
diff --git a/pyrasterframes/src/main/python/tests/GeoTiffWriterTests.py b/pyrasterframes/src/main/python/tests/GeoTiffWriterTests.py
deleted file mode 100644
index e8f34f3a4..000000000
--- a/pyrasterframes/src/main/python/tests/GeoTiffWriterTests.py
+++ /dev/null
@@ -1,89 +0,0 @@
-#
-# This software is licensed under the Apache 2 license, quoted below.
-#
-# Copyright 2019 Astraea, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-# [http://www.apache.org/licenses/LICENSE-2.0]
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-import os
-import tempfile
-
-from .
import TestEnvironment -import rasterio - - -class GeoTiffWriter(TestEnvironment): - - @staticmethod - def _tmpfile(): - return os.path.join(tempfile.gettempdir(), "pyrf-test.tif") - - def test_identity_write(self): - rf = self.spark.read.geotiff(self.img_uri) - rf_count = rf.count() - self.assertTrue(rf_count > 0) - - dest = self._tmpfile() - rf.write.geotiff(dest) - - rf2 = self.spark.read.geotiff(dest) - - self.assertEqual(rf2.count(), rf.count()) - - os.remove(dest) - - def test_unstructured_write(self): - rf = self.spark.read.raster(self.img_uri) - dest_file = self._tmpfile() - rf.write.geotiff(dest_file, crs='EPSG:32616') - - rf2 = self.spark.read.raster(dest_file) - self.assertEqual(rf2.count(), rf.count()) - - with rasterio.open(self.img_uri) as source: - with rasterio.open(dest_file) as dest: - self.assertEqual((dest.width, dest.height), (source.width, source.height)) - self.assertEqual(dest.bounds, source.bounds) - self.assertEqual(dest.crs, source.crs) - - os.remove(dest_file) - - def test_unstructured_write_schemaless(self): - # should be able to write a projected raster tile column to path like '/data/foo/file.tif' - from pyrasterframes.rasterfunctions import rf_agg_stats, rf_crs - rf = self.spark.read.raster(self.img_uri) - max = rf.agg(rf_agg_stats('proj_raster').max.alias('max')).first()['max'] - crs = rf.select(rf_crs('proj_raster').alias('crs')).first()['crs'] - - dest_file = self._tmpfile() - self.assertTrue(not dest_file.startswith('file://')) - rf.write.geotiff(dest_file, crs=crs) - - with rasterio.open(dest_file) as src: - self.assertEqual(src.read().max(), max) - - os.remove(dest_file) - - def test_downsampled_write(self): - rf = self.spark.read.raster(self.img_uri) - dest = self._tmpfile() - rf.write.geotiff(dest, crs='EPSG:32616', raster_dimensions=(128, 128)) - - with rasterio.open(dest) as f: - self.assertEqual((f.width, f.height), (128, 128)) - - os.remove(dest) - diff --git a/pyrasterframes/src/main/python/tests/GeotrellisTests.py b/pyrasterframes/src/main/python/tests/GeotrellisTests.py deleted file mode 100644 index da7373d54..000000000 --- a/pyrasterframes/src/main/python/tests/GeotrellisTests.py +++ /dev/null @@ -1,72 +0,0 @@ -# -# This software is licensed under the Apache 2 license, quoted below. -# -# Copyright 2019 Astraea, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# [http://www.apache.org/licenses/LICENSE-2.0] -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. -# -# SPDX-License-Identifier: Apache-2.0 -import os -import shutil -import tempfile -import pathlib -from . 
import TestEnvironment -from unittest import skipIf -import os - - -class GeotrellisTests(TestEnvironment): - - on_circle_ci = os.environ.get('CIRCLECI', 'false') == 'true' - - @skipIf(on_circle_ci, 'CircleCI has java.lang.NoClassDefFoundError fs2/Stream when taking action on rf_gt') - def test_write_geotrellis_layer(self): - rf = self.spark.read.geotiff(self.img_uri).cache() - rf_count = rf.count() - self.assertTrue(rf_count > 0) - - layer = "gt_layer" - zoom = 0 - - dest = tempfile.mkdtemp() - dest_uri = pathlib.Path(dest).as_uri() - rf.write.option("layer", layer).option("zoom", zoom).geotrellis(dest_uri) - - rf_gt = self.spark.read.format("geotrellis").option("layer", layer).option("zoom", zoom).load(dest_uri) - rf_gt_count = rf_gt.count() - self.assertTrue(rf_gt_count > 0) - - _ = rf_gt.take(1) - - shutil.rmtree(dest, ignore_errors=True) - - @skipIf(on_circle_ci, 'CircleCI has java.lang.NoClassDefFoundError fs2/Stream when taking action on rf_gt') - def test_write_geotrellis_multiband_layer(self): - rf = self.spark.read.geotiff(self.img_rgb_uri).cache() - rf_count = rf.count() - self.assertTrue(rf_count > 0) - - layer = "gt_multiband_layer" - zoom = 0 - - dest = tempfile.mkdtemp() - dest_uri = pathlib.Path(dest).as_uri() - rf.write.option("layer", layer).option("zoom", zoom).geotrellis(dest_uri) - - rf_gt = self.spark.read.format("geotrellis").option("layer", layer).option("zoom", zoom).load(dest_uri) - rf_gt_count = rf_gt.count() - self.assertTrue(rf_gt_count > 0) - - _ = rf_gt.take(1) - - shutil.rmtree(dest, ignore_errors=True) diff --git a/pyrasterframes/src/main/python/tests/IpythonTests.py b/pyrasterframes/src/main/python/tests/IpythonTests.py deleted file mode 100644 index d5bd4db29..000000000 --- a/pyrasterframes/src/main/python/tests/IpythonTests.py +++ /dev/null @@ -1,90 +0,0 @@ -# -# This software is licensed under the Apache 2 license, quoted below. -# -# Copyright 2019 Astraea, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# [http://www.apache.org/licenses/LICENSE-2.0] -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. -# -# SPDX-License-Identifier: Apache-2.0 -# - -from unittest import skip - - -import pyrasterframes -from pyrasterframes.rf_types import * - -import numpy as np - -from py4j.protocol import Py4JJavaError -from IPython.testing import globalipapp -from . 
import TestEnvironment
-
-
-class IpythonTests(TestEnvironment):
-
-    @classmethod
-    def setUpClass(cls):
-        super().setUpClass()
-        globalipapp.start_ipython()
-
-    @classmethod
-    def tearDownClass(cls) -> None:
-        globalipapp.get_ipython().atexit_operations()
-
-    @skip("Pending fix for issue #458")
-    def test_all_nodata_tile(self):
-        # https://github.com/locationtech/rasterframes/issues/458
-
-        from pyspark.sql.types import StructType, StructField
-        from pyspark.sql import Row
-        df = self.spark.createDataFrame([
-            Row(
-                tile=Tile(np.array([[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], dtype='float64'),
-                          CellType.float64())
-            ),
-            Row(tile=None)
-        ], schema=StructType([StructField('tile', TileUDT(), True)]))
-
-        try:
-            pyrasterframes.rf_ipython.spark_df_to_html(df)
-        except Py4JJavaError:
-            self.fail("test_all_nodata_tile failed with Py4JJavaError")
-        except Exception as e:
-            self.fail("test_all_nodata_tile failed with an unexpected exception: " + repr(e))
-
-    def test_display_extension(self):
-        # noinspection PyUnresolvedReferences
-        import pyrasterframes.rf_ipython
-
-        self.create_layer()
-        ip = globalipapp.get_ipython()
-
-        num_rows = 2
-
-        result = {}
-
-        def counter(data, md):
-            nonlocal result
-            result['payload'] = (data, md)
-            # count rendered HTML table rows in the payload
-            result['row_count'] = data.count('<tr>')
-        ip.mime_renderers['text/html'] = counter
-
-        # ip.mime_renderers['text/markdown'] = lambda a, b: print(a, b)
-
-        self.df.display(num_rows=num_rows)
-
-        # Plus one for the header row.
-        self.assertEqual(result['row_count'], num_rows + 1, msg=f"Received: {result['payload']}")
-
diff --git a/pyrasterframes/src/main/python/tests/PyRasterFramesTests.py b/pyrasterframes/src/main/python/tests/PyRasterFramesTests.py
deleted file mode 100644
index 620e96a6c..000000000
--- a/pyrasterframes/src/main/python/tests/PyRasterFramesTests.py
+++ /dev/null
@@ -1,367 +0,0 @@
-#
-# This software is licensed under the Apache 2 license, quoted below.
-#
-# Copyright 2019 Astraea, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-# [http://www.apache.org/licenses/LICENSE-2.0]
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-import unittest
-
-import numpy as np
-from pyrasterframes.rasterfunctions import *
-from pyrasterframes.rf_types import *
-from pyspark.sql import SQLContext
-from pyspark.sql.functions import *
-from pyspark.sql import Row
-
-from . import TestEnvironment
-
-class UtilTest(TestEnvironment):
-
-    def test_spark_confs(self):
-        from . 
import app_name - self.assertEqual(self.spark.conf.get('spark.app.name'), app_name) - self.assertEqual(self.spark.conf.get('spark.ui.enabled'), 'false') - - -class CellTypeHandling(unittest.TestCase): - - def test_is_raw(self): - self.assertTrue(CellType("float32raw").is_raw()) - self.assertFalse(CellType("float64ud1234").is_raw()) - self.assertFalse(CellType("float32").is_raw()) - self.assertTrue(CellType("int8raw").is_raw()) - self.assertFalse(CellType("uint16d12").is_raw()) - self.assertFalse(CellType("int32").is_raw()) - - def test_is_floating_point(self): - self.assertTrue(CellType("float32raw").is_floating_point()) - self.assertTrue(CellType("float64ud1234").is_floating_point()) - self.assertTrue(CellType("float32").is_floating_point()) - self.assertFalse(CellType("int8raw").is_floating_point()) - self.assertFalse(CellType("uint16d12").is_floating_point()) - self.assertFalse(CellType("int32").is_floating_point()) - - def test_cell_type_no_data(self): - import math - self.assertIsNone(CellType.bool().no_data_value()) - - self.assertTrue(CellType.int8().has_no_data()) - self.assertEqual(CellType.int8().no_data_value(), -128) - - self.assertTrue(CellType.uint8().has_no_data()) - self.assertEqual(CellType.uint8().no_data_value(), 0) - - self.assertTrue(CellType.int16().has_no_data()) - self.assertEqual(CellType.int16().no_data_value(), -32768) - - self.assertTrue(CellType.uint16().has_no_data()) - self.assertEqual(CellType.uint16().no_data_value(), 0) - - self.assertTrue(CellType.float32().has_no_data()) - self.assertTrue(np.isnan(CellType.float32().no_data_value())) - - self.assertEqual(CellType("float32ud-98").no_data_value(), -98.0) - self.assertEqual(CellType("float32ud-98").no_data_value(), -98) - self.assertEqual(CellType("int32ud-98").no_data_value(), -98.0) - self.assertEqual(CellType("int32ud-98").no_data_value(), -98) - - self.assertTrue(math.isnan(CellType.float64().no_data_value())) - self.assertEqual(CellType.uint8().no_data_value(), 0) - - def test_cell_type_conversion(self): - for ct in rf_cell_types(): - self.assertEqual(ct.to_numpy_dtype(), - CellType.from_numpy_dtype(ct.to_numpy_dtype()).to_numpy_dtype(), - "dtype comparison for " + str(ct)) - if not ct.is_raw(): - self.assertEqual(ct, - CellType.from_numpy_dtype(ct.to_numpy_dtype()), - "GTCellType comparison for " + str(ct)) - else: - ct_ud = ct.with_no_data_value(99) - self.assertEqual(ct_ud.base_cell_type_name(), - repr(CellType.from_numpy_dtype(ct_ud.to_numpy_dtype())), - "GTCellType comparison for " + str(ct_ud) - ) - - - -class TileOps(TestEnvironment): - - def setUp(self): - # convenience so we can assert around Tile() == Tile() - self.t1 = Tile(np.array([[1, 2], - [3, 4]]), CellType.int8().with_no_data_value(3)) - self.t2 = Tile(np.array([[1, 2], - [3, 4]]), CellType.int8().with_no_data_value(1)) - self.t3 = Tile(np.array([[1, 2], - [-3, 4]]), CellType.int8().with_no_data_value(3)) - - self.df = self.spark.createDataFrame([Row(t1=self.t1, t2=self.t2, t3=self.t3)]) - - def test_addition(self): - e1 = np.ma.masked_equal(np.array([[5, 6], - [7, 8]]), 7) - self.assertTrue(np.array_equal((self.t1 + 4).cells, e1)) - - e2 = np.ma.masked_equal(np.array([[3, 4], - [3, 8]]), 3) - r2 = (self.t1 + self.t2).cells - self.assertTrue(np.ma.allequal(r2, e2)) - - col_result = self.df.select(rf_local_add('t1', 't3').alias('sum')).first() - self.assertEqual(col_result.sum, self.t1 + self.t3) - - def test_multiplication(self): - e1 = np.ma.masked_equal(np.array([[4, 8], - [12, 16]]), 12) - - self.assertTrue(np.array_equal((self.t1 * 
4).cells, e1)) - - e2 = np.ma.masked_equal(np.array([[3, 4], [3, 16]]), 3) - r2 = (self.t1 * self.t2).cells - self.assertTrue(np.ma.allequal(r2, e2)) - - r3 = self.df.select(rf_local_multiply('t1', 't3').alias('r3')).first().r3 - self.assertEqual(r3, self.t1 * self.t3) - - def test_subtraction(self): - t3 = self.t1 * 4 - r1 = t3 - self.t1 - # note careful construction of mask value and dtype above - e1 = Tile(np.ma.masked_equal(np.array([[4 - 1, 8 - 2], - [3, 16 - 4]], dtype='int8'), - 3, ) - ) - self.assertTrue(r1 == e1, - "{} does not equal {}".format(r1, e1)) - # put another way - self.assertTrue(r1 == self.t1 * 3, - "{} does not equal {}".format(r1, self.t1 * 3)) - - def test_division(self): - t3 = self.t1 * 9 - r1 = t3 / 9 - self.assertTrue(np.array_equal(r1.cells, self.t1.cells), - "{} does not equal {}".format(r1, self.t1)) - - r2 = (self.t1 / self.t1).cells - self.assertTrue(np.array_equal(r2, np.array([[1,1], [1, 1]], dtype=r2.dtype))) - - def test_matmul(self): - r1 = self.t1 @ self.t2 - - # The behavior of np.matmul with masked arrays is not well documented - # it seems to treat the 2nd arg as if not a MaskedArray - e1 = Tile(np.matmul(self.t1.cells, self.t2.cells), r1.cell_type) - - self.assertTrue(r1 == e1, "{} was not equal to {}".format(r1, e1)) - self.assertEqual(r1, e1) - - -class PandasInterop(TestEnvironment): - - def setUp(self): - self.create_layer() - - def test_pandas_conversion(self): - import pandas as pd - # pd.options.display.max_colwidth = 256 - cell_types = (ct for ct in rf_cell_types() if not (ct.is_raw() or ("bool" in ct.base_cell_type_name()))) - tiles = [Tile(np.random.randn(5, 5) * 100, ct) for ct in cell_types] - in_pandas = pd.DataFrame({ - 'tile': tiles - }) - - in_spark = self.spark.createDataFrame(in_pandas) - out_pandas = in_spark.select(rf_identity('tile').alias('tile')).toPandas() - self.assertTrue(out_pandas.equals(in_pandas), str(in_pandas) + "\n\n" + str(out_pandas)) - - def test_extended_pandas_ops(self): - import pandas as pd - - self.assertIsInstance(self.rf.sql_ctx, SQLContext) - - # Try to collect self.rf which is read from a geotiff - rf_collect = self.rf.take(2) - self.assertTrue( - all([isinstance(row.tile.cells, np.ndarray) for row in rf_collect])) - - # Try to create a tile from numpy. - self.assertEqual(Tile(np.random.randn(10, 10), CellType.int8()).dimensions(), [10, 10]) - - tiles = [Tile(np.random.randn(10, 12), CellType.float64()) for _ in range(3)] - to_spark = pd.DataFrame({ - 't': tiles, - 'b': ['a', 'b', 'c'], - 'c': [1, 2, 4], - }) - rf_maybe = self.spark.createDataFrame(to_spark) - - # rf_maybe.select(rf_render_matrix(rf_maybe.t)).show(truncate=False) - - # Try to do something with it. 
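# Illustrative aside (not part of the original suite): the TileOps cases above
# lean on numpy masked-array semantics, where NoData cells propagate through
# local arithmetic as the union of the operands' masks. A minimal sketch:
import numpy as np

a = np.ma.masked_equal(np.array([[1, 2], [3, 4]]), 3)  # 3 plays the NoData role
b = np.ma.masked_equal(np.array([[1, 2], [3, 4]]), 1)  # 1 plays the NoData role
s = a + b                                              # masked wherever either operand is masked
assert s.mask.tolist() == [[True, False], [True, False]]
assert s[0, 1] == 4 and s[1, 1] == 8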
-        sums = to_spark.t.apply(lambda a: a.cells.sum()).tolist()
-        maybe_sums = rf_maybe.select(rf_tile_sum(rf_maybe.t).alias('tsum'))
-        maybe_sums = [r.tsum for r in maybe_sums.collect()]
-        np.testing.assert_almost_equal(maybe_sums, sums, 12)
-
-        # Test round trip for an array
-        simple_array = Tile(np.array([[1, 2], [3, 4]]), CellType.float64())
-        to_spark_2 = pd.DataFrame({
-            't': [simple_array]
-        })
-
-        rf_maybe_2 = self.spark.createDataFrame(to_spark_2)
-        # print("RasterFrameLayer `show`:")
-        # rf_maybe_2.select(rf_render_matrix(rf_maybe_2.t).alias('t')).show(truncate=False)
-
-        pd_2 = rf_maybe_2.toPandas()
-        array_back_2 = pd_2.iloc[0].t
-        # print("Array collected from toPandas output\n", array_back_2)
-
-        self.assertIsInstance(array_back_2, Tile)
-        np.testing.assert_equal(array_back_2.cells, simple_array.cells)
-
-
-class RasterJoin(TestEnvironment):
-
-    def setUp(self):
-        self.create_layer()
-
-    def test_raster_join(self):
-        # re-read the same source
-        rf_prime = self.spark.read.geotiff(self.img_uri) \
-            .withColumnRenamed('tile', 'tile2')
-
-        rf_joined = self.rf.raster_join(rf_prime)
-
-        # the self-join should preserve the row count of the left side
-        self.assertEqual(rf_joined.count(), self.rf.count())
-        self.assertTrue(len(rf_joined.columns) == len(self.rf.columns) + len(rf_prime.columns) - 2)
-
-        rf_joined_2 = self.rf.raster_join(rf_prime, self.rf.extent, self.rf.crs, rf_prime.extent, rf_prime.crs)
-        self.assertEqual(rf_joined_2.count(), self.rf.count())
-        self.assertTrue(len(rf_joined_2.columns) == len(self.rf.columns) + len(rf_prime.columns) - 2)
-
-        # this will bring arbitrary additional data into the join; garbage result
-        join_expression = self.rf.extent.xmin == rf_prime.extent.xmin
-        rf_joined_3 = self.rf.raster_join(rf_prime, self.rf.extent, self.rf.crs,
-                                          rf_prime.extent, rf_prime.crs,
-                                          join_expression)
-        self.assertEqual(rf_joined_3.count(), self.rf.count())
-        self.assertTrue(len(rf_joined_3.columns) == len(self.rf.columns) + len(rf_prime.columns) - 2)
-
-        # throws if you don't pass in all expected columns
-        with self.assertRaises(AssertionError):
-            self.rf.raster_join(rf_prime, join_exprs=self.rf.extent)
-
-    def test_raster_join_resample_method(self):
-        import os
-        from pyspark.sql.functions import col
-        df = self.spark.read.raster('file://' + os.path.join(self.resource_dir, 'L8-B4-Elkton-VA.tiff')) \
-            .select(col('proj_raster').alias('tile'))
-        df_prime = self.spark.read.raster('file://' + os.path.join(self.resource_dir, 'L8-B4-Elkton-VA-4326.tiff')) \
-            .select(col('proj_raster').alias('tile2'))
-
-        result_methods = df \
-            .raster_join(df_prime.withColumnRenamed('tile2', 'bilinear'), resampling_method="bilinear") \
-            .select('tile', rf_proj_raster('bilinear', rf_extent('tile'), rf_crs('tile')).alias('bilinear')) \
-            .raster_join(df_prime.withColumnRenamed('tile2', 'cubic_spline'), resampling_method="cubic_spline") \
-            .select(rf_local_subtract('bilinear', 'cubic_spline').alias('diff')) \
-            .agg(rf_agg_stats('diff').alias('stats')) \
-            .select("stats.min") \
-            .first()
-
-        self.assertGreater(result_methods[0], 0.0)
-
-    def test_raster_join_with_null_left_head(self):
-        # https://github.com/locationtech/rasterframes/issues/462
-
-        from py4j.protocol import Py4JJavaError
-
-        ones = np.ones((10, 10), dtype='uint8')
-        t = Tile(ones, CellType.uint8())
-        e = Extent(0.0, 0.0, 40.0, 40.0)
-        c = CRS('EPSG:32611')
-
-        # Note: there's a bug in Spark 2.x whereby the serialization of Extent
-        # reorders the fields, causing deserialization errors on the JVM side.
-        # So we end up manually forcing ordering with the use of `struct`. 
-        # See https://stackoverflow.com/questions/35343525/how-do-i-order-fields-of-my-row-objects-in-spark-python/35343885#35343885
-        left = self.spark.createDataFrame(
-            [
-                Row(i=1, j='a', t=t, u=t, e=e, c=c),
-                Row(i=1, j='b', t=None, u=t, e=e, c=c)
-            ]
-        ).withColumn('e2', struct('e.xmin', 'e.ymin', 'e.xmax', 'e.ymax'))
-
-        right = self.spark.createDataFrame(
-            [
-                Row(i=1, r=Tile(ones, CellType.uint8()), e=e, c=c),
-            ]).withColumn('e2', struct('e.xmin', 'e.ymin', 'e.xmax', 'e.ymax'))
-
-        try:
-            joined = left.raster_join(right,
-                                      join_exprs=left.i == right.i,
-                                      left_extent=left.e2, right_extent=right.e2,
-                                      left_crs=left.c, right_crs=right.c)
-
-            self.assertEqual(joined.count(), 2)
-            # In the case where the head column is null it will be passed through
-            self.assertTrue(joined.select(isnull('t')).filter(col('j') == 'b').first()[0])
-
-            # The right hand side tile should get dimensions from col `u` however
-            collected = joined.select(rf_dimensions('r').cols.alias('cols'),
-                                      rf_dimensions('r').rows.alias('rows')) \
-                .collect()
-
-            for r in collected:
-                self.assertEqual(10, r.rows)
-                self.assertEqual(10, r.cols)
-
-            # If there is no non-null tile on the LHS then the RHS is ill defined
-            joined_no_left_tile = left.drop('u') \
-                .raster_join(right,
-                             join_exprs=left.i == right.i,
-                             left_extent=left.e, right_extent=right.e,
-                             left_crs=left.c, right_crs=right.c)
-            self.assertEqual(joined_no_left_tile.count(), 2)
-
-            # Tile col from the left side passed through as null
-            self.assertTrue(
-                joined_no_left_tile.select(isnull('t')) \
-                    .filter(col('j') == 'b') \
-                    .first()[0]
-            )
-            # Because there is no non-null tile col on the left side, the right side is null too
-            self.assertTrue(
-                joined_no_left_tile.select(isnull('r')) \
-                    .filter(col('j') == 'b') \
-                    .first()[0]
-            )
-
-        except Py4JJavaError as e:
-            self.fail('test_raster_join_with_null_left_head failed with Py4JJavaError: ' + str(e))
-
-
-def suite():
-    function_tests = unittest.TestSuite()
-    return function_tests
-
-
-unittest.TextTestRunner().run(suite())
diff --git a/pyrasterframes/src/main/python/tests/RasterFunctionsTests.py b/pyrasterframes/src/main/python/tests/RasterFunctionsTests.py
deleted file mode 100644
index 7b94c2f05..000000000
--- a/pyrasterframes/src/main/python/tests/RasterFunctionsTests.py
+++ /dev/null
@@ -1,646 +0,0 @@
-#
-# This software is licensed under the Apache 2 license, quoted below.
-#
-# Copyright 2019 Astraea, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-# [http://www.apache.org/licenses/LICENSE-2.0]
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-from unittest import skip
-
-
-import pyrasterframes
-from pyrasterframes.rasterfunctions import *
-from pyrasterframes.rf_types import *
-from pyrasterframes.utils import gdal_version
-from pyspark import Row
-from pyspark.sql.functions import *
-
-import numpy as np
-from deprecation import fail_if_not_removed
-from numpy.testing import assert_equal, assert_allclose
-
-from . 
import TestEnvironment - - -class RasterFunctions(TestEnvironment): - - def setUp(self): - import sys - if not sys.warnoptions: - import warnings - warnings.simplefilter("ignore") - self.create_layer() - - def test_setup(self): - self.assertEqual(self.spark.sparkContext.getConf().get("spark.serializer"), - "org.apache.spark.serializer.KryoSerializer") - print("GDAL version", gdal_version()) - - def test_identify_columns(self): - cols = self.rf.tile_columns() - self.assertEqual(len(cols), 1, '`tileColumns` did not find the proper number of columns.') - print("Tile columns: ", cols) - col = self.rf.spatial_key_column() - self.assertIsInstance(col, Column, '`spatialKeyColumn` was not found') - print("Spatial key column: ", col) - col = self.rf.temporal_key_column() - self.assertIsNone(col, '`temporalKeyColumn` should be `None`') - print("Temporal key column: ", col) - - def test_tile_creation(self): - from pyrasterframes.rf_types import CellType - - base = self.spark.createDataFrame([1, 2, 3, 4], 'integer') - tiles = base.select(rf_make_constant_tile(3, 3, 3, "int32"), rf_make_zeros_tile(3, 3, "int32"), - rf_make_ones_tile(3, 3, CellType.int32())) - tiles.show() - self.assertEqual(tiles.count(), 4) - - def test_multi_column_operations(self): - df1 = self.rf.withColumnRenamed('tile', 't1').as_layer() - df2 = self.rf.withColumnRenamed('tile', 't2').as_layer() - df3 = df1.spatial_join(df2).as_layer() - df3 = df3.withColumn('norm_diff', rf_normalized_difference('t1', 't2')) - # df3.printSchema() - - aggs = df3.agg( - rf_agg_mean('norm_diff'), - ) - aggs.show() - row = aggs.first() - - self.assertTrue(self.rounded_compare(row['rf_agg_mean(norm_diff)'], 0)) - - def test_general(self): - meta = self.rf.tile_layer_metadata() - self.assertIsNotNone(meta['bounds']) - df = self.rf.withColumn('dims', rf_dimensions('tile')) \ - .withColumn('type', rf_cell_type('tile')) \ - .withColumn('dCells', rf_data_cells('tile')) \ - .withColumn('ndCells', rf_no_data_cells('tile')) \ - .withColumn('min', rf_tile_min('tile')) \ - .withColumn('max', rf_tile_max('tile')) \ - .withColumn('mean', rf_tile_mean('tile')) \ - .withColumn('sum', rf_tile_sum('tile')) \ - .withColumn('stats', rf_tile_stats('tile')) \ - .withColumn('extent', st_extent('geometry')) \ - .withColumn('extent_geom1', st_geometry('extent')) \ - .withColumn('ascii', rf_render_ascii('tile')) \ - .withColumn('log', rf_log('tile')) \ - .withColumn('exp', rf_exp('tile')) \ - .withColumn('expm1', rf_expm1('tile')) \ - .withColumn('sqrt', rf_sqrt('tile')) \ - .withColumn('round', rf_round('tile')) \ - .withColumn('abs', rf_abs('tile')) - - df.first() - - def test_st_geometry_from_struct(self): - from pyspark.sql import Row - from pyspark.sql.functions import struct - df = self.spark.createDataFrame([Row(xmin=0, ymin=1, xmax=2, ymax=3)]) - df2 = df.select(st_geometry(struct(df.xmin, df.ymin, df.xmax, df.ymax)).alias('geom')) - - actual_bounds = df2.first()['geom'].bounds - self.assertEqual((0.0, 1.0, 2.0, 3.0), actual_bounds) - - def test_agg_mean(self): - mean = self.rf.agg(rf_agg_mean('tile')).first()['rf_agg_mean(tile)'] - self.assertTrue(self.rounded_compare(mean, 10160)) - - def test_agg_local_mean(self): - from pyspark.sql import Row - from pyrasterframes.rf_types import Tile - - # this is really testing the nodata propagation in the agg local summation - ct = CellType.int8().with_no_data_value(4) - df = self.spark.createDataFrame([ - Row(tile=Tile(np.array([[1, 2, 3, 4, 5, 6]]), ct)), - Row(tile=Tile(np.array([[1, 2, 4, 3, 5, 6]]), ct)), - ]) - - result 
= df.agg(rf_agg_local_mean('tile').alias('mean')).first().mean - - expected = Tile(np.array([[1.0, 2.0, 3.0, 3.0, 5.0, 6.0]]), CellType.float64()) - self.assertEqual(result, expected) - - def test_aggregations(self): - aggs = self.rf.agg( - rf_agg_data_cells('tile'), - rf_agg_no_data_cells('tile'), - rf_agg_stats('tile'), - rf_agg_approx_histogram('tile') - ) - row = aggs.first() - - # print(row['rf_agg_data_cells(tile)']) - self.assertEqual(row['rf_agg_data_cells(tile)'], 387000) - self.assertEqual(row['rf_agg_no_data_cells(tile)'], 1000) - self.assertEqual(row['rf_agg_stats(tile)'].data_cells, row['rf_agg_data_cells(tile)']) - - @fail_if_not_removed - def test_add_scalar(self): - # Trivial test to trigger the deprecation failure at the right time. - result: Row = self.rf.select(rf_local_add_double('tile', 99.9), rf_local_add_int('tile', 42)).first() - self.assertTrue(True) - - def test_agg_approx_quantiles(self): - agg = self.rf.agg(rf_agg_approx_quantiles('tile', [0.1, 0.5, 0.9, 0.98])) - result = agg.first()[0] - # expected result from computing in external python process; c.f. scala tests - assert_allclose(result, np.array([7963., 10068., 12160., 14366.])) - - def test_sql(self): - - self.rf.createOrReplaceTempView("rf_test_sql") - - arith = self.spark.sql("""SELECT tile, - rf_local_add(tile, 1) AS add_one, - rf_local_subtract(tile, 1) AS less_one, - rf_local_multiply(tile, 2) AS times_two, - rf_local_divide( - rf_convert_cell_type(tile, "float32"), - 2) AS over_two - FROM rf_test_sql""") - - arith.createOrReplaceTempView('rf_test_sql_1') - arith.show(truncate=False) - stats = self.spark.sql(""" - SELECT rf_tile_mean(tile) as base, - rf_tile_mean(add_one) as plus_one, - rf_tile_mean(less_one) as minus_one, - rf_tile_mean(times_two) as double, - rf_tile_mean(over_two) as half, - rf_no_data_cells(tile) as nd - - FROM rf_test_sql_1 - ORDER BY rf_no_data_cells(tile) - """) - stats.show(truncate=False) - stats.createOrReplaceTempView('rf_test_sql_stats') - - compare = self.spark.sql(""" - SELECT - plus_one - 1.0 = base as add, - minus_one + 1.0 = base as subtract, - double / 2.0 = base as multiply, - half * 2.0 = base as divide, - nd - FROM rf_test_sql_stats - """) - - expect_row1 = compare.orderBy('nd').first() - - self.assertTrue(expect_row1.subtract) - self.assertTrue(expect_row1.multiply) - self.assertTrue(expect_row1.divide) - self.assertEqual(expect_row1.nd, 0) - self.assertTrue(expect_row1.add) - - expect_row2 = compare.orderBy('nd', ascending=False).first() - - self.assertTrue(expect_row2.subtract) - self.assertTrue(expect_row2.multiply) - self.assertTrue(expect_row2.divide) - self.assertTrue(expect_row2.nd > 0) - self.assertTrue(expect_row2.add) # <-- Would fail in a case where ND + 1 = 1 - - def test_explode(self): - import pyspark.sql.functions as F - self.rf.select('spatial_key', rf_explode_tiles('tile')).show() - # +-----------+------------+---------+-------+ - # |spatial_key|column_index|row_index|tile | - # +-----------+------------+---------+-------+ - # |[2,1] |4 |0 |10150.0| - cell = self.rf.select(self.rf.spatial_key_column(), rf_explode_tiles(self.rf.tile)) \ - .where(F.col("spatial_key.col") == 2) \ - .where(F.col("spatial_key.row") == 1) \ - .where(F.col("column_index") == 4) \ - .where(F.col("row_index") == 0) \ - .select(F.col("tile")) \ - .collect()[0][0] - self.assertEqual(cell, 10150.0) - - # Test the sample version - frac = 0.01 - sample_count = self.rf.select(rf_explode_tiles_sample(frac, 1872, 'tile')).count() - print('Sample count is {}'.format(sample_count)) 
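# Illustrative sketch (assumed semantics, not original test code): rf_explode_tiles
# emits one output row per cell, keyed by (column_index, row_index), matching the
# sample table in the comment above. Modeled with plain Python for one 2x2 tile:
import numpy as np

cells = np.array([[10.0, 20.0], [30.0, 40.0]])
exploded = [(col, row, cells[row, col])
            for row in range(cells.shape[0])
            for col in range(cells.shape[1])]
assert len(exploded) == cells.size  # one row per cell
assert exploded[1] == (1, 0, 20.0)  # cells are emitted row by row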
-        self.assertTrue(sample_count > 0)
-        self.assertTrue(sample_count < (frac * 1.1) * 387000)  # give some wiggle room
-
-    def test_mask_by_value(self):
-        from pyspark.sql.functions import lit
-
-        # create an artificial mask for values > 25000; masking value will be 4
-        mask_value = 4
-
-        rf1 = self.rf.select(self.rf.tile,
-                             rf_local_multiply(
-                                 rf_convert_cell_type(
-                                     rf_local_greater(self.rf.tile, 25000),
-                                     "uint8"),
-                                 lit(mask_value)).alias('mask'))
-        rf2 = rf1.select(rf1.tile, rf_mask_by_value(rf1.tile, rf1.mask, lit(mask_value), False).alias('masked'))
-        result = rf2.agg(rf_agg_no_data_cells(rf2.tile) < rf_agg_no_data_cells(rf2.masked)) \
-            .collect()[0][0]
-        self.assertTrue(result)
-
-        # note supplying an `int` here, not a column, as the mask value
-        rf3 = rf1.select(
-            rf1.tile,
-            rf_inverse_mask_by_value(rf1.tile, rf1.mask, mask_value).alias('masked'),
-            rf_mask_by_value(rf1.tile, rf1.mask, mask_value, True).alias('masked2'),
-        )
-        result = rf3.agg(
-            rf_agg_no_data_cells(rf3.tile) < rf_agg_no_data_cells(rf3.masked),
-            rf_agg_no_data_cells(rf3.tile) < rf_agg_no_data_cells(rf3.masked2),
-        ) \
-            .first()
-        self.assertTrue(result[0])
-        self.assertTrue(result[1])  # inverse mask arg gives equivalent result
-
-        result_equiv_tiles = rf3.select(rf_for_all(rf_local_equal(rf3.masked, rf3.masked2))).first()[0]
-        self.assertTrue(result_equiv_tiles)  # inverse fn and inverse arg produce the same Tile
-
-    def test_mask_by_values(self):
-
-        tile = Tile(np.random.randint(1, 100, (5, 5)), CellType.uint8())
-        mask_tile = Tile(np.array(range(1, 26), 'uint8').reshape(5, 5))
-        expected_diag_nd = Tile(np.ma.masked_array(tile.cells, mask=np.eye(5)))
-
-        df = self.spark.createDataFrame([Row(t=tile, m=mask_tile)]) \
-            .select(rf_mask_by_values('t', 'm', [0, 6, 12, 18, 24]))  # values on the diagonal
-        result0 = df.first()
-        # assert_equal(result0[0].cells, expected_diag_nd)
-        self.assertTrue(result0[0] == expected_diag_nd)
-
-    def test_mask_bits(self):
-        t = Tile(42 * np.ones((4, 4), 'uint16'), CellType.uint16())
-        # with a variety of known values
-        mask = Tile(np.array([
-            [1, 1, 2720, 2720],
-            [1, 6816, 6816, 2756],
-            [2720, 2720, 6900, 2720],
-            [2720, 6900, 6816, 1]
-        ]), CellType('uint16raw'))
-
-        df = self.spark.createDataFrame([Row(t=t, mask=mask)])
-
-        # removes fill value 1
-        mask_fill_df = df.select(rf_mask_by_bit('t', 'mask', 0, True).alias('mbb'))
-        mask_fill_tile = mask_fill_df.first()['mbb']
-
-        self.assertTrue(mask_fill_tile.cell_type.has_no_data())
-
-        self.assertEqual(
-            mask_fill_df.select(rf_data_cells('mbb')).first()[0],
-            16 - 4
-        )
-
-        # mask out 6816, 6900
-        mask_med_hi_cir = df.withColumn('mask_cir_mh',
-                                        rf_mask_by_bits('t', 'mask', 11, 2, [2, 3])) \
-            .first()['mask_cir_mh'].cells
-
-        self.assertEqual(
-            mask_med_hi_cir.mask.sum(),
-            5
-        )
-
-    @skip('Issue #422 https://github.com/locationtech/rasterframes/issues/422')
-    def test_mask_and_deser(self):
-        # duplicates much of test_mask_bits, but also exercises deserialization of the masked tile
-        t = Tile(42 * np.ones((4, 4), 'uint16'), CellType.uint16())
-        # with a variety of known values
-        mask = Tile(np.array([
-            [1, 1, 2720, 2720],
-            [1, 6816, 6816, 2756],
-            [2720, 2720, 6900, 2720],
-            [2720, 6900, 6816, 1]
-        ]), CellType('uint16raw'))
-
-        df = self.spark.createDataFrame([Row(t=t, mask=mask)])
-
-        # removes fill value 1
-        mask_fill_df = df.select(rf_mask_by_bit('t', 'mask', 0, True).alias('mbb'))
-        mask_fill_tile = mask_fill_df.first()['mbb']
-
-        self.assertTrue(mask_fill_tile.cell_type.has_no_data())
-
-        # Unsure why this fails. mask_fill_tile.cells is all 42 unmasked. 
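# A minimal sketch of the bit-field logic exercised by these masking tests,
# assuming extract_bits(v, start, n) == (v >> start) & (2**n - 1), consistent
# with rf_local_extract_bits and the masked counts asserted above:
import numpy as np

def extract_bits(values, start_bit, num_bits):
    return (values >> start_bit) & ((1 << num_bits) - 1)

qa = np.array([1, 2720, 6816, 6900], dtype='uint16')       # values from the mask tile above
assert extract_bits(qa, 0, 1).tolist() == [1, 0, 0, 0]     # bit 0 is the fill flag
assert extract_bits(qa, 11, 2).tolist() == [0, 1, 3, 3]    # bits 11-12 in [2, 3] get masked out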
-        self.assertEqual(mask_fill_tile.cells.mask.sum(), 4,
-                         f'Expected {16 - 4} data values but got the masked tile: '
-                         f'{mask_fill_tile}'
-                         )
-
-    def test_mask(self):
-        from pyspark.sql import Row
-        from pyrasterframes.rf_types import Tile, CellType
-
-        np.random.seed(999)
-        # importantly, exclude 0 from the range, because that's the nodata value for the `data_tile`'s cell type
-        ma = np.ma.array(np.random.randint(1, 10, (5, 5), dtype='int8'), mask=np.random.rand(5, 5) > 0.7)
-        expected_data_values = ma.compressed().size
-        expected_no_data_values = ma.size - expected_data_values
-        self.assertTrue(expected_data_values > 0, "Make sure random seed is cooperative")
-        self.assertTrue(expected_no_data_values > 0, "Make sure random seed is cooperative")
-
-        data_tile = Tile(np.ones(ma.shape, ma.dtype), CellType.uint8())
-
-        df = self.spark.createDataFrame([Row(t=data_tile, m=Tile(ma))]) \
-            .withColumn('masked_t', rf_mask('t', 'm'))
-
-        result = df.select(rf_data_cells('masked_t')).first()[0]
-        self.assertEqual(result, expected_data_values,
-                         f"Masked tile should have {expected_data_values} data values but found: {df.select('masked_t').first()[0].cells}. "
-                         f"Original data: {data_tile.cells} "
-                         f"Masked by {ma}")
-
-        nd_result = df.select(rf_no_data_cells('masked_t')).first()[0]
-        self.assertEqual(nd_result, expected_no_data_values)
-
-        # deser of tile is correct
-        self.assertEqual(
-            df.select('masked_t').first()[0].cells.compressed().size,
-            expected_data_values
-        )
-
-    def test_extract_bits(self):
-        one = np.ones((6, 6), 'uint8')
-        t = Tile(84 * one)
-        df = self.spark.createDataFrame([Row(t=t)])
-        result_py_literals = df.select(rf_local_extract_bits('t', 2, 3)).first()[0]
-        # 84 is binary 1010100; extracting 3 bits starting at bit 2 yields 101 == 5
-        assert_equal(result_py_literals.cells, 5 * one)
-
-        result_cols = df.select(rf_local_extract_bits('t', lit(2), lit(3))).first()[0]
-        assert_equal(result_cols.cells, 5 * one)
-
-    def test_resample(self):
-        from pyspark.sql.functions import lit
-        result = self.rf.select(
-            rf_tile_min(rf_local_equal(
-                rf_resample(rf_resample(self.rf.tile, lit(2)), lit(0.5)),
-                self.rf.tile))
-        ).collect()[0][0]
-
-        self.assertTrue(result == 1)  # shorthand for: all values are true
-
-    def test_exists_for_all(self):
-        df = self.rf.withColumn('should_exist', rf_make_ones_tile(5, 5, 'int8')) \
-            .withColumn('should_not_exist', rf_make_zeros_tile(5, 5, 'int8'))
-
-        should_exist = df.select(rf_exists(df.should_exist).alias('se')).take(1)[0].se
-        self.assertTrue(should_exist)
-
-        should_not_exist = df.select(rf_exists(df.should_not_exist).alias('se')).take(1)[0].se
-        self.assertTrue(not should_not_exist)
-
-        self.assertTrue(df.select(rf_for_all(df.should_exist).alias('se')).take(1)[0].se)
-        self.assertTrue(not df.select(rf_for_all(df.should_not_exist).alias('se')).take(1)[0].se)
-
-    def test_cell_type_in_functions(self):
-        from pyrasterframes.rf_types import CellType
-        ct = CellType.float32().with_no_data_value(-999)
-
-        df = self.rf.withColumn('ct_str', rf_convert_cell_type('tile', ct.cell_type_name)) \
-            .withColumn('ct', rf_convert_cell_type('tile', ct)) \
-            .withColumn('make', rf_make_constant_tile(99, 3, 4, CellType.int8())) \
-            .withColumn('make2', rf_with_no_data('make', 99))
-
-        result = df.select('ct', 'ct_str', 'make', 'make2').first()
-
-        self.assertEqual(result['ct'].cell_type, ct)
-        self.assertEqual(result['ct_str'].cell_type, ct)
-        self.assertEqual(result['make'].cell_type, CellType.int8())
-
-        counts = df.select(
-            rf_no_data_cells('make').alias("nodata1"), 
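            # `make` is a constant tile of 99; none of its cells equal the int8
            # NoData value (-128), so all 3 * 4 cells count as data. `make2`
            # re-declares 99 as NoData, flipping every cell to NoData, hence
            # data2 == 0 and nodata2 == 12 below.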
rf_data_cells('make').alias("data1"), - rf_no_data_cells('make2').alias("nodata2"), - rf_data_cells('make2').alias("data2") - ).first() - - self.assertEqual(counts["data1"], 3 * 4) - self.assertEqual(counts["nodata1"], 0) - self.assertEqual(counts["data2"], 0) - self.assertEqual(counts["nodata2"], 3 * 4) - self.assertEqual(result['make2'].cell_type, CellType.int8().with_no_data_value(99)) - - def test_render_composite(self): - cat = self.spark.createDataFrame([ - Row(red=self.l8band_uri(4), green=self.l8band_uri(3), blue=self.l8band_uri(2)) - ]) - rf = self.spark.read.raster(cat, catalog_col_names=cat.columns) - - # Test composite construction - rgb = rf.select(rf_tile(rf_rgb_composite('red', 'green', 'blue')).alias('rgb')).first()['rgb'] - - # TODO: how to better test this? - self.assertIsInstance(rgb, Tile) - self.assertEqual(rgb.dimensions(), [186, 169]) - - ## Test PNG generation - png_bytes = rf.select(rf_render_png('red', 'green', 'blue').alias('png')).first()['png'] - # Look for the PNG magic cookie - self.assert_png(png_bytes) - - def test_rf_interpret_cell_type_as(self): - from pyspark.sql import Row - from pyrasterframes.rf_types import Tile - - df = self.spark.createDataFrame([ - Row(t=Tile(np.array([[1, 3, 4], [5, 0, 3]]), CellType.uint8().with_no_data_value(5))) - ]) - df = df.withColumn('tile', rf_interpret_cell_type_as('t', 'uint8ud3')) # threes become ND - result = df.select(rf_tile_sum(rf_local_equal('t', lit(3))).alias('threes')).first()['threes'] - self.assertEqual(result, 2) - - result_5 = df.select(rf_tile_sum(rf_local_equal('t', lit(5))).alias('fives')).first()['fives'] - self.assertEqual(result_5, 0) - - def test_rf_local_data_and_no_data(self): - from pyspark.sql import Row - from pyrasterframes.rf_types import Tile - - nd = 5 - t = Tile( - np.array([[1, 3, 4], [nd, 0, 3]]), - CellType.uint8().with_no_data_value(nd)) - # note the convert is due to issue #188 - df = self.spark.createDataFrame([Row(t=t)])\ - .withColumn('lnd', rf_convert_cell_type(rf_local_no_data('t'), 'uint8')) \ - .withColumn('ld', rf_convert_cell_type(rf_local_data('t'), 'uint8')) - - result = df.first() - result_nd = result['lnd'] - assert_equal(result_nd.cells, t.cells.mask) - - result_d = result['ld'] - assert_equal(result_d.cells, np.invert(t.cells.mask)) - - def test_rf_local_is_in(self): - from pyspark.sql.functions import lit, array, col - from pyspark.sql import Row - - nd = 5 - t = Tile( - np.array([[1, 3, 4], [nd, 0, 3]]), - CellType.uint8().with_no_data_value(nd)) - # note the convert is due to issue #188 - df = self.spark.createDataFrame([Row(t=t)]) \ - .withColumn('a', array(lit(3), lit(4))) \ - .withColumn('in2', rf_convert_cell_type( - rf_local_is_in(col('t'), array(lit(0), lit(4))), - 'uint8')) \ - .withColumn('in3', rf_convert_cell_type(rf_local_is_in('t', 'a'), 'uint8')) \ - .withColumn('in4', rf_convert_cell_type( - rf_local_is_in('t', array(lit(0), lit(4), lit(3))), - 'uint8')) \ - .withColumn('in_list', rf_convert_cell_type(rf_local_is_in(col('t'), [4, 1]), 'uint8')) - - result = df.first() - self.assertEqual(result['in2'].cells.sum(), 2) - assert_equal(result['in2'].cells, np.isin(t.cells, np.array([0, 4]))) - self.assertEqual(result['in3'].cells.sum(), 3) - self.assertEqual(result['in4'].cells.sum(), 4) - self.assertEqual(result['in_list'].cells.sum(), 2, - "Tile value {} should contain two 1s as: [[1, 0, 1],[0, 0, 0]]" - .format(result['in_list'].cells)) - - def test_local_min_max_clamp(self): - tile = Tile(np.random.randint(-20, 20, (10, 10)), CellType.int8()) - min_tile = 
Tile(np.random.randint(-20, 0, (10, 10)), CellType.int8()) - max_tile = Tile(np.random.randint(0, 20, (10, 10)), CellType.int8()) - - df = self.spark.createDataFrame([Row(t=tile, mn=min_tile, mx=max_tile)]) - assert_equal( - df.select(rf_local_min('t', 'mn')).first()[0].cells, - np.clip(tile.cells, None, min_tile.cells) - ) - - assert_equal( - df.select(rf_local_min('t', -5)).first()[0].cells, - np.clip(tile.cells, None, -5) - ) - - assert_equal( - df.select(rf_local_max('t', 'mx')).first()[0].cells, - np.clip(tile.cells, max_tile.cells, None) - ) - - assert_equal( - df.select(rf_local_max('t', 5)).first()[0].cells, - np.clip(tile.cells, 5, None) - ) - - assert_equal( - df.select(rf_local_clamp('t', 'mn', 'mx')).first()[0].cells, - np.clip(tile.cells, min_tile.cells, max_tile.cells) - ) - - def test_rf_where(self): - cond = Tile(np.random.binomial(1, 0.35, (10, 10)), CellType.uint8()) - x = Tile(np.random.randint(-20, 10, (10, 10)), CellType.int8()) - y = Tile(np.random.randint(0, 30, (10, 10)), CellType.int8()) - - df = self.spark.createDataFrame([Row(cond=cond, x=x, y=y)]) - result = df.select(rf_where('cond', 'x', 'y')).first()[0].cells - assert_equal(result, np.where(cond.cells, x.cells, y.cells)) - - def test_rf_standardize(self): - from pyspark.sql.functions import sqrt as F_sqrt - stats = self.prdf.select(rf_agg_stats('proj_raster').alias('stat')) \ - .select('stat.mean', F_sqrt('stat.variance').alias('sttdev')) \ - .first() - - result = self.prdf.select(rf_standardize('proj_raster', stats[0], stats[1]).alias('z')) \ - .select(rf_agg_stats('z').alias('z_stat')) \ - .select('z_stat.mean', 'z_stat.variance') \ - .first() - - self.assertAlmostEqual(result[0], 0.0) - self.assertAlmostEqual(result[1], 1.0) - - def test_rf_standardize_per_tile(self): - - # 10k samples so should be pretty stable - x = Tile(np.random.randint(-20, 0, (100, 100)), CellType.int8()) - df = self.spark.createDataFrame([Row(x=x)]) - - result = df.select(rf_standardize('x').alias('z')) \ - .select(rf_agg_stats('z').alias('z_stat')) \ - .select('z_stat.mean', 'z_stat.variance') \ - .first() - - self.assertAlmostEqual(result[0], 0.0) - self.assertAlmostEqual(result[1], 1.0) - - def test_rf_rescale(self): - from pyspark.sql.functions import min as F_min - from pyspark.sql.functions import max as F_max - - x1 = Tile(np.random.randint(-60, 12, (10, 10)), CellType.int8()) - x2 = Tile(np.random.randint(15, 122, (10, 10)), CellType.int8()) - df = self.spark.createDataFrame([Row(x=x1), Row(x=x2)]) - # Note there will be some clipping - rescaled = df.select(rf_rescale('x', -20, 50).alias('x_prime'), 'x') - result = rescaled \ - .agg( - F_max(rf_tile_min('x_prime')), - F_min(rf_tile_max('x_prime')) - ).first() - - self.assertGreater(result[0], 0.0, f'Expected max tile_min to be > 0 (strictly); but it is ' - f'{rescaled.select("x", "x_prime", rf_tile_min("x_prime")).take(2)}') - self.assertLess(result[1], 1.0, f'Expected min tile_max to be < 1 (strictly); it is' - f'{rescaled.select(rf_tile_max("x_prime")).take(2)}') - - def test_rf_rescale_per_tile(self): - x1 = Tile(np.random.randint(-20, 42, (10, 10)), CellType.int8()) - x2 = Tile(np.random.randint(20, 242, (10, 10)), CellType.int8()) - df = self.spark.createDataFrame([Row(x=x1), Row(x=x2)]) - result = df.select(rf_rescale('x').alias('x_prime')) \ - .agg(rf_agg_stats('x_prime').alias('stat')) \ - .select('stat.min', 'stat.max') \ - .first() - - self.assertEqual(result[0], 0.0) - self.assertEqual(result[1], 1.0) - - - def test_rf_agg_overview_raster(self): - width = 500 - 
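# Illustrative sketch of the per-cell rescaling relied on by the two tests above,
# assuming rf_rescale(x, lo, hi) computes clip((x - lo) / (hi - lo), 0, 1):
import numpy as np

cells = np.array([-60.0, -20.0, 15.0, 50.0, 122.0])
lo, hi = -20.0, 50.0
rescaled = np.clip((cells - lo) / (hi - lo), 0.0, 1.0)
assert rescaled.min() == 0.0 and rescaled.max() == 1.0  # out-of-range inputs clip to the limits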
height = 400 - agg = self.prdf.select(rf_agg_extent(rf_extent(self.prdf.proj_raster)).alias("extent")).first().extent - crs = self.prdf.select(rf_crs(self.prdf.proj_raster).alias("crs")).first().crs.crsProj4 - aoi = Extent.from_row(agg) - aoi = aoi.reproject(crs, "EPSG:3857") - aoi = aoi.buffer(-(aoi.width * 0.2)) - - ovr = self.prdf.select(rf_agg_overview_raster(self.prdf.proj_raster, width, height, aoi).alias("agg")) - png = ovr.select(rf_render_color_ramp_png('agg', 'Greyscale64')).first()[0] - self.assert_png(png) - - # with open('/tmp/test_rf_agg_overview_raster.png', 'wb') as f: - # f.write(png) - - def test_rf_proj_raster(self): - df = self.prdf.select(rf_proj_raster(rf_tile('proj_raster'), - rf_extent('proj_raster'), - rf_crs('proj_raster')).alias('roll_your_own')) - self.assertIn('extent', df.schema['roll_your_own'].dataType.fieldNames()) - diff --git a/pyrasterframes/src/main/python/tests/RasterSourceTest.py b/pyrasterframes/src/main/python/tests/RasterSourceTest.py deleted file mode 100644 index ec0877486..000000000 --- a/pyrasterframes/src/main/python/tests/RasterSourceTest.py +++ /dev/null @@ -1,225 +0,0 @@ -# -# This software is licensed under the Apache 2 license, quoted below. -# -# Copyright 2019 Astraea, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# [http://www.apache.org/licenses/LICENSE-2.0] -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. -# -# SPDX-License-Identifier: Apache-2.0 -# - -from pyrasterframes.rasterfunctions import * -from pyrasterframes.rf_types import * -from pyspark.sql.functions import * -import pandas as pd -from shapely.geometry import Point -import os.path -from unittest import skip -from . 
import TestEnvironment - - -class RasterSourceTest(TestEnvironment): - - @staticmethod - def path(scene, band): - scene_dict = { - 1: 'https://landsat-pds.s3.amazonaws.com/c1/L8/015/041/LC08_L1TP_015041_20190305_20190309_01_T1/LC08_L1TP_015041_20190305_20190309_01_T1_B{}.TIF', - 2: 'https://landsat-pds.s3.amazonaws.com/c1/L8/015/042/LC08_L1TP_015042_20190305_20190309_01_T1/LC08_L1TP_015042_20190305_20190309_01_T1_B{}.TIF', - 3: 'https://landsat-pds.s3.amazonaws.com/c1/L8/016/041/LC08_L1TP_016041_20190224_20190309_01_T1/LC08_L1TP_016041_20190224_20190309_01_T1_B{}.TIF', - } - - assert band in range(1, 12) - assert scene in scene_dict.keys() - p = scene_dict[scene] - return p.format(band) - - def path_pandas_df(self): - return pd.DataFrame([ - {'b1': self.path(1, 1), 'b2': self.path(1, 2), 'b3': self.path(1, 3), 'geo': Point(1, 1)}, - {'b1': self.path(2, 1), 'b2': self.path(2, 2), 'b3': self.path(2, 3), 'geo': Point(2, 2)}, - {'b1': self.path(3, 1), 'b2': self.path(3, 2), 'b3': self.path(3, 3), 'geo': Point(3, 3)}, - ]) - - - def test_handle_lazy_eval(self): - df = self.spark.read.raster(self.path(1, 1)) - ltdf = df.select('proj_raster') - self.assertGreater(ltdf.count(), 0) - self.assertIsNotNone(ltdf.first().proj_raster) - - tdf = df.select(rf_tile('proj_raster').alias('pr')) - self.assertGreater(tdf.count(), 0) - self.assertIsNotNone(tdf.first().pr) - - def test_strict_eval(self): - df_lazy = self.spark.read.raster(self.img_uri, lazy_tiles=True) - # when doing Show on a lazy tile we will see something like RasterRefTile(RasterRef(JVMGeoTiffRasterSource(... - # use this trick to get the `show` string - show_str_lazy = df_lazy.select('proj_raster')._jdf.showString(1, -1, False) - print(show_str_lazy) - self.assertTrue('RasterRef' in show_str_lazy) - - # again for strict - df_strict = self.spark.read.raster(self.img_uri, lazy_tiles=False) - show_str_strict = df_strict.select('proj_raster')._jdf.showString(1, -1, False) - self.assertTrue('RasterRef' not in show_str_strict) - - def test_prt_functions(self): - df = self.spark.read.raster(self.img_uri) \ - .withColumn('crs', rf_crs('proj_raster')) \ - .withColumn('ext', rf_extent('proj_raster')) \ - .withColumn('geom', rf_geometry('proj_raster')) - df.select('crs', 'ext', 'geom').first() - - def test_list_of_str(self): - # much the same as RasterSourceDataSourceSpec here; but using https PDS. 
Takes about 30s to run - - def l8path(b): - assert b in range(1, 12) - base = "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/199/026/LC08_L1TP_199026_20180919_20180928_01_T1/LC08_L1TP_199026_20180919_20180928_01_T1_B{}.TIF" - return base.format(b) - - path_param = [l8path(b) for b in [1, 2, 3]] - tile_size = 512 - - df = self.spark.read.raster( - path_param, - tile_dimensions=(tile_size, tile_size), - lazy_tiles=True, - ).cache() - - print(df.take(3)) - - # schema is tile_path and tile - # df.printSchema() - self.assertTrue(len(df.columns) == 2 and 'proj_raster_path' in df.columns and 'proj_raster' in df.columns) - - # the most common tile dimensions should be as passed to `options`, showing that options are correctly applied - tile_size_df = df.select(rf_dimensions(df.proj_raster).rows.alias('r'), rf_dimensions(df.proj_raster).cols.alias('c')) \ - .groupby(['r', 'c']).count().toPandas() - most_common_size = tile_size_df.loc[tile_size_df['count'].idxmax()] - self.assertTrue(most_common_size.r == tile_size and most_common_size.c == tile_size) - - # all rows are from a single source URI - path_count = df.groupby(df.proj_raster_path).count() - print(path_count.collect()) - self.assertTrue(path_count.count() == 3) - - def test_list_of_list_of_str(self): - lol = [ - [self.path(1, 1), self.path(1, 2)], - [self.path(2, 1), self.path(2, 2)], - [self.path(3, 1), self.path(3, 2)] - ] - df = self.spark.read.raster(lol) - self.assertTrue(len(df.columns) == 4) # 2 cols of uris plus 2 cols of proj_rasters - self.assertEqual(sorted(df.columns), sorted(['proj_raster_0_path', 'proj_raster_1_path', - 'proj_raster_0', 'proj_raster_1'])) - uri_df = df.select('proj_raster_0_path', 'proj_raster_1_path').distinct() - - # check that various uri's are in the dataframe - self.assertEqual( - uri_df.filter(col('proj_raster_0_path') == lit(self.path(1, 1))).count(), - 1) - - self.assertEqual( - uri_df \ - .filter(col('proj_raster_0_path') == lit(self.path(1, 1))) \ - .filter(col('proj_raster_1_path') == lit(self.path(1, 2))) \ - .count(), - 1) - - self.assertEqual( - uri_df \ - .filter(col('proj_raster_0_path') == lit(self.path(3, 1))) \ - .filter(col('proj_raster_1_path') == lit(self.path(3, 2))) \ - .count(), - 1) - - def test_schemeless_string(self): - import os.path - path = os.path.join(self.resource_dir, "L8-B8-Robinson-IL.tiff") - self.assertTrue(not path.startswith('file://')) - self.assertTrue(os.path.exists(path)) - df = self.spark.read.raster(path) - self.assertTrue(df.count() > 0) - - def test_spark_df_source(self): - catalog_columns = ['b1', 'b2', 'b3'] - catalog = self.spark.createDataFrame(self.path_pandas_df()) - - df = self.spark.read.raster( - catalog, - tile_dimensions=(512, 512), - catalog_col_names=catalog_columns, - lazy_tiles=True # We'll get an OOM error if we try to read 9 scenes all at once! 
- ) - - self.assertTrue(len(df.columns) == 7) # three bands times {path, tile} plus geo - self.assertTrue(df.select('b1_path').distinct().count() == 3) # as per scene_dict - b1_paths_maybe = df.select('b1_path').distinct().collect() - b1_paths = [self.path(s, 1) for s in [1, 2, 3]] - self.assertTrue(all([row.b1_path in b1_paths for row in b1_paths_maybe])) - - def test_pandas_source(self): - - df = self.spark.read.raster( - self.path_pandas_df(), - catalog_col_names=['b1', 'b2', 'b3'] - ) - self.assertEqual(len(df.columns), 7) # three path cols, three tile cols, and geo - self.assertTrue('geo' in df.columns) - self.assertTrue(df.select('b1_path').distinct().count() == 3) - - def test_geopandas_source(self): - from geopandas import GeoDataFrame - # Same test as test_pandas_source with geopandas - geo_df = GeoDataFrame(self.path_pandas_df(), crs={'init': 'EPSG:4326'}, geometry='geo') - df = self.spark.read.raster(geo_df, ['b1', 'b2', 'b3']) - - self.assertEqual(len(df.columns), 7) # three path cols, three tile cols, and geo - self.assertTrue('geo' in df.columns) - self.assertTrue(df.select('b1_path').distinct().count() == 3) - - def test_csv_string(self): - - s = """metadata,b1,b2 - a,{},{} - b,{},{} - c,{},{} - """.format( - self.path(1, 1), self.path(1, 2), - self.path(2, 1), self.path(2, 2), - self.path(3, 1), self.path(3, 2), - ) - - df = self.spark.read.raster(s, ['b1', 'b2']) - self.assertEqual(len(df.columns), 3 + 2) # number of columns in original DF plus cardinality of catalog_col_names - self.assertTrue(len(df.take(1))) # non-empty check - - def test_catalog_named_arg(self): - # through version 0.8.1 reading a catalog was via named argument only. - df = self.spark.read.raster(catalog=self.path_pandas_df(), catalog_col_names=['b1', 'b2', 'b3']) - self.assertEqual(len(df.columns), 7) # three path cols, three tile cols, and geo - self.assertTrue(df.select('b1_path').distinct().count() == 3) - - def test_spatial_partitioning(self): - f = self.path(1, 1) - df = self.spark.read.raster(f, spatial_index_partitions=True) - self.assertTrue('spatial_index' in df.columns) - - self.assertEqual(df.rdd.getNumPartitions(), int(self.spark.conf.get("spark.sql.shuffle.partitions"))) - self.assertEqual(self.spark.read.raster(f, spatial_index_partitions=34).rdd.getNumPartitions(), 34) - self.assertEqual(self.spark.read.raster(f, spatial_index_partitions="42").rdd.getNumPartitions(), 42) - self.assertFalse('spatial_index' in self.spark.read.raster(f, spatial_index_partitions=False).columns) - self.assertFalse('spatial_index' in self.spark.read.raster(f, spatial_index_partitions=0).columns) \ No newline at end of file diff --git a/pyrasterframes/src/main/python/tests/UDTTests.py b/pyrasterframes/src/main/python/tests/UDTTests.py deleted file mode 100644 index 6ed39391d..000000000 --- a/pyrasterframes/src/main/python/tests/UDTTests.py +++ /dev/null @@ -1,195 +0,0 @@ -import unittest - -import numpy as np -from pyrasterframes.rasterfunctions import * -from pyrasterframes.rf_types import * -from pyspark.sql.functions import * -from pyspark.sql import Row, DataFrame -from pyproj import CRS as pyCRS - -from . 
import TestEnvironment
-
-
-class TileUDTTests(TestEnvironment):
-
-    def setUp(self):
-        self.create_layer()
-
-    def test_mask_no_data(self):
-        t1 = Tile(np.array([[1, 2], [3, 4]]), CellType("int8ud3"))
-        self.assertTrue(t1.cells.mask[1][0])
-        self.assertIsNotNone(t1.cells[1][1])
-        self.assertEqual(len(t1.cells.compressed()), 3)
-
-        t2 = Tile(np.array([[1.0, 2.0], [float('nan'), 4.0]]), CellType.float32())
-        self.assertEqual(len(t2.cells.compressed()), 3)
-        self.assertTrue(t2.cells.mask[1][0])
-        self.assertIsNotNone(t2.cells[1][1])
-
-    def test_tile_udt_serialization(self):
-        from pyspark.sql.types import StructType, StructField
-
-        udt = TileUDT()
-        cell_types = (ct for ct in rf_cell_types() if not (ct.is_raw() or ("bool" in ct.base_cell_type_name())))
-
-        for ct in cell_types:
-            cells = (100 + np.random.randn(3, 3) * 100).astype(ct.to_numpy_dtype())
-
-            if ct.is_floating_point():
-                nd = 33.0
-            else:
-                nd = 33
-
-            cells[1][1] = nd
-            a_tile = Tile(cells, ct.with_no_data_value(nd))
-            round_trip = udt.fromInternal(udt.toInternal(a_tile))
-            self.assertEqual(a_tile, round_trip, "round-trip serialization for " + str(ct))
-
-            schema = StructType([StructField("tile", TileUDT(), False)])
-            df = self.spark.createDataFrame([{"tile": a_tile}], schema)
-
-            long_trip = df.first()["tile"]
-            self.assertEqual(long_trip, a_tile)
-
-    def test_masked_deser(self):
-        t = Tile(np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]),
-                 CellType('uint8'))
-
-        df = self.spark.createDataFrame([Row(t=t)])
-        roundtrip = df.select(rf_mask_by_value('t',
-                                               rf_local_greater('t', lit(6)),
-                                               1)) \
-            .first()[0]
-        self.assertEqual(
-            roundtrip.cells.mask.sum(),
-            3,
-            f"Expected {3} nodata values but found Tile "
-            f"{roundtrip}"
-        )
-
-    def test_udf_on_tile_type_input(self):
-        import numpy.testing
-        df = self.spark.read.raster(self.img_uri)
-        rf = self.rf
-
-        # create a trivial UDF that does something we already do with raster_functions
-        @udf('integer')
-        def my_udf(t):
-            a = t.cells
-            return a.size  # same as rf_dimensions.cols * rf_dimensions.rows
-
-        rf_result = rf.select(
-            (rf_dimensions('tile').cols.cast('int') * rf_dimensions('tile').rows.cast('int')).alias('expected'),
-            my_udf('tile').alias('result')).toPandas()
-
-        numpy.testing.assert_array_equal(
-            rf_result.expected.tolist(),
-            rf_result.result.tolist()
-        )
-
-        df_result = df.select(
-            (rf_dimensions(df.proj_raster).cols.cast('int') * rf_dimensions(df.proj_raster).rows.cast('int') -
-             my_udf(rf_tile(df.proj_raster))).alias('result')
-        ).toPandas()
-
-        numpy.testing.assert_array_equal(
-            np.zeros(len(df_result)),
-            df_result.result.tolist()
-        )
-
-    def test_udf_on_tile_type_output(self):
-        import numpy.testing
-
-        rf = self.rf
-
-        # create a trivial UDF that does something we already do with raster_functions
-        @udf(TileUDT())
-        def my_udf(t):
-            import numpy as np
-            return Tile(np.log1p(t.cells))
-
-        rf_result = rf.select(
-            rf_tile_max(
-                rf_local_subtract(
-                    my_udf(rf.tile),
-                    rf_log1p(rf.tile)
-                )
-            ).alias('expect_zeros')
-        ).collect()
-
-        # almost equal because of different implementations under the hood: C (numpy) versus Java (rf_)
-        numpy.testing.assert_almost_equal(
-            [r['expect_zeros'] for r in rf_result],
-            [0.0 for _ in rf_result],
-            decimal=6
-        )
-
-    def test_no_data_udf_handling(self):
-        from pyspark.sql.types import StructType, StructField
-
-        t1 = Tile(np.array([[1, 2], [0, 4]]), CellType.uint8())
-        self.assertEqual(t1.cell_type.to_numpy_dtype(), np.dtype("uint8"))
-        e1 = Tile(np.array([[2, 3], [0, 5]]), CellType.uint8())
-        schema = 
StructType([StructField("tile", TileUDT(), False)]) - df = self.spark.createDataFrame([{"tile": t1}], schema) - - @udf(TileUDT()) - def increment(t): - return t + 1 - - r1 = df.select(increment(df.tile).alias("inc")).first()["inc"] - self.assertEqual(r1, e1) - - def test_udf_np_implicit_type_conversion(self): - import math - import pandas - - a1 = np.array([[1, 2], [0, 4]]) - t1 = Tile(a1, CellType.uint8()) - exp_array = a1.astype('>f8') - - @udf(TileUDT()) - def times_pi(t): - return t * math.pi - - @udf(TileUDT()) - def divide_pi(t): - return t / math.pi - - @udf(TileUDT()) - def plus_pi(t): - return t + math.pi - - @udf(TileUDT()) - def less_pi(t): - return t - math.pi - - df = self.spark.createDataFrame(pandas.DataFrame([{"tile": t1}])) - r1 = df.select( - less_pi(divide_pi(times_pi(plus_pi(df.tile)))) - ).first()[0] - - self.assertTrue(np.all(r1.cells == exp_array)) - self.assertEqual(r1.cells.dtype, exp_array.dtype) - -class CrsUDTTests(TestEnvironment): - - def setUp(self): - pass - - - def test_crs_udt_serialization(self): - udt = CrsUDT() - - crs = CRS(pyCRS.from_epsg(4326).to_proj4()) - - roundtrip = udt.fromInternal(udt.toInternal(crs)) - assert(crs == roundtrip) - - def test_extract_from_raster(self): - # should be able to write a projected raster tile column to path like '/data/foo/file.tif' - from pyrasterframes.rasterfunctions import rf_crs - rf = self.spark.read.raster(self.img_uri) - crs: DataFrame = rf.select(rf_crs('proj_raster').alias('crs')).distinct() - assert(crs.schema.fields[0].dataType == CrsUDT()) - assert(crs.first()['crs'].proj4_str == '+proj=utm +zone=16 +datum=WGS84 +units=m +no_defs ') diff --git a/pyrasterframes/src/main/python/tests/VectorTypesTests.py b/pyrasterframes/src/main/python/tests/VectorTypesTests.py deleted file mode 100644 index 05d30a6ed..000000000 --- a/pyrasterframes/src/main/python/tests/VectorTypesTests.py +++ /dev/null @@ -1,208 +0,0 @@ -# -# This software is licensed under the Apache 2 license, quoted below. -# -# Copyright 2019 Astraea, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# [http://www.apache.org/licenses/LICENSE-2.0] -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. -# -# SPDX-License-Identifier: Apache-2.0 -# - -from pyrasterframes.rasterfunctions import * -from pyspark.sql import Row -from pyspark.sql.functions import * - -from . 
import TestEnvironment - -class VectorTypes(TestEnvironment): - - def setUp(self): - self.create_layer() - import pandas as pd - self.pandas_df = pd.DataFrame({ - 'eye': ['a', 'b', 'c', 'd'], - 'x': [0.0, 1.0, 2.0, 3.0], - 'y': [-4.0, -3.0, -2.0, -1.0], - }) - df = self.spark.createDataFrame(self.pandas_df) - df = df.withColumn("point_geom", - st_point(df.x, df.y) - ) - self.df = df.withColumn("poly_geom", st_bufferPoint(df.point_geom, lit(1250.0))) - - def test_spatial_relations(self): - from pyspark.sql.functions import udf, sum - from geomesa_pyspark.types import PointUDT - import shapely - import numpy.testing - - # Use python shapely UDT in a UDF - @udf("double") - def area_fn(g): - return g.area - - @udf("double") - def length_fn(g): - return g.length - - df = self.df.withColumn("poly_area", area_fn(self.df.poly_geom)) - df = df.withColumn("poly_len", length_fn(df.poly_geom)) - - # Return UDT in a UDF! - def some_point(g): - return g.representative_point() - - some_point_udf = udf(some_point, PointUDT()) - - df = df.withColumn("any_point", some_point_udf(df.poly_geom)) - # spark-side UDF/UDT are correct - intersect_total = df.agg(sum( - st_intersects(df.poly_geom, df.any_point).astype('double') - ).alias('s')).collect()[0].s - self.assertTrue(intersect_total == df.count()) - - # Collect to python driver in shapely UDT - pandas_df_out = df.toPandas() - - # Confirm we get a shapely type back from st_* function and UDF - self.assertIsInstance(pandas_df_out.poly_geom.iloc[0], shapely.geometry.Polygon) - self.assertIsInstance(pandas_df_out.any_point.iloc[0], shapely.geometry.Point) - - # And our spark-side manipulations were correct - xs_correct = pandas_df_out.point_geom.apply(lambda g: g.coords[0][0]) == self.pandas_df.x - self.assertTrue(all(xs_correct)) - - centroid_ys = pandas_df_out.poly_geom.apply(lambda g: - g.centroid.coords[0][1]).tolist() - numpy.testing.assert_almost_equal(centroid_ys, self.pandas_df.y.tolist()) - - # Including from UDF's - numpy.testing.assert_almost_equal( - pandas_df_out.poly_geom.apply(lambda g: g.area).values, - pandas_df_out.poly_area.values - ) - numpy.testing.assert_almost_equal( - pandas_df_out.poly_geom.apply(lambda g: g.length).values, - pandas_df_out.poly_len.values - ) - - def test_geometry_udf(self): - from geomesa_pyspark.types import PolygonUDT - # simple test that raster contents are not invalid - - # create a udf to buffer (the bounds) polygon - def _buffer(g, d): - return g.buffer(d) - - @udf("double") - def area(g): - return g.area - - buffer_udf = udf(_buffer, PolygonUDT()) - - buf_cells = 10 - with_poly = self.rf.withColumn('poly', buffer_udf(self.rf.geometry, lit(-15 * buf_cells))) # cell res is 15x15 - area = with_poly.select(area('poly') < area('geometry')) - area_result = area.collect() - self.assertTrue(all([r[0] for r in area_result])) - - def test_rasterize(self): - from geomesa_pyspark.types import PolygonUDT - - @udf(PolygonUDT()) - def buffer(g, d): - return g.buffer(d) - - # start with known polygon, the tile extents, **negative buffered** by 10 cells - buf_cells = 10 - with_poly = self.rf.withColumn('poly', buffer(self.rf.geometry, lit(-15 * buf_cells))) # cell res is 15x15 - - # rasterize value 16 into buffer shape. 
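        # Accounting for the expected count asserted below: negative-buffering by
        # 10 cells on each side shrinks the burned-in footprint by 20 cells in each
        # dimension, so
        #   burned = (cols - 2 * buf_cells) * (rows - 2 * buf_cells)
        #          = (194 - 20) * (250 - 20) = 174 * 230 = 40020 cells of value 16.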
- cols = 194 # from dims of tile - rows = 250 # from dims of tile - with_raster = with_poly.withColumn('rasterized', - rf_rasterize('poly', 'geometry', lit(16), lit(cols), lit(rows))) - result = with_raster.select(rf_tile_sum(rf_local_equal_int(with_raster.rasterized, 16)), - rf_tile_sum(with_raster.rasterized)) - # - expected_burned_in_cells = (cols - 2 * buf_cells) * (rows - 2 * buf_cells) - self.assertEqual(result.first()[0], float(expected_burned_in_cells)) - self.assertEqual(result.first()[1], 16. * expected_burned_in_cells) - - def test_parse_crs(self): - df = self.spark.createDataFrame([Row(id=1)]) - self.assertEqual(df.select(rf_mk_crs('EPSG:4326')).count(), 1) - - def test_reproject(self): - reprojected = self.rf.withColumn('reprojected', - st_reproject('center', rf_mk_crs('EPSG:4326'), rf_mk_crs('EPSG:3857'))) - reprojected.show() - self.assertEqual(reprojected.count(), 8) - - def test_geojson(self): - import os - sample = 'file://' + os.path.join(self.resource_dir, 'buildings.geojson') - geo = self.spark.read.geojson(sample) - geo.show() - self.assertEqual(geo.select('geometry').count(), 8) - - def test_xz2_index(self): - from pyspark.sql.functions import min as F_min - df = self.df.select(rf_xz2_index(self.df.poly_geom, rf_crs(lit("EPSG:4326"))).alias('index')) - expected = {22858201775, 38132946267, 38166922588, 38180072113} - indexes = {x[0] for x in df.collect()} - self.assertSetEqual(indexes, expected) - - # Test against proj_raster (has CRS and Extent embedded). - df = self.spark.read.raster(self.img_uri) - result_one_arg = df.select(rf_xz2_index('proj_raster').alias('ix')) \ - .agg(F_min('ix')).first()[0] - - result_two_arg = df.select(rf_xz2_index(rf_extent('proj_raster'), rf_crs('proj_raster')).alias('ix')) \ - .agg(F_min('ix')).first()[0] - - self.assertEqual(result_two_arg, result_one_arg) - self.assertEqual(result_one_arg, 55179438768) # this is a bit more fragile but less important - - # Custom resolution - df = self.df.select(rf_xz2_index(self.df.poly_geom, rf_crs(lit("EPSG:4326")), 3).alias('index')) - expected = {21, 36} - indexes = {x[0] for x in df.collect()} - self.assertSetEqual(indexes, expected) - - def test_z2_index(self): - df = self.df.select(rf_z2_index(self.df.poly_geom, rf_crs(lit("EPSG:4326"))).alias('index')) - - expected = {28596898472, 28625192874, 28635062506, 28599712232} - indexes = {x[0] for x in df.collect()} - self.assertSetEqual(indexes, expected) - - # Custom resolution - df = self.df.select(rf_z2_index(self.df.poly_geom, rf_crs(lit("EPSG:4326")), 6).alias('index')) - expected = {1704, 1706} - indexes = {x[0] for x in df.collect()} - self.assertSetEqual(indexes, expected) - - def test_agg_extent(self): - r = self.df.select(rf_agg_extent(st_extent('poly_geom')).alias('agg_extent')).select('agg_extent.*').first() - self.assertDictEqual( - r.asDict(), - Row(xmin=-0.011268955205879273, ymin=-4.011268955205879, xmax=3.0112432169934484, ymax=-0.9887567830065516).asDict() - ) - - def test_agg_reprojected_extent(self): - r = self.df.select(rf_agg_reprojected_extent(st_extent('poly_geom'), rf_mk_crs("EPSG:4326"), "EPSG:3857")).first()[0] - self.assertDictEqual( - r.asDict(), - Row(xmin=-1254.45435529069, ymin=-446897.63591665257, xmax=335210.0615704097, ymax=-110073.36515944061).asDict() - ) \ No newline at end of file diff --git a/pyrasterframes/src/main/python/tests/__init__.py b/pyrasterframes/src/main/python/tests/__init__.py deleted file mode 100644 index d273f8188..000000000 --- a/pyrasterframes/src/main/python/tests/__init__.py +++ /dev/null 
@@ -1,123 +0,0 @@ -# -# This software is licensed under the Apache 2 license, quoted below. -# -# Copyright 2019 Astraea, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# [http://www.apache.org/licenses/LICENSE-2.0] -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. -# -# SPDX-License-Identifier: Apache-2.0 -# - -import os -import unittest - -from pyrasterframes.utils import create_rf_spark_session - -import builtins - -app_name = 'PyRasterFrames test suite' - -# Setuptools/easy_install doesn't properly set the execute bit on the Spark scripts, -# So this preemptively attempts to do it. -def _chmodit(): - try: - from importlib.util import find_spec - module_home = find_spec("pyspark").origin - print(module_home) - bin_dir = os.path.join(os.path.dirname(module_home), 'bin') - for filename in os.listdir(bin_dir): - try: - os.chmod(os.path.join(bin_dir, filename), mode=0o555, follow_symlinks=True) - except OSError: - pass - except ImportError: - pass - -_chmodit() - - -def resource_dir(): - def pdir(curr): - return os.path.dirname(curr) - - here = os.path.dirname(os.path.realpath(__file__)) - scala_target = os.path.realpath(os.path.join(pdir(pdir(here)), 'scala-2.12')) - rez_dir = os.path.realpath(os.path.join(scala_target, 'test-classes')) - # If not running in build mode, try source dirs. - if not os.path.exists(rez_dir): - rez_dir = os.path.realpath(os.path.join(pdir(pdir(pdir(here))), 'test', 'resources')) - return rez_dir - - -def spark_test_session(): - spark = create_rf_spark_session(**{ - 'spark.master': 'local[*, 2]', - 'spark.ui.enabled': 'false', - 'spark.app.name': app_name, - #'spark.driver.extraJavaOptions': '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005' - }) - spark.sparkContext.setLogLevel('ERROR') - - print("Spark Version: " + spark.version) - print("Spark Config: " + str(spark.sparkContext._conf.getAll())) - - return spark - - -class TestEnvironment(unittest.TestCase): - """ - Base class for tests. 
- """ - - def rounded_compare(self, val1, val2): - print('Comparing {} and {} using round()'.format(val1, val2)) - return builtins.round(val1) == builtins.round(val2) - - @classmethod - def setUpClass(cls): - # hard-coded relative path for resources - cls.resource_dir = resource_dir() - - cls.spark = spark_test_session() - - cls.img_path = os.path.join(cls.resource_dir, 'L8-B8-Robinson-IL.tiff') - - cls.img_uri = 'file://' + cls.img_path - - cls.img_rgb_path = os.path.join(cls.resource_dir, 'L8-B4_3_2-Elkton-VA.tiff') - - cls.img_rgb_uri = 'file://' + cls.img_rgb_path - - @classmethod - def l8band_uri(cls, band_index): - return 'file://' + os.path.join(cls.resource_dir, 'L8-B{}-Elkton-VA.tiff'.format(band_index)) - - def create_layer(self): - from pyrasterframes.rasterfunctions import rf_convert_cell_type - # load something into a rasterframe - rf = self.spark.read.geotiff(self.img_uri) \ - .with_bounds() \ - .with_center() - - # convert the tile cell type to provide for other operations - self.rf = rf.withColumn('tile2', rf_convert_cell_type('tile', 'float32')) \ - .drop('tile') \ - .withColumnRenamed('tile2', 'tile').as_layer() - - self.prdf = self.spark.read.raster(self.img_uri) - self.df = self.prdf.withColumn('tile', rf_convert_cell_type('proj_raster', 'float32')) \ - .drop('proj_raster') - - def assert_png(self, bytes): - self.assertEqual(bytes[0:8], bytearray([0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]), "png header") - diff --git a/pyrasterframes/src/main/python/tests/coverage-report.sh b/pyrasterframes/src/main/python/tests/coverage-report.sh deleted file mode 100755 index 6b547e026..000000000 --- a/pyrasterframes/src/main/python/tests/coverage-report.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env bash -e - -# If `coverage` tool isn't installed: `{pip|conda} install coverage` - -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -cd "$( dirname "${BASH_SOURCE[0]}" )"/.. - -coverage run setup.py test && coverage html --omit='.eggs/*,setup.py' && open htmlcov/index.html \ No newline at end of file diff --git a/pyrasterframes/src/main/python/LICENSE.txt b/python/LICENSE.txt similarity index 100% rename from pyrasterframes/src/main/python/LICENSE.txt rename to python/LICENSE.txt diff --git a/pyrasterframes/src/main/python/README.md b/python/README.md similarity index 97% rename from pyrasterframes/src/main/python/README.md rename to python/README.md index ea8f163e2..e71f564d1 100644 --- a/pyrasterframes/src/main/python/README.md +++ b/python/README.md @@ -32,7 +32,7 @@ df.select(rf_local_add(df.tile, lit(3))).show(5, False) Reach out to us on [gitter][gitter]! -Issue tracking is through [github](https://github.com/locationtech/rasterframes/issues). +Issue tracking is through [github](https://github.com/locationtech/rasterframes/issues). ## Contributing @@ -44,9 +44,9 @@ For best results, we suggest using `conda` and the `conda-forge` channel to inst conda create -n rasterframes python==3.7 conda install --file ./requirements-condaforge.txt - + Then you can install the source dependencies: - pip install -e . + pip install -e . 
[gitter]: https://gitter.im/locationtech/rasterframes diff --git a/pyrasterframes/src/main/python/geomesa_pyspark/__init__.py b/python/docs/__init__.py similarity index 100% rename from pyrasterframes/src/main/python/geomesa_pyspark/__init__.py rename to python/docs/__init__.py diff --git a/pyrasterframes/src/main/python/docs/aggregation.pymd b/python/docs/aggregation.pymd similarity index 99% rename from pyrasterframes/src/main/python/docs/aggregation.pymd rename to python/docs/aggregation.pymd index feafb13aa..e32df5393 100644 --- a/pyrasterframes/src/main/python/docs/aggregation.pymd +++ b/python/docs/aggregation.pymd @@ -97,7 +97,7 @@ The @ref:[`rf_agg_stats`](reference.md#rf-agg-stats) function aggregates over al ```python, agg_stats stats = rf.agg(rf_agg_stats('proj_raster').alias('stats')) \ .select('stats.min', 'stats.max', 'stats.mean', 'stats.variance') -stats +stats ``` The @ref:[`rf_agg_local_stats`](reference.md#rf-agg-local-stats) function computes the element-wise local aggregate statistical summary as shown below. The DataFrame used in the previous two code blocks has unequal _tile_ dimensions, so a different DataFrame is used in this code block to avoid a runtime error. @@ -108,7 +108,7 @@ rf = spark.createDataFrame([ Row(id=3, tile=t1 * 3), Row(id=5, tile=t1 * 5) ]).agg(rf_agg_local_stats('tile').alias('stats')) - + agg_local_stats = rf.select('stats.min', 'stats.max', 'stats.mean', 'stats.variance').collect() for r in agg_local_stats: diff --git a/python/docs/build_docs.py b/python/docs/build_docs.py new file mode 100644 index 000000000..ff88ec651 --- /dev/null +++ b/python/docs/build_docs.py @@ -0,0 +1,151 @@ +# +# This software is licensed under the Apache 2 license, quoted below. +# +# Copyright 2019 Astraea, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# [http://www.apache.org/licenses/LICENSE-2.0] +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# +# SPDX-License-Identifier: Apache-2.0 +# + + +import traceback +from enum import Enum +from glob import glob +from os import path +from typing import List + +import pweave +import typer +from pweave import PwebPandocFormatter + + +# Setuptools/easy_install doesn't properly set the execute bit on the Spark scripts, +# So this preemptively attempts to do it. +def _chmodit(): + try: + import os + from importlib.util import find_spec + + module_home = find_spec("pyspark").origin + print(module_home) + bin_dir = os.path.join(os.path.dirname(module_home), "bin") + for filename in os.listdir(bin_dir): + try: + os.chmod(os.path.join(bin_dir, filename), mode=0o555, follow_symlinks=True) + except OSError: + pass + except ImportError: + pass + + +_chmodit() + + +class PegdownMarkdownFormatter(PwebPandocFormatter): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # Pegdown doesn't support the width and label options. 
+ def make_figure_string(self, figname, width, label, caption=""): + return "![%s](%s)" % (caption, figname) + + +app = typer.Typer() + + +def _dest_file(src_file, ext): + return path.splitext(src_file)[0] + ext + + +def _divided(msg): + divider = "-" * 50 + return divider + "\n" + msg + "\n" + divider + + +def _get_files(): + here = path.abspath(path.dirname(__file__)) + return list(filter(lambda x: not path.basename(x)[:1] == "_", glob(path.join(here, "*.pymd")))) + + +class Format(str, Enum): + html = "html" + markdown = "markdown" + notebook = "notebook" + pandoc2html = "pandoc2html" + + +@app.command() +def pweave_docs( + files: List[str] = typer.Option( + _get_files(), help="Specific files to pweave. Defaults to all in `docs` directory." + ), + format: Format = typer.Option( + Format.markdown, help="Output format type. Defaults to `markdown`" + ), + quick: bool = typer.Option( + False, + help="Check to see if the source file is newer than existing output before building. Defaults to `False`.", + ), +): + + """Pweave PyRasterFrames documentation scripts""" + + ext = ".md" + bad_words = ["Error"] + pweave.rcParams["chunk"]["defaultoptions"].update({"wrap": False, "dpi": 175}) + + if format == Format.markdown: + pweave.PwebFormats.formats["markdown"] = { + "class": PegdownMarkdownFormatter, + "description": "Pegdown compatible markdown", + } + elif format == Format.notebook: + # Just convert to an unevaluated notebook. + pweave.rcParams["chunk"]["defaultoptions"].update({"evaluate": False}) + ext = ".ipynb" + elif format == Format.html: + # `html` doesn't do quite what one expects... only replaces code blocks, leaving markdown in place + format = Format.pandoc2html + + for file in sorted(files, reverse=False): + name = path.splitext(path.basename(file))[0] + dest = _dest_file(file, ext) + + if (not quick) or (not path.exists(dest)) or (path.getmtime(dest) < path.getmtime(file)): + print(_divided("Running %s" % name)) + try: + pweave.weave(file=str(file), doctype=format) + if format == Format.markdown: + if not path.exists(dest): + raise FileNotFoundError( + "Markdown file '%s' didn't get created as expected" % dest + ) + with open(dest, "r") as result: + for (n, line) in enumerate(result): + for word in bad_words: + if word in line: + raise ChildProcessError( + "Error detected on line %s in %s:\n%s" % (n + 1, dest, line) + ) + + except Exception: + print(_divided("%s Failed:" % file)) + print(traceback.format_exc()) + # raise typer.Exit(code=1) + else: + print(_divided("Skipping %s" % name)) + + +if __name__ == "__main__": + app() diff --git a/pyrasterframes/src/main/python/docs/description.pymd b/python/docs/description.pymd similarity index 100% rename from pyrasterframes/src/main/python/docs/description.pymd rename to python/docs/description.pymd diff --git a/pyrasterframes/src/main/python/docs/getting-started.pymd b/python/docs/getting-started.pymd similarity index 99% rename from pyrasterframes/src/main/python/docs/getting-started.pymd rename to python/docs/getting-started.pymd index 11d3c8363..2ae114c2b 100644 --- a/pyrasterframes/src/main/python/docs/getting-started.pymd +++ b/python/docs/getting-started.pymd @@ -125,7 +125,7 @@ libraryDependencies ++= Seq( // This is optional. Provides access to AWS PDS catalogs. "org.locationtech.rasterframes" %% "rasterframes-experimental" % ${VERSION} ) -``` +``` RasterFrames is compatible with Spark 2.4.x. 
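For the Python side of the same getting-started flow, the docs in this diff consistently bootstrap Spark through `create_rf_spark_session`. A minimal sketch, assuming `pyrasterframes` is installed; the master string and sample raster URL are taken from other pages in this diff and are illustrative defaults, not required values:

```python
# Minimal PySpark bootstrap mirroring the .pymd docs in this diff.
from pyrasterframes.utils import create_rf_spark_session
from pyrasterframes.rasterfunctions import rf_crs

spark = create_rf_spark_session('local[4]')  # master string is illustrative
df = spark.read.raster('https://rasterframes.s3.amazonaws.com/samples/luray_snp/B02.tif')
df.select(rf_crs('proj_raster')).distinct().show()
```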
diff --git a/pyrasterframes/src/main/python/docs/ipython.pymd b/python/docs/ipython.pymd similarity index 96% rename from pyrasterframes/src/main/python/docs/ipython.pymd rename to python/docs/ipython.pymd index 581e584c5..263a0c44f 100644 --- a/pyrasterframes/src/main/python/docs/ipython.pymd +++ b/python/docs/ipython.pymd @@ -44,17 +44,17 @@ rf # or `display(rf)`, or `rf.display()` By default the RasterFrame sample display renders 5 rows. Because the `IPython.display.display` function doesn't pass parameters to the underlying rendering functions, we have to provide a different means of passing parameters to the rendering code. Pandas' approach to this is to use global settings via `set_option`/`get_option`. We take a more functional approach and have the user invoke an explicit `display` method: -```python custom_display, evaluate=False +```python custom_display, evaluate=False rf.display(num_rows=1, truncate=True) -``` +``` -```python custom_display_mime, echo=False +```python custom_display_mime, echo=False rf.display(num_rows=1, truncate=True, mimetype='text/markdown') -``` +``` ### Pandas -There is similar rendering support injected into the Pandas by the `rf_ipython` module, for Pandas Dataframes having Tiles in them: +There is similar rendering support injected into Pandas by the `rf_ipython` module, for Pandas DataFrames that contain Tiles: ```python pandas_dataframe # Limit copy of data from Spark to a few tiles. @@ -66,7 +66,7 @@ pandas_df # or `display(pandas_df)` RasterFrames uses the "Viridis" color ramp as the default color profile for the tile column. There are other options for reasoning about how color should be applied in the results. -### Color Composite +### Color Composite As shown in the @ref:[Writing Raster Data](raster-write.md) section, composites can be constructed for visualization: @@ -76,7 +76,7 @@ from IPython.display import Image # For telling IPython how to interpret the PNG three_band_rf = spark.read.raster(source=[[scene(1), scene(4), scene(3)]]) composite_rf = three_band_rf.withColumn('png', rf_render_png('proj_raster_0', 'proj_raster_1', 'proj_raster_2')) -png_bytes = composite_rf.select('png').first()['png'] +png_bytes = composite_rf.select('png').first()['png'] Image(png_bytes) ``` diff --git a/pyrasterframes/src/main/python/docs/languages.pymd b/python/docs/languages.pymd similarity index 96% rename from pyrasterframes/src/main/python/docs/languages.pymd rename to python/docs/languages.pymd index b4d189fbe..fca39f5f2 100644 --- a/pyrasterframes/src/main/python/docs/languages.pymd +++ b/python/docs/languages.pymd @@ -1,6 +1,6 @@ # Scala and SQL -One of the great powers of RasterFrames is the ability to express computation in multiple programming languages. The content in this manual focuses on Python because it is the most commonly used language in data science and GIS analytics. However, Scala (the implementation language of RasterFrames) and SQL (commonly used in many domains) are also fully supported. +One of the great powers of RasterFrames is the ability to express computation in multiple programming languages. The content in this manual focuses on Python because it is the most commonly used language in data science and GIS analytics. However, Scala (the implementation language of RasterFrames) and SQL (commonly used in many domains) are also fully supported. 
Examples in Python can be mechanically translated into the other two languages without much difficulty once the naming conventions are understood. In the sections below we will show the same example program in each language. To do so we will compute the average NDVI per month for a single _tile_ in Tanzania. @@ -33,11 +33,11 @@ red_nir_monthly_2017 = modis \ col('B02').alias('nir') ) \ .where( - (year('acquisition_date') == 2017) & - (dayofmonth('acquisition_date') == 15) & + (year('acquisition_date') == 2017) & + (dayofmonth('acquisition_date') == 15) & (col('granule_id') == 'h21v09') ) -red_nir_monthly_2017.printSchema() +red_nir_monthly_2017.printSchema() ``` ### Step 3: Read tiles @@ -125,7 +125,7 @@ grouped The latest Scala API documentation is available here: -* [Scala API Documentation](https://rasterframes.io/latest/api/index.html) +* [Scala API Documentation](https://rasterframes.io/latest/api/index.html) ### Step 1: Load the catalog @@ -178,6 +178,6 @@ val result = red_nir_tiles_monthly_2017 .agg(rf_agg_stats(rf_normalized_difference($"nir", $"red")) as "ndvi_stats") .orderBy("month") .select("month", "ndvi_stats.*") - -result.show() + +result.show() ``` diff --git a/pyrasterframes/src/main/python/docs/local-algebra.pymd b/python/docs/local-algebra.pymd similarity index 99% rename from pyrasterframes/src/main/python/docs/local-algebra.pymd rename to python/docs/local-algebra.pymd index 3b5e3d27f..0e1f13d06 100644 --- a/pyrasterframes/src/main/python/docs/local-algebra.pymd +++ b/python/docs/local-algebra.pymd @@ -25,7 +25,7 @@ Here is an example of computing the Normalized Differential Vegetation Index (ND > NDVI is often used worldwide to monitor drought, monitor and predict agricultural production, assist in predicting hazardous fire zones, and map desert encroachment. The NDVI is preferred for global vegetation monitoring because it helps to compensate for changing illumination conditions, surface slope, aspect, and other extraneous factors (Lillesand. _Remote sensing and image interpretation_. 2004) -We will apply the @ref:[catalog pattern](raster-catalogs.md) for defining the data we wish to process. To compute NDVI we need to compute local algebra on the *red* and *near infrared* (nir) bands: +We will apply the @ref:[catalog pattern](raster-catalogs.md) for defining the data we wish to process. To compute NDVI we need to perform local algebra on the *red* and *near infrared* (nir) bands: nir - red NDVI = --------- diff --git a/pyrasterframes/src/main/python/docs/masking.pymd b/python/docs/masking.pymd similarity index 94% rename from pyrasterframes/src/main/python/docs/masking.pymd rename to python/docs/masking.pymd index c25b701c5..0949fc315 100644 --- a/pyrasterframes/src/main/python/docs/masking.pymd +++ b/python/docs/masking.pymd @@ -13,10 +13,10 @@ from pyrasterframes.rf_types import Tile spark = create_rf_spark_session() ``` -Masking is a common operation in raster processing. It is setting certain cells to the @ref:[NoData value](nodata-handling.md). This is usually done to remove low-quality observations from the raster processing. Another related use case is to @ref:["clip"](masking.md#clipping) a raster to a given polygon. +Masking is a common operation in raster processing. It consists of setting certain cells to the @ref:[NoData value](nodata-handling.md). This is usually done to remove low-quality observations from further processing. Another related use case is to @ref:["clip"](masking.md#clipping) a raster to a given polygon. 
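Before walking through the two schemes, it may help to see the core idea outside of Spark. A minimal NumPy sketch of the concept (not the RasterFrames API; the class values anticipate the Sentinel-2 example below, and the choice of 0 as NoData is an assumption):

```python
# Plain-NumPy illustration of masking: set data cells to a NoData value
# wherever a quality band is flagged. RasterFrames performs the cell-wise
# equivalent with rf_mask and related functions.
import numpy as np

data = np.array([[10, 12], [13, 11]], dtype='uint16')  # hypothetical band values
scl = np.array([[4, 9], [1, 4]], dtype='uint8')        # hypothetical scene classification
exclude = [0, 1, 8, 9, 10]  # classes treated as low quality in the example below
nodata = 0                  # assumes 0 never occurs as a real measurement

masked = np.where(np.isin(scl, exclude), nodata, data)
print(masked)  # [[10  0]
               #  [ 0 11]]
```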
In this section we will demonstrate two common schemes for masking. In Sentinel 2, there is a separate classification raster that defines low-quality areas. In Landsat 8, several quality factors are measured and the indications are packed into a single integer, which we have to unpack. - + ## Masking Sentinel 2 Let's demonstrate masking with a pair of bands of Sentinel-2 data. The measurement bands we will use, blue and green, have no defined NoData. They share quality information from a separate file called the scene classification (SCL), which delineates areas of missing data and probable clouds. For more information on this, see the [Sentinel-2 algorithm overview](https://earth.esa.int/web/sentinel/technical-guides/sentinel-2-msi/level-2a/algorithm). Figure 3 tells us how to interpret the scene classification. For this example, we will exclude NoData, defective pixels, probable clouds, and cirrus clouds: values 0, 1, 8, 9, and 10. @@ -44,9 +44,9 @@ unmasked.select(rf_cell_type('blue'), rf_cell_type('scl')).distinct() ### Define CellType for Masked Tile -Because there is not a NoData already defined for the blue band, we must choose one. If we try to apply a masking function to a tile whose cell type has no NoData defined, an error will be thrown. - -In this particular example, the minimum value of all cells in all tiles in the column is greater than zero, so we can use 0 as the NoData value. We will construct a new `CellType` object to represent this. +Because there is no NoData value already defined for the blue band, we must choose one. If we try to apply a masking function to a tile whose cell type has no NoData defined, an error will be thrown. + +In this particular example, the minimum value of all cells in all tiles in the column is greater than zero, so we can use 0 as the NoData value. We will construct a new `CellType` object to represent this. ```python, pick_nd blue_min = unmasked.agg(rf_agg_stats('blue').min.alias('blue_min')) @@ -95,7 +95,7 @@ display(sample[1]) ### Transferring Mask -We can now apply the same mask from the blue column to the green column. Note here we have supressed the step of explicitly checking what a "safe" NoData value for the green band should be. +We can now apply the same mask from the blue column to the green column. Note here we have suppressed the step of explicitly checking what a "safe" NoData value for the green band should be. ```python, mask_green masked.withColumn('green_masked', rf_mask(rf_convert_cell_type('green', masked_blue_ct), 'blue_masked')) \ @@ -105,10 +105,10 @@ masked.withColumn('green_masked', rf_mask(rf_convert_cell_type('green', masked_b ## Masking Landsat 8 -We will work with the Landsat scene [here](https://landsat-pds.s3.us-west-2.amazonaws.com/c1/L8/153/075/LC08_L1TP_153075_20190718_20190731_01_T1/index.html). For simplicity, we will just use two of the seven 30m bands. The quality mask for all bands is all contained in the `BQA` band. +We will work with the Landsat scene [here](https://landsat-pds.s3.us-west-2.amazonaws.com/c1/L8/153/075/LC08_L1TP_153075_20190718_20190731_01_T1/index.html). For simplicity, we will just use two of the seven 30m bands. The quality mask for all bands is contained in the `BQA` band. 
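Since the Landsat quality factors are bit-packed, it is worth seeing the unpacking step in plain Python before the DataFrame version. A sketch of the cell-wise operation behind `rf_mask_by_bit`/`rf_mask_by_bits` (the sample value is hypothetical; bit positions follow the table below):

```python
# Extract a bit field from a packed QA integer: shift the field down to the
# low bits, then mask off everything above it.
def read_bits(value: int, start_bit: int, num_bits: int = 1) -> int:
    return (value >> start_bit) & ((1 << num_bits) - 1)

bqa = 0b0000000000010001   # hypothetical BQA cell value, for illustration only
print(read_bits(bqa, 0))   # bit 0, designated fill -> 1
print(read_bits(bqa, 4))   # bit 4, cloud -> 1
```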
-```python, build_l8_df +```python, build_l8_df base_url = 'https://landsat-pds.s3.us-west-2.amazonaws.com/c1/L8/153/075/LC08_L1TP_153075_20190718_20190731_01_T1/LC08_L1TP_153075_20190718_20190731_01_T1_' data4 = base_url + 'B4.TIF' data2 = base_url + 'B2.TIF' @@ -119,19 +119,19 @@ l8_df = spark.read.raster([[data4, data2, mask]]) \ .withColumnRenamed('proj_raster_2', 'mask') ``` -Masking is described [on the Landsat Missions page](https://www.usgs.gov/land-resources/nli/landsat/landsat-collection-1-level-1-quality-assessment-band). It is pretty dense. Focus for this data set is the Collection 1 Level-1 for Landsat 8. - +Masking is described [on the Landsat Missions page](https://www.usgs.gov/land-resources/nli/landsat/landsat-collection-1-level-1-quality-assessment-band). It is pretty dense. Focus for this data set is the Collection 1 Level-1 for Landsat 8. + There are several inter-related factors to consider. In this exercise we will mask away the following. - + * Designated Fill = yes * Cloud = yes * Cloud Shadow Confidence = Medium or High * Cirrus Confidence = Medium or High - + Note that you should consider your application and do your own exploratory analysis to determine the most appropriate mask! - + According to the information on the Landsat site this translates to masking by bit values in the BQA according to the following table. - + | Description | Value | Bits | Bit values | |-------------------- |---------- |------- |---------------- | | Designated fill | yes | 0 | 1 | @@ -145,9 +145,9 @@ In this case, we will use the value of 0 as the NoData in the band data. Inspect The code chunk below works through each of the rows in the table above. The first expression sets the cell type to have the selected NoData. The @ref:[`rf_mask_by_bit`](reference.md#rf-mask-by-bit) and @ref:[`rf_mask_by_bits`](reference.md#rf-mask-by-bits) functions extract the selected bit or bits from the `mask` cells and compare them to the provided values. ```python, build_l8_mask -l8_df = l8_df.withColumn('data_masked', # set to cell type that has a nodata +l8_df = l8_df.withColumn('data_masked', # set to cell type that has a nodata rf_convert_cell_type('data', CellType.uint16())) \ - .withColumn('data_masked', # fill yes + .withColumn('data_masked', # fill yes rf_mask_by_bit('data_masked', 'mask', 0, 1)) \ .withColumn('data_masked', # cloud yes rf_mask_by_bit('data_masked', 'mask', 4, 1)) \ @@ -171,9 +171,9 @@ Clipping is the use of a polygon to determine the areas to mask in a raster. Typ ```python, reproject_geom -to_rasterize = masked.withColumn('geom_4326', +to_rasterize = masked.withColumn('geom_4326', st_bufferPoint( - st_point(lit(-78.0783132), lit(38.3184340)), + st_point(lit(-78.0783132), lit(38.3184340)), lit(15000))) \ .withColumn('geom_native', st_reproject('geom_4326', rf_mk_crs('epsg:4326'), rf_crs('blue_masked'))) ``` @@ -181,7 +181,7 @@ to_rasterize = masked.withColumn('geom_4326', Second, we will rasterize the geometry, or burn-in the geometry into the same grid as the raster. ```python, rasterize -to_clip = to_rasterize.withColumn('clip_raster', +to_clip = to_rasterize.withColumn('clip_raster', rf_rasterize('geom_native', rf_geometry('blue_masked'), lit(1), rf_dimensions('blue_masked').cols, rf_dimensions('blue_masked').rows)) # visualize some of the edges of our circle @@ -193,11 +193,11 @@ to_clip.select('blue_masked', 'clip_raster') \ Finally, we create a new _tile_ column with the blue band clipped to our circle. 
Again we will use the `rf_mask` function to pass the NoData regions along from the rasterized geometry. ```python, clip -to_clip.select('blue_masked', +to_clip.select('blue_masked', 'clip_raster', rf_mask('blue_masked', 'clip_raster').alias('blue_clipped')) \ .filter(rf_data_cells('clip_raster') > 20) \ - .orderBy(rf_data_cells('clip_raster')) + .orderBy(rf_data_cells('clip_raster')) ``` -This kind of clipping technique is further used in @ref:[zonal statistics](zonal-algebra.md). \ No newline at end of file +This kind of clipping technique is further used in @ref:[zonal statistics](zonal-algebra.md). diff --git a/pyrasterframes/src/main/python/docs/nodata-handling.pymd b/python/docs/nodata-handling.pymd similarity index 99% rename from pyrasterframes/src/main/python/docs/nodata-handling.pymd rename to python/docs/nodata-handling.pymd index 7d27a5536..915553c21 100644 --- a/pyrasterframes/src/main/python/docs/nodata-handling.pymd +++ b/python/docs/nodata-handling.pymd @@ -42,7 +42,7 @@ We can also inspect the cell type of a given _tile_ or `proj_raster` column. ```python, ct_from_sen cell_types = spark.read.raster('https://rasterframes.s3.amazonaws.com/samples/luray_snp/B02.tif') \ .select(rf_cell_type('proj_raster')).distinct() -cell_types +cell_types ``` ### Understanding Cell Types and NoData @@ -181,7 +181,7 @@ sums = rf.select( rf_cell_type('y'), rf_cell_type(rf_local_add('x', 'y')).alias('xy_sum'), ) -sums +sums ``` Combining _tile_ columns of different cell types gets a little trickier when user-defined NoData cell types are involved. Let's create two _tile_ columns: one with a NoData value of 1, and one with a NoData value of 2 (using our previously defined `get_nodata_ct` function). diff --git a/pyrasterframes/src/main/python/docs/numpy-pandas.pymd b/python/docs/numpy-pandas.pymd similarity index 100% rename from pyrasterframes/src/main/python/docs/numpy-pandas.pymd rename to python/docs/numpy-pandas.pymd diff --git a/pyrasterframes/src/main/python/docs/raster-catalogs.pymd b/python/docs/raster-catalogs.pymd similarity index 99% rename from pyrasterframes/src/main/python/docs/raster-catalogs.pymd rename to python/docs/raster-catalogs.pymd index 3b634b767..1af68c2c6 100644 --- a/pyrasterframes/src/main/python/docs/raster-catalogs.pymd +++ b/python/docs/raster-catalogs.pymd @@ -72,14 +72,14 @@ scene2_B02 = "https://modis-pds.s3.amazonaws.com/MCD43A4.006/04/09/2018188/MCD43 two_d_cat_pd = pd.DataFrame([ {'B01': [scene1_B01], 'B02': [scene1_B02]}, {'B01': [scene2_B01], 'B02': [scene2_B02]} -]) +]) # or two_d_cat_df = spark.createDataFrame([ Row(B01=scene1_B01, B02=scene1_B02), Row(B01=scene2_B01, B02=scene2_B02) ]) - + # As CSV string two_d_cat_csv = '\n'.join(['B01,B02', scene1_B01 + "," + scene1_B02, scene2_B01 + "," + scene2_B02]) ``` diff --git a/pyrasterframes/src/main/python/docs/raster-join.pymd b/python/docs/raster-join.pymd similarity index 97% rename from pyrasterframes/src/main/python/docs/raster-join.pymd rename to python/docs/raster-join.pymd index 8419ddd42..29bd35b4a 100644 --- a/pyrasterframes/src/main/python/docs/raster-join.pymd +++ b/python/docs/raster-join.pymd @@ -17,8 +17,8 @@ spark = create_rf_spark_session(**{ ## Description A common operation for raster data is reprojecting or warping the data to a different @ref:[CRS][CRS] with a specific @link:[transform](https://gdal.org/user/raster_data_model.html#affine-geotransform) { open=new }. In many use cases, the particulars of the warp operation depend on another set of raster data. 
Furthermore, the warp is done to put both sets of raster data onto a common grid, enabling the datasets to be manipulated together. - -In RasterFrames, you can perform a **Raster Join** on two DataFrames containing raster data. + +In RasterFrames, you can perform a **Raster Join** on two DataFrames containing raster data. The operation will perform a _spatial join_ based on the [CRS][CRS] and [extent][extent] data in each DataFrame. By default it is a left join and uses an intersection operator. For each candidate row, all _tile_ columns on the right-hand side are warped to match the left-hand side's [CRS][CRS], [extent][extent], and dimensions. Warping relies on GeoTrellis library code. You can specify the resampling method to be applied as one of: nearest_neighbor, bilinear, cubic_convolution, cubic_spline, lanczos, average, mode, median, max, min, or sum. The operation is also an aggregate, with multiple intersecting right-hand side tiles `merge`d into the result. There is no guarantee about the ordering of tiles used to select cell values in the case of overlapping tiles. @@ -44,7 +44,7 @@ rj = landsat8.raster_join(modis, resampling_method="cubic_convolution") # Show some non-empty tiles rj.select('landsat', 'modis', 'crs', 'extent') \ .filter(rf_data_cells('modis') > 0) \ - .filter(rf_tile_max('landsat') > 0) + .filter(rf_tile_max('landsat') > 0) ``` ## Additional Options @@ -57,14 +57,14 @@ The following optional arguments are allowed: * `right_crs` - the column on the right-hand DataFrame giving the [CRS][CRS] of the tile columns * `join_exprs` - a single column expression as would be used in the [`on` parameter of `join`](https://spark.apache.org/docs/latest/api/python/pyspark.sql.html#pyspark.sql.DataFrame.join) * `resampling_method` - resampling algorithm to use in reprojection of right-hand tile column - - - + + + Note that the `join_exprs` will override the join behavior described above. By default the expression is equivalent to: - + ```python, join_expr, evaluate=False st_intersects( - st_geometry(left[left_extent]), + st_geometry(left[left_extent]), st_reproject(st_geometry(right[right_extent]), right[right_crs], left[left_crs]) ) ``` @@ -77,4 +77,4 @@ Note the aggregating methods are intended for downsampling. For example a 0.25 f [CRS]: concepts.md#coordinate-reference-system--crs [extent]: concepts.md#extent -[spatial-index]:raster-read.md#spatial-indexing-and-partitioning \ No newline at end of file +[spatial-index]:raster-read.md#spatial-indexing-and-partitioning diff --git a/pyrasterframes/src/main/python/docs/raster-read.pymd b/python/docs/raster-read.pymd similarity index 97% rename from pyrasterframes/src/main/python/docs/raster-read.pymd rename to python/docs/raster-read.pymd index b95ed066a..6b7e1445b 100644 --- a/pyrasterframes/src/main/python/docs/raster-read.pymd +++ b/python/docs/raster-read.pymd @@ -42,10 +42,10 @@ rf.select( ) ``` -You can also see that the single raster has been broken out into many rows containing arbitrary non-overlapping regions. Doing so takes advantage of parallel in-memory reads from the cloud hosted data source and allows Spark to work on manageable amounts of data per row. +You can also see that the single raster has been broken out into many rows containing arbitrary non-overlapping regions. Doing so takes advantage of parallel in-memory reads from the cloud-hosted data source and allows Spark to work on manageable amounts of data per row. The map below shows downsampled imagery with the bounds of the individual tiles. 
-@@@ note +@@@ note The image contains visible "seams" between the tile extents due to reprojection and downsampling used to create the image. The native imagery in the DataFrame does not contain any gaps in the source raster's coverage. @@ -54,13 +54,13 @@ The native imagery in the DataFrame does not contain any gaps in the source rast ```python, folium_map_of_tile_extents, echo=False from pyrasterframes.rf_types import Extent -import folium -import pyproj +import folium +import pyproj from functools import partial from shapely.ops import transform as shtransform from shapely.geometry import box import geopandas -import numpy +import numpy wm_crs = 'EPSG:3857' crs84 = 'urn:ogc:def:crs:OGC:1.3:CRS84' @@ -98,8 +98,8 @@ ntiles = numpy.nanquantile(ov.cells, [0.03, 0.97]) # use `filled` because folium doesn't know how to maskedArray a = numpy.clip(ov.cells.filled(0), ntiles[0], ntiles[1]) - -m = folium.Map([crs84_extent.centroid.y, crs84_extent.centroid.x], + +m = folium.Map([crs84_extent.centroid.y, crs84_extent.centroid.x], zoom_start=9) \ .add_child( folium.raster_layers.ImageOverlay( @@ -135,17 +135,17 @@ bands = [f'B{b}' for b in [4, 5]] uris = [f'https://landsat-pds.s3.us-west-2.amazonaws.com/c1/L8/014/032/LC08_L1TP_014032_20190720_20190731_01_T1/LC08_L1TP_014032_20190720_20190731_01_T1_{b}.TIF' for b in bands] catalog = ','.join(bands) + '\n' + ','.join(uris) -rf = (spark.read.raster(catalog, bands) - # Adding semantic names - .withColumnRenamed('B4', 'red').withColumnRenamed('B5', 'NIR') - # Adding tile center point for reference +rf = (spark.read.raster(catalog, bands) + # Adding semantic names + .withColumnRenamed('B4', 'red').withColumnRenamed('B5', 'NIR') + # Adding tile center point for reference .withColumn('longitude_latitude', st_reproject(st_centroid(rf_geometry('red')), rf_crs('red'), lit('EPSG:4326'))) - # Compute NDVI + # Compute NDVI .withColumn('NDVI', rf_normalized_difference('NIR', 'red')) - # For the purposes of inspection, filter out rows where there's not much vegetation - .where(rf_tile_sum('NDVI') > 10000) - # Order output - .select('longitude_latitude', 'red', 'NIR', 'NDVI')) + # For the purposes of inspection, filter out rows where there's not much vegetation + .where(rf_tile_sum('NDVI') > 10000) + # Order output + .select('longitude_latitude', 'red', 'NIR', 'NDVI')) display(rf) ``` @@ -193,7 +193,7 @@ mb2.printSchema() RasterFrames relies on three different I/O drivers, selected based on a combination of scheme, file extensions, and library availability. GDAL is used by default if a compatible version of GDAL (>= 2.4) is installed, and if GDAL supports the specified scheme. If GDAL is not available, either the _Java I/O_ or _Hadoop_ driver will be selected, depending on scheme. -Note: The GDAL driver is the only one that can read non-GeoTIFF files. +Note: The GDAL driver is the only one that can read non-GeoTIFF files. | Prefix | GDAL | Java I/O | Hadoop | @@ -249,7 +249,7 @@ MODIS data products are delivered on a regular, consistent grid, making identifi For example, MODIS data right above the equator is all grid coordinates with `v07`. 
```python, catalog_filtering -equator = modis_catalog.where(F.col('gid').like('%v07%')) +equator = modis_catalog.where(F.col('gid').like('%v07%')) equator.select('date', 'gid') ``` @@ -266,7 +266,7 @@ Observe the schema of the resulting DataFrame has a projected raster struct for sample = rf \ .select('gid', rf_extent('red'), rf_extent('nir'), rf_tile('red'), rf_tile('nir')) \ .where(~rf_is_no_data_tile('red')) -sample.limit(3) +sample.limit(3) ``` ## Lazy Raster Reading @@ -296,7 +296,7 @@ This is an experimental feature, and may be removed. It's often desirable to take extra steps in ensuring your data is effectively distributed over your computing resources. One way of doing that is using something called a ["space filling curve"](https://en.wikipedia.org/wiki/Space-filling_curve), which turns an N-dimensional value into a one-dimensional value, with properties that favor keeping entities near each other in N-space near each other in index space. In particular, RasterFrames supports space-filling curves mapping the geographic location of _tiles_ to a one-dimensional index space called [`xz2`](https://www.geomesa.org/documentation/user/datastores/index_overview.html). To have RasterFrames add spatial-index-based partitioning to raster reads, use the `spatial_index_partitions` parameter. By default it will use the same number of partitions as configured in [`spark.sql.shuffle.partitions`](https://spark.apache.org/docs/latest/sql-performance-tuning.html#other-configuration-options). - + ```python, spatial_indexing df = spark.read.raster(uri, spatial_index_partitions=True) df diff --git a/pyrasterframes/src/main/python/docs/raster-write.pymd b/python/docs/raster-write.pymd similarity index 98% rename from pyrasterframes/src/main/python/docs/raster-write.pymd rename to python/docs/raster-write.pymd index d354532a3..befc6329c 100644 --- a/pyrasterframes/src/main/python/docs/raster-write.pymd +++ b/python/docs/raster-write.pymd @@ -32,7 +32,7 @@ rf = spark.read.raster(scene(2), tile_dimensions=(256, 256)) ## Overview Rasters -In cases where writing and reading to/from a GeoTIFF isn't convenient, RasterFrames provides the @ref:[`rf_agg_overview_raster`](reference.md#rf-agg-overview-raster) aggregate function, where you can construct a single raster (rendered as a tile) downsampled from all or a subset of the DataFrame. This allows you to effectively construct the same operations the GeoTIFF writer performs, but without the file I/O. +In cases where writing and reading to/from a GeoTIFF isn't convenient, RasterFrames provides the @ref:[`rf_agg_overview_raster`](reference.md#rf-agg-overview-raster) aggregate function, where you can construct a single raster (rendered as a tile) downsampled from all or a subset of the DataFrame. This allows you to effectively construct the same operations the GeoTIFF writer performs, but without the file I/O. The `rf_agg_overview_raster` function will reproject data to the commonly used ["web mercator"](https://en.wikipedia.org/wiki/Web_Mercator_projection) CRS. You must specify an "Area of Interest" (AOI) in web mercator. You can use @ref:[`rf_agg_reprojected_extent`](reference.md#rf-agg-reprojected-extent) to compute the extent of a DataFrame in any CRS or mix of CRSs. @@ -57,7 +57,7 @@ GeoTIFF is one of the most common file formats for spatial data, providing flexi ### Limitations and mitigations -One downside to GeoTIFF is that it is not a big-data native format. 
To create a GeoTIFF, all the data to be written must be `collect`ed in the memory of the Spark driver. This means you must actively limit the size of the data to be written. It is trivial to lazily read a set of inputs that cannot feasibly be written to GeoTIFF in the same environment. +One downside to GeoTIFF is that it is not a big-data native format. To create a GeoTIFF, all the data to be written must be `collect`ed in the memory of the Spark driver. This means you must actively limit the size of the data to be written. It is trivial to lazily read a set of inputs that cannot feasibly be written to GeoTIFF in the same environment. When writing GeoTIFFs in RasterFrames, you should limit the size of the collected data. Consider filtering the dataframe by time or @ref:[spatial filters](vector-data.md#geomesa-functions-and-spatial-relations). @@ -71,7 +71,7 @@ If there are many _tile_ or projected raster columns in the DataFrame, the GeoTI * `crs`: the PROJ4 string of the CRS the GeoTIFF is to be written in * `raster_dimensions`: optional, a tuple of two ints giving the size of the resulting file. If specified, RasterFrames will downsample the data in distributed fashion using bilinear resampling. If not specified, the default is to write the dataframe at full resolution, which can result in an out-of-memory error. -### Example +### Example See also the example in the @ref:[unsupervised learning page](unsupervised-learning.md). @@ -107,7 +107,7 @@ If the DataFrame has three or four tile columns, the GeoTIFF is written with the Also see [Color Composite](ipython.md#color-composite) in the IPython/Jupyter Extensions. -### PNG +### PNG In this example, using the @ref:[`rf_rgb_composite`](reference.md#rf-rgb-composite) function, we will compute a three-band PNG image as a `bytearray`. The resulting `bytearray` will be displayed as an image in either a Spark or pandas DataFrame display if `rf_ipython` has been imported. 
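If `rf_ipython` is not available, the PNG `bytearray` can still be checked and saved directly. A small sketch, assuming the `png_bytearray` variable produced by the (elided) composite chunk on this page; the comparison uses the standard eight-byte PNG magic number, the same check performed by the `assert_png` test helper earlier in this diff:

```python
# Verify the PNG signature, then persist the bytes without IPython in the loop.
assert bytes(png_bytearray[0:8]) == b'\x89PNG\r\n\x1a\n', 'not a PNG header'

with open('composite.png', 'wb') as f:  # output path is illustrative
    f.write(png_bytearray)
```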
@@ -130,7 +130,7 @@ pil_image = PIL_open(io.BytesIO(png_bytearray)) pil_image ``` -### GeoTIFF +### GeoTIFF In this example we will write a false-color composite as a GeoTIFF @@ -140,7 +140,7 @@ composite_df = spark.read.raster([[scene(3), scene(1), scene(4)]]) composite_df.write.geotiff(outfile, crs='EPSG:4326', raster_dimensions=(256, 256)) ``` -```python, show_geotiff +```python, show_geotiff with rasterio.open(outfile) as src: show(src) ``` diff --git a/pyrasterframes/src/main/python/docs/static/rasterframe-anatomy.png b/python/docs/static/rasterframe-anatomy.png similarity index 100% rename from pyrasterframes/src/main/python/docs/static/rasterframe-anatomy.png rename to python/docs/static/rasterframe-anatomy.png diff --git a/pyrasterframes/src/main/python/docs/static/rasterframes-data-sources.png b/python/docs/static/rasterframes-data-sources.png similarity index 100% rename from pyrasterframes/src/main/python/docs/static/rasterframes-data-sources.png rename to python/docs/static/rasterframes-data-sources.png diff --git a/pyrasterframes/src/main/python/docs/static/rasterframes-locationtech-stack.png b/python/docs/static/rasterframes-locationtech-stack.png similarity index 100% rename from pyrasterframes/src/main/python/docs/static/rasterframes-locationtech-stack.png rename to python/docs/static/rasterframes-locationtech-stack.png diff --git a/pyrasterframes/src/main/python/docs/static/rasterframes-pipeline-nologo.png b/python/docs/static/rasterframes-pipeline-nologo.png similarity index 100% rename from pyrasterframes/src/main/python/docs/static/rasterframes-pipeline-nologo.png rename to python/docs/static/rasterframes-pipeline-nologo.png diff --git a/pyrasterframes/src/main/python/docs/static/rasterframes-pipeline.png b/python/docs/static/rasterframes-pipeline.png similarity index 100% rename from pyrasterframes/src/main/python/docs/static/rasterframes-pipeline.png rename to python/docs/static/rasterframes-pipeline.png diff --git a/pyrasterframes/src/main/python/docs/static/sentinel-2-scene-classification-labels.png b/python/docs/static/sentinel-2-scene-classification-labels.png similarity index 100% rename from pyrasterframes/src/main/python/docs/static/sentinel-2-scene-classification-labels.png rename to python/docs/static/sentinel-2-scene-classification-labels.png diff --git a/pyrasterframes/src/main/python/docs/supervised-learning.pymd b/python/docs/supervised-learning.pymd similarity index 98% rename from pyrasterframes/src/main/python/docs/supervised-learning.pymd rename to python/docs/supervised-learning.pymd index f4c7682cf..756c344b9 100644 --- a/pyrasterframes/src/main/python/docs/supervised-learning.pymd +++ b/python/docs/supervised-learning.pymd @@ -71,7 +71,7 @@ print('Found ', len(crses), 'distinct CRS.') crs = crses[0][0] from pyspark import SparkFiles -spark.sparkContext.addFile('https://rasterframes.s3.amazonaws.com/samples/luray_snp/luray-labels.geojson') +spark.sparkContext.addFile('https://rasterframes.s3.amazonaws.com/samples/luray_snp/luray-labels.geojson') label_df = spark.read.geojson(SparkFiles.get('luray-labels.geojson')) \ .select('id', st_reproject('geometry', lit('EPSG:4326'), lit(crs)).alias('geometry')) \ @@ -80,7 +80,7 @@ label_df = spark.read.geojson(SparkFiles.get('luray-labels.geojson')) \ df_joined = df.join(label_df, st_intersects(st_geometry('extent'), 'geometry')) \ .withColumn('dims', rf_dimensions('B01')) -df_labeled = df_joined.withColumn('label', +df_labeled = df_joined.withColumn('label', rf_rasterize('geometry', st_geometry('extent'), 
'id', 'dims.cols', 'dims.rows') ) ``` @@ -174,14 +174,14 @@ accuracy = eval.evaluate(prediction_df) print("\nAccuracy:", accuracy) ``` -As an example of using the flexibility provided by DataFrames, the code below computes and displays the confusion matrix. +As an example of using the flexibility provided by DataFrames, the code below computes and displays the confusion matrix. ```python, confusion_mtrx cnf_mtrx = prediction_df.groupBy(classifier.getPredictionCol()) \ .pivot(classifier.getLabelCol()) \ .count() \ .sort(classifier.getPredictionCol()) -cnf_mtrx +cnf_mtrx ``` ## Visualize Prediction diff --git a/pyrasterframes/src/main/python/docs/time-series.pymd b/python/docs/time-series.pymd similarity index 93% rename from pyrasterframes/src/main/python/docs/time-series.pymd rename to python/docs/time-series.pymd index 43899b8e8..832ebcb93 100644 --- a/pyrasterframes/src/main/python/docs/time-series.pymd +++ b/python/docs/time-series.pymd @@ -19,7 +19,7 @@ spark = create_rf_spark_session("local[4]") In this example, we will show how the flexibility of the DataFrame concept for raster data allows a simple and intuitive way to extract a time series from Earth observation data. We will continue our example from the @ref:[Zonal Map Algebra page](zonal-algebra.md). -We will summarize the change in @ref:[NDVI](local-algebra.md#computing-ndvi) over the spring and early summer of 2018 in the Cuyahoga Valley National Park in Ohio, USA. +We will summarize the change in @ref:[NDVI](local-algebra.md#computing-ndvi) over the spring and early summer of 2018 in the Cuyahoga Valley National Park in Ohio, USA. ```python vector, echo=False, results='hidden' cat = spark.read.format('aws-pds-modis-catalog').load().repartition(200) @@ -45,17 +45,17 @@ park_vector = park_vector.withColumn('geo_simp', simplify('geometry', lit(0.001) ## Catalog Read -As in our other example, we will query for a single known MODIS granule directly. We limit the vector data to the single park of interest. The longer time period selected should show the change in plant vigor as leaves emerge over the spring and into early summer. The definitions of `cat` and `park_vector` are as in the @ref:[Zonal Map Algebra page](zonal-algebra.md). +As in our other example, we will query for a single known MODIS granule directly. We limit the vector data to the single park of interest. The longer time period selected should show the change in plant vigor as leaves emerge over the spring and into early summer. The definitions of `cat` and `park_vector` are as in the @ref:[Zonal Map Algebra page](zonal-algebra.md). ```python query_catalog park_cat = cat \ .filter( (cat.granule_id == 'h11v04') & (cat.acquisition_date > lit('2018-02-19')) & - (cat.acquisition_date < lit('2018-07-01')) + (cat.acquisition_date < lit('2018-07-01')) ) \ .crossJoin(park_vector.filter('UNIT_CODE == "CUVA"')) # only Cuyahoga - + ``` ## Vector and Raster Data Interaction @@ -78,16 +78,16 @@ rf_park_tile = spark.read.raster( ## Create Time Series -We next aggregate across the cell values to arrive at an average NDVI for each week of the year. We use `pyspark`'s built in `groupby` and time functions with a RasterFrames @ref:[aggregate function](aggregation.md) to do this. Note that the computation is creating a weighted average, which is weighted by the number of valid observations per week. +We next aggregate across the cell values to arrive at an average NDVI for each week of the year. 
We use `pyspark`'s built in `groupby` and time functions with a RasterFrames @ref:[aggregate function](aggregation.md) to do this. Note that the computation is creating a weighted average, which is weighted by the number of valid observations per week. ```python ndvi_time_series from pyspark.sql.functions import col, year, weekofyear, month time_series = rf_park_tile \ .groupby( - year('acquisition_date').alias('year'), + year('acquisition_date').alias('year'), weekofyear('acquisition_date').alias('week')) \ - .agg(rf_agg_mean('ndvi_masked').alias('ndvi')) + .agg(rf_agg_mean('ndvi_masked').alias('ndvi')) ``` Finally, we will take a look at the NDVI over time. diff --git a/pyrasterframes/src/main/python/docs/unsupervised-learning.pymd b/python/docs/unsupervised-learning.pymd similarity index 99% rename from pyrasterframes/src/main/python/docs/unsupervised-learning.pymd rename to python/docs/unsupervised-learning.pymd index 4076fc470..f4db8c04f 100644 --- a/pyrasterframes/src/main/python/docs/unsupervised-learning.pymd +++ b/python/docs/unsupervised-learning.pymd @@ -43,7 +43,7 @@ df = df.withColumn('crs', rf_crs(df.b1)) \ df.printSchema() ``` -In this small example, all the images in our `catalog_df` have the same @ref:[CRS](concepts.md#coordinate-reference-system-crs-), which we verify in the code snippet below. The `crs` object will be useful for visualization later. +In this small example, all the images in our `catalog_df` have the same @ref:[CRS](concepts.md#coordinate-reference-system-crs-), which we verify in the code snippet below. The `crs` object will be useful for visualization later. ```python, crses crses = df.select('crs.crsProj4').distinct().collect() diff --git a/pyrasterframes/src/main/python/docs/vector-data.pymd b/python/docs/vector-data.pymd similarity index 98% rename from pyrasterframes/src/main/python/docs/vector-data.pymd rename to python/docs/vector-data.pymd index 31a450f6b..7226cb822 100644 --- a/pyrasterframes/src/main/python/docs/vector-data.pymd +++ b/python/docs/vector-data.pymd @@ -1,6 +1,6 @@ # Vector Data -RasterFrames provides a variety of ways to work with spatial vector data (points, lines, and polygons) alongside raster data. +RasterFrames provides a variety of ways to work with spatial vector data (points, lines, and polygons) alongside raster data. * DataSource for GeoJSON format * Ability to convert between from [GeoPandas][GeoPandas] and Spark DataFrames diff --git a/pyrasterframes/src/main/python/docs/zonal-algebra.pymd b/python/docs/zonal-algebra.pymd similarity index 97% rename from pyrasterframes/src/main/python/docs/zonal-algebra.pymd rename to python/docs/zonal-algebra.pymd index 8571e8137..556b5c0f4 100644 --- a/pyrasterframes/src/main/python/docs/zonal-algebra.pymd +++ b/python/docs/zonal-algebra.pymd @@ -64,7 +64,7 @@ park_vector = park_vector.withColumn('geo_simp', simplify('geometry', lit(0.005) ## Catalog Read -Both parks are entirely contained in MODIS granule h11 v04. We will simply filter on this granule, rather than using a @ref:[spatial relation](vector-data.md#geomesa-functions-and-spatial-relations). +Both parks are entirely contained in MODIS granule h11 v04. We will simply filter on this granule, rather than using a @ref:[spatial relation](vector-data.md#geomesa-functions-and-spatial-relations). 
```python query_catalog cat = spark.read.format('aws-pds-modis-catalog').load().repartition(50) @@ -72,10 +72,10 @@ park_cat = cat \ .filter( (cat.granule_id == 'h11v04') & (cat.acquisition_date >= lit('2018-05-01')) & - (cat.acquisition_date < lit('2018-06-01')) + (cat.acquisition_date < lit('2018-06-01')) ) \ .crossJoin(park_vector) - + park_cat.printSchema() ``` @@ -89,7 +89,7 @@ park_rf = spark.read.raster( park_cat.select(['acquisition_date', 'granule_id'] + raster_cols + park_vector.columns), catalog_col_names=raster_cols) \ .withColumn('park_native', st_reproject('geo_simp', lit('EPSG:4326'), rf_crs('B01'))) \ - .filter(st_intersects('park_native', rf_geometry('B01'))) + .filter(st_intersects('park_native', rf_geometry('B01'))) park_rf.printSchema() ``` diff --git a/python/geomesa_pyspark/__init__.py b/python/geomesa_pyspark/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pyrasterframes/src/main/python/geomesa_pyspark/types.py b/python/geomesa_pyspark/types.py similarity index 92% rename from pyrasterframes/src/main/python/geomesa_pyspark/types.py rename to python/geomesa_pyspark/types.py index 5f1d0a110..bbc402718 100644 --- a/pyrasterframes/src/main/python/geomesa_pyspark/types.py +++ b/python/geomesa_pyspark/types.py @@ -9,7 +9,7 @@ http://www.opensource.org/licenses/apache2.0.php. + ***********************************************************************/""" -from pyspark.sql.types import UserDefinedType, StructField, BinaryType, StructType +from pyspark.sql.types import BinaryType, StructField, StructType, UserDefinedType from shapely import wkb from shapely.geometry import LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon from shapely.geometry.base import BaseGeometry @@ -17,18 +17,17 @@ class ShapelyGeometryUDT(UserDefinedType): - @classmethod def sqlType(cls): return StructType([StructField("wkb", BinaryType(), True)]) @classmethod def module(cls): - return 'geomesa_pyspark.types' + return "geomesa_pyspark.types" @classmethod def scalaUDT(cls): - return 'org.apache.spark.sql.jts.' + cls.__name__ + return "org.apache.spark.sql.jts." + cls.__name__ def serialize(self, obj): return [_serialize_to_wkb(obj)] diff --git a/pyrasterframes/src/main/python/pyrasterframes/__init__.py b/python/pyrasterframes/__init__.py similarity index 63% rename from pyrasterframes/src/main/python/pyrasterframes/__init__.py rename to python/pyrasterframes/__init__.py index 65b0eaed4..8e569447e 100644 --- a/pyrasterframes/src/main/python/pyrasterframes/__init__.py +++ b/python/pyrasterframes/__init__.py @@ -23,23 +23,23 @@ appended to PySpark classes. 
""" +from typing import List, Optional, Tuple + +import geomesa_pyspark.types # enable vector integrations from pyspark import SparkContext -from pyspark.sql import SparkSession, DataFrame, DataFrameReader, DataFrameWriter +from pyspark.sql import DataFrame, DataFrameReader, DataFrameWriter, SparkSession from pyspark.sql.column import _to_java_column # Import RasterFrameLayer types and functions from .rf_context import RFContext +from .rf_types import RasterFrameLayer, RasterSourceUDT, TileExploder, TileUDT from .version import __version__ -from .rf_types import RasterFrameLayer, TileExploder, TileUDT, RasterSourceUDT -import geomesa_pyspark.types # enable vector integrations -from typing import Dict, Tuple, List, Optional, Union - -__all__ = ['RasterFrameLayer', 'TileExploder'] +__all__ = ["RasterFrameLayer", "TileExploder"] def _rf_init(spark_session: SparkSession) -> SparkSession: - """ Adds RasterFrames functionality to PySpark session.""" + """Adds RasterFrames functionality to PySpark session.""" if not hasattr(spark_session, "rasterframes"): spark_session.rasterframes = RFContext(spark_session) spark_session.sparkContext._rf_context = spark_session.rasterframes @@ -50,35 +50,59 @@ def _rf_init(spark_session: SparkSession) -> SparkSession: def _kryo_init(builder: SparkSession.Builder) -> SparkSession.Builder: """Registers Kryo Serializers for better performance.""" # NB: These methods need to be kept up-to-date wit those in `org.locationtech.rasterframes.extensions.KryoMethods` - builder \ - .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") \ - .config("spark.kryo.registrator", "org.locationtech.rasterframes.util.RFKryoRegistrator") \ - .config("spark.kryoserializer.buffer.max", "500m") + builder.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer").config( + "spark.kryo.registrator", "org.locationtech.rasterframes.util.RFKryoRegistrator" + ).config("spark.kryoserializer.buffer.max", "500m") return builder def _convert_df(df: DataFrame, sp_key=None, metadata=None) -> RasterFrameLayer: - """ Internal function to convert a DataFrame to a RasterFrameLayer. 
""" + """Internal function to convert a DataFrame to a RasterFrameLayer.""" ctx = SparkContext._active_spark_context._rf_context if sp_key is None: return RasterFrameLayer(ctx._jrfctx.asLayer(df._jdf), ctx._spark_session) else: import json - return RasterFrameLayer(ctx._jrfctx.asLayer( - df._jdf, _to_java_column(sp_key), json.dumps(metadata)), ctx._spark_session) - -def _raster_join(df: DataFrame, other: DataFrame, - left_extent=None, left_crs=None, - right_extent=None, right_crs=None, - join_exprs=None, resampling_method='nearest_neighbor') -> DataFrame: + return RasterFrameLayer( + ctx._jrfctx.asLayer(df._jdf, _to_java_column(sp_key), json.dumps(metadata)), + ctx._spark_session, + ) + + +def _raster_join( + df: DataFrame, + other: DataFrame, + left_extent=None, + left_crs=None, + right_extent=None, + right_crs=None, + join_exprs=None, + resampling_method="nearest_neighbor", +) -> DataFrame: ctx = SparkContext._active_spark_context._rf_context - resampling_method = resampling_method.lower().strip().replace('_', '') - assert resampling_method in ['nearestneighbor', 'bilinear', 'cubicconvolution', 'cubicspline', 'lanczos', - 'average', 'mode', 'median', 'max', 'min', 'sum'] + resampling_method = resampling_method.lower().strip().replace("_", "") + assert resampling_method in [ + "nearestneighbor", + "bilinear", + "cubicconvolution", + "cubicspline", + "lanczos", + "average", + "mode", + "median", + "max", + "min", + "sum", + ] if join_exprs is not None: - assert left_extent is not None and left_crs is not None and right_extent is not None and right_crs is not None + assert ( + left_extent is not None + and left_crs is not None + and right_extent is not None + and right_crs is not None + ) # Note the order of arguments here. cols = [join_exprs, left_extent, left_crs, right_extent, right_crs] args = [_to_java_column(c) for c in cols] + [resampling_method] @@ -93,35 +117,42 @@ def _raster_join(df: DataFrame, other: DataFrame, else: jdf = ctx._jrfctx.rasterJoin(df._jdf, other._jdf, resampling_method) - return DataFrame(jdf, ctx._spark_session._wrapped) + return DataFrame(jdf, ctx._spark_session) -def _layer_reader(df_reader: DataFrameReader, format_key: str, path: Optional[str], **options: str) -> RasterFrameLayer: - """ Loads the file of the given type at the given path.""" +def _layer_reader( + df_reader: DataFrameReader, format_key: str, path: Optional[str], **options: str +) -> RasterFrameLayer: + """Loads the file of the given type at the given path.""" df = df_reader.format(format_key).load(path, **options) return _convert_df(df) -def _aliased_reader(df_reader: DataFrameReader, format_key: str, path: Optional[str], **options: str) -> DataFrame: - """ Loads the file of the given type at the given path.""" +def _aliased_reader( + df_reader: DataFrameReader, format_key: str, path: Optional[str], **options: str +) -> DataFrame: + """Loads the file of the given type at the given path.""" return df_reader.format(format_key).load(path, **options) -def _aliased_writer(df_writer: DataFrameWriter, format_key: str, path: Optional[str], **options: str): - """ Saves the dataframe to a file of the given type at the given path.""" +def _aliased_writer( + df_writer: DataFrameWriter, format_key: str, path: Optional[str], **options: str +): + """Saves the dataframe to a file of the given type at the given path.""" return df_writer.format(format_key).save(path, **options) def _raster_reader( - df_reader: DataFrameReader, - source=None, - catalog_col_names: Optional[List[str]] = None, - band_indexes: 
Optional[List[int]] = None, - buffer_size: int = 0, - tile_dimensions: Tuple[int] = (256, 256), - lazy_tiles: bool = True, - spatial_index_partitions=None, - **options: str) -> DataFrame: + df_reader: DataFrameReader, + source=None, + catalog_col_names: Optional[List[str]] = None, + band_indexes: Optional[List[int]] = None, + buffer_size: int = 0, + tile_dimensions: Tuple[int] = (256, 256), + lazy_tiles: bool = True, + spatial_index_partitions=None, + **options: str, +) -> DataFrame: """ Returns a Spark DataFrame from raster data files specified by URIs. Each row in the returned DataFrame will contain a column with struct of (CRS, Extent, Tile) for each item in @@ -144,19 +175,20 @@ def _raster_reader( from pandas import DataFrame as PdDataFrame - if 'catalog' in options: - source = options['catalog'] # maintain back compatibility with 0.8.0 + if "catalog" in options: + source = options["catalog"] # maintain back compatibility with 0.8.0 def to_csv(comp): if isinstance(comp, str): return comp else: - return ','.join(str(v) for v in comp) + return ",".join(str(v) for v in comp) def temp_name(): - """ Create a random name for a temporary view """ + """Create a random name for a temporary view""" import uuid - return str(uuid.uuid4()).replace('-', '') + + return str(uuid.uuid4()).replace("-", "") if band_indexes is None: band_indexes = [0] @@ -164,7 +196,7 @@ def temp_name(): if spatial_index_partitions: num = int(spatial_index_partitions) if num < 0: - spatial_index_partitions = '-1' + spatial_index_partitions = "-1" elif num == 0: spatial_index_partitions = None @@ -175,12 +207,14 @@ def temp_name(): spatial_index_partitions = str(spatial_index_partitions) options.update({"spatial_index_partitions": spatial_index_partitions}) - options.update({ - "band_indexes": to_csv(band_indexes), - "tile_dimensions": to_csv(tile_dimensions), - "lazy_tiles": str(lazy_tiles), - "buffer_size": int(buffer_size) - }) + options.update( + { + "band_indexes": to_csv(band_indexes), + "tile_dimensions": to_csv(tile_dimensions), + "lazy_tiles": str(lazy_tiles), + "buffer_size": int(buffer_size), + } + ) # Parse the `source` argument path = None # to pass into `path` param @@ -188,19 +222,24 @@ def temp_name(): if all([isinstance(i, str) for i in source]): path = None catalog = None - options.update(dict(paths='\n'.join([str(i) for i in source]))) # pass in "uri1\nuri2\nuri3\n..." + options.update( + dict(paths="\n".join([str(i) for i in source])) + ) # pass in "uri1\nuri2\nuri3\n..." 
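As a usage sketch of the `source` handling above (a minimal illustration, not part of this changeset; the URIs are hypothetical placeholders, and `create_rf_spark_session` is the session helper assumed to live in `pyrasterframes.utils`): a plain list of URI strings takes the `paths` branch just shown, while a list of lists is treated as an inline catalog in the branch that follows.

```python
from pyrasterframes.utils import create_rf_spark_session

spark = create_rf_spark_session()

# A list of URI strings is passed through the "paths" option: one scene per row.
scenes = spark.read.raster([
    "https://example.com/scene_1.tif",  # hypothetical URIs
    "https://example.com/scene_2.tif",
])

# A list of lists is coerced to a pandas catalog with auto-generated column
# names proj_raster_0, proj_raster_1, ...; each sublist becomes one row.
bands = spark.read.raster([
    ["https://example.com/a_B01.tif", "https://example.com/a_B02.tif"],
    ["https://example.com/b_B01.tif", "https://example.com/b_B02.tif"],
])
```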
if all([isinstance(i, list) for i in source]): # list of lists; we will rely on pandas to: # - coerce all data to str (possibly using objects' __str__ or __repr__) # - ensure data is not "ragged": all sublists are same len path = None - catalog_col_names = ['proj_raster_{}'.format(i) for i in range(len(source[0]))] # assign these names - catalog = PdDataFrame(source, - columns=catalog_col_names, - dtype=str, - ) + catalog_col_names = [ + "proj_raster_{}".format(i) for i in range(len(source[0])) + ] # assign these names + catalog = PdDataFrame( + source, + columns=catalog_col_names, + dtype=str, + ) elif isinstance(source, str): - if '\n' in source or '\r' in source: + if "\n" in source or "\r" in source: # then the `source` string is a catalog as a CSV (header is required) path = None catalog = source @@ -217,25 +256,23 @@ def temp_name(): raise Exception("'catalog_col_names' required when DataFrame 'catalog' specified") if isinstance(catalog, str): - options.update({ - "catalog_csv": catalog, - "catalog_col_names": to_csv(catalog_col_names) - }) + options.update({"catalog_csv": catalog, "catalog_col_names": to_csv(catalog_col_names)}) elif isinstance(catalog, DataFrame): # check catalog_col_names - assert all([c in catalog.columns for c in catalog_col_names]), \ - "All items in catalog_col_names must be the name of a column in the catalog DataFrame." + assert all( + [c in catalog.columns for c in catalog_col_names] + ), "All items in catalog_col_names must be the name of a column in the catalog DataFrame." # Create a random view name tmp_name = temp_name() catalog.createOrReplaceTempView(tmp_name) - options.update({ - "catalog_table": tmp_name, - "catalog_col_names": to_csv(catalog_col_names) - }) + options.update( + {"catalog_table": tmp_name, "catalog_col_names": to_csv(catalog_col_names)} + ) elif isinstance(catalog, PdDataFrame): # check catalog_col_names - assert all([c in catalog.columns for c in catalog_col_names]), \ - "All items in catalog_col_names must be the name of a column in the catalog DataFrame." + assert all( + [c in catalog.columns for c in catalog_col_names] + ), "All items in catalog_col_names must be the name of a column in the catalog DataFrame." 
# Handle to active spark session session = SparkContext._active_spark_context._rf_context._spark_session @@ -243,59 +280,53 @@ def temp_name(): tmp_name = temp_name() spark_catalog = session.createDataFrame(catalog) spark_catalog.createOrReplaceTempView(tmp_name) - options.update({ - "catalog_table": tmp_name, - "catalog_col_names": to_csv(catalog_col_names) - }) - - return df_reader \ - .format("raster") \ - .load(path, **options) - -def _stac_api_reader( - df_reader: DataFrameReader, - uri: str, - filters: dict = None) -> DataFrame: + options.update( + {"catalog_table": tmp_name, "catalog_col_names": to_csv(catalog_col_names)} + ) + + return df_reader.format("raster").load(path, **options) + + +def _stac_api_reader(df_reader: DataFrameReader, uri: str, filters: dict = None) -> DataFrame: """ :param uri: STAC API uri :param filters: STAC API Search filters dict (bbox, datetime, intersects, collections, items, limit, query, next), see the STAC API Spec for more details https://github.com/radiantearth/stac-api-spec """ import json - return df_reader \ - .format("stac-api") \ - .option("uri", uri) \ - .option("search-filters", json.dumps(filters)) \ + return ( + df_reader.format("stac-api") + .option("uri", uri) + .option("search-filters", json.dumps(filters)) .load() + ) -def _geotiff_writer( - df_writer: DataFrameWriter, - path: str, - crs: Optional[str] = None, - raster_dimensions: Tuple[int] = None, - **options: str): +def _geotiff_writer( + df_writer: DataFrameWriter, + path: str, + crs: Optional[str] = None, + raster_dimensions: Tuple[int] = None, + **options: str, +): def set_dims(parts): parts = [int(p) for p in parts] assert len(parts) == 2, "Expected dimensions specification to have exactly two components" - assert all([p > 0 for p in parts]), "Expected all components in dimensions to be positive integers" - options.update({ - "imageWidth": str(parts[0]), - "imageHeight": str(parts[1]) - }) + assert all( + [p > 0 for p in parts] + ), "Expected all components in dimensions to be positive integers" + options.update({"imageWidth": str(parts[0]), "imageHeight": str(parts[1])}) parts = [int(p) for p in parts] - assert all([p > 0 for p in parts]), 'nice message' + assert all([p > 0 for p in parts]), "Expected all components in dimensions to be positive integers" if raster_dimensions is not None: if isinstance(raster_dimensions, (list, tuple)): set_dims(raster_dimensions) elif isinstance(raster_dimensions, str): - set_dims(raster_dimensions.split(',')) + set_dims(raster_dimensions.split(",")) if crs is not None: - options.update({ - "crs": crs - }) + options.update({"crs": crs}) return _aliased_writer(df_writer, "geotiff", path, **options) @@ -318,6 +349,8 @@ def set_dims(parts): DataFrameReader.geotiff = lambda df_reader, path: _layer_reader(df_reader, "geotiff", path) DataFrameWriter.geotiff = _geotiff_writer DataFrameReader.geotrellis = lambda df_reader, path: _layer_reader(df_reader, "geotrellis", path) -DataFrameReader.geotrellis_catalog = lambda df_reader, path: _aliased_reader(df_reader, "geotrellis-catalog", path) +DataFrameReader.geotrellis_catalog = lambda df_reader, path: _aliased_reader( + df_reader, "geotrellis-catalog", path +) DataFrameWriter.geotrellis = lambda df_writer, path: _aliased_writer(df_writer, "geotrellis", path) DataFrameReader.stacapi = _stac_api_reader diff --git a/pyrasterframes/src/main/python/pyrasterframes/rasterfunctions.py b/python/pyrasterframes/rasterfunctions.py similarity index 59% rename from pyrasterframes/src/main/python/pyrasterframes/rasterfunctions.py rename to
python/pyrasterframes/rasterfunctions.py index 108e28afb..83a01011b 100644 --- a/pyrasterframes/src/main/python/pyrasterframes/rasterfunctions.py +++ b/python/pyrasterframes/rasterfunctions.py @@ -23,17 +23,18 @@ implementations. Most functions are standard Column functions, but those with unique signatures are handled here as well. """ +from typing import Iterable, List, Optional, Union + +from deprecation import deprecated +from py4j.java_gateway import JavaObject from pyspark.sql.column import Column, _to_java_column from pyspark.sql.functions import lit + from .rf_context import RFContext +from .rf_types import CRS, CellType, Extent from .version import __version__ -from deprecation import deprecated -from typing import Union, List, Optional, Iterable -from py4j.java_gateway import JavaObject -from .rf_types import CellType, Extent, CRS - -THIS_MODULE = 'pyrasterframes' +THIS_MODULE = "pyrasterframes" Column_type = Union[str, Column] @@ -55,26 +56,32 @@ def _apply_scalar_to_tile(name: str, tile_col: Column_type, scalar: Union[int, f def _parse_cell_type(cell_type_arg: Union[str, CellType]) -> JavaObject: - """ Convert the cell type representation to the expected JVM CellType object.""" + """Convert the cell type representation to the expected JVM CellType object.""" def to_jvm(ct): - return _context_call('_parse_cell_type', ct) + return _context_call("_parse_cell_type", ct) if isinstance(cell_type_arg, str): return to_jvm(cell_type_arg) elif isinstance(cell_type_arg, CellType): return to_jvm(cell_type_arg.cell_type_name) + def rf_cell_types() -> List[CellType]: """Return a list of standard cell types""" - return [CellType(str(ct)) for ct in _context_call('rf_cell_types')] + return [CellType(str(ct)) for ct in _context_call("rf_cell_types")] -def rf_assemble_tile(col_index: Column_type, row_index: Column_type, cell_data_col: Column_type, - num_cols: Union[int, Column_type], num_rows: Union[int, Column_type], - cell_type: Optional[Union[str, CellType]] = None) -> Column: +def rf_assemble_tile( + col_index: Column_type, + row_index: Column_type, + cell_data_col: Column_type, + num_cols: Union[int, Column_type], + num_rows: Union[int, Column_type], + cell_type: Optional[Union[str, CellType]] = None, +) -> Column: """Create a Tile from a column of cell data with location indices""" - jfcn = RFContext.active().lookup('rf_assemble_tile') + jfcn = RFContext.active().lookup("rf_assemble_tile") if isinstance(num_cols, Column): num_cols = _to_java_column(num_cols) @@ -83,332 +90,360 @@ def rf_assemble_tile(col_index: Column_type, row_index: Column_type, cell_data_c num_rows = _to_java_column(num_rows) if cell_type is None: - return Column(jfcn( - _to_java_column(col_index), _to_java_column(row_index), _to_java_column(cell_data_col), - num_cols, num_rows - )) + return Column( + jfcn( + _to_java_column(col_index), + _to_java_column(row_index), + _to_java_column(cell_data_col), + num_cols, + num_rows, + ) + ) else: - return Column(jfcn( - _to_java_column(col_index), _to_java_column(row_index), _to_java_column(cell_data_col), - num_cols, num_rows, _parse_cell_type(cell_type) - )) + return Column( + jfcn( + _to_java_column(col_index), + _to_java_column(row_index), + _to_java_column(cell_data_col), + num_cols, + num_rows, + _parse_cell_type(cell_type), + ) + ) def rf_array_to_tile(array_col: Column_type, num_cols: int, num_rows: int) -> Column: """Convert array in `array_col` into a Tile of dimensions `num_cols` and `num_rows'""" - jfcn = RFContext.active().lookup('rf_array_to_tile') + jfcn = 
RFContext.active().lookup("rf_array_to_tile") return Column(jfcn(_to_java_column(array_col), num_cols, num_rows)) def rf_convert_cell_type(tile_col: Column_type, cell_type: Union[str, CellType]) -> Column: """Convert the numeric type of the Tiles in `tileCol`""" - jfcn = RFContext.active().lookup('rf_convert_cell_type') + jfcn = RFContext.active().lookup("rf_convert_cell_type") return Column(jfcn(_to_java_column(tile_col), _parse_cell_type(cell_type))) def rf_interpret_cell_type_as(tile_col: Column_type, cell_type: Union[str, CellType]) -> Column: """Change the interpretation of the tile_col's cell values according to specified cell_type""" - jfcn = RFContext.active().lookup('rf_interpret_cell_type_as') + jfcn = RFContext.active().lookup("rf_interpret_cell_type_as") return Column(jfcn(_to_java_column(tile_col), _parse_cell_type(cell_type))) -def rf_make_constant_tile(scalar_value: Union[int, float], num_cols: int, num_rows: int, - cell_type: Union[str, CellType] = CellType.float64()) -> Column: +def rf_make_constant_tile( + scalar_value: Union[int, float], + num_cols: int, + num_rows: int, + cell_type: Union[str, CellType] = CellType.float64(), +) -> Column: """Constructor for constant tile column""" - jfcn = RFContext.active().lookup('rf_make_constant_tile') + jfcn = RFContext.active().lookup("rf_make_constant_tile") return Column(jfcn(scalar_value, num_cols, num_rows, _parse_cell_type(cell_type))) -def rf_make_zeros_tile(num_cols: int, num_rows: int, cell_type: Union[str, CellType] = CellType.float64()) -> Column: +def rf_make_zeros_tile( + num_cols: int, num_rows: int, cell_type: Union[str, CellType] = CellType.float64() +) -> Column: """Create column of constant tiles of zero""" - jfcn = RFContext.active().lookup('rf_make_zeros_tile') + jfcn = RFContext.active().lookup("rf_make_zeros_tile") return Column(jfcn(num_cols, num_rows, _parse_cell_type(cell_type))) -def rf_make_ones_tile(num_cols: int, num_rows: int, cell_type: Union[str, CellType] = CellType.float64()) -> Column: +def rf_make_ones_tile( + num_cols: int, num_rows: int, cell_type: Union[str, CellType] = CellType.float64() +) -> Column: """Create column of constant tiles of one""" - jfcn = RFContext.active().lookup('rf_make_ones_tile') + jfcn = RFContext.active().lookup("rf_make_ones_tile") return Column(jfcn(num_cols, num_rows, _parse_cell_type(cell_type))) -def rf_rasterize(geometry_col: Column_type, bounds_col: Column_type, value_col: Column_type, num_cols_col: Column_type, - num_rows_col: Column_type) -> Column: +def rf_rasterize( + geometry_col: Column_type, + bounds_col: Column_type, + value_col: Column_type, + num_cols_col: Column_type, + num_rows_col: Column_type, +) -> Column: """Create a tile where cells in the grid defined by cols, rows, and bounds are filled with the given value.""" - return _apply_column_function('rf_rasterize', geometry_col, bounds_col, value_col, num_cols_col, num_rows_col) + return _apply_column_function( + "rf_rasterize", geometry_col, bounds_col, value_col, num_cols_col, num_rows_col + ) def st_reproject(geometry_col: Column_type, src_crs: Column_type, dst_crs: Column_type) -> Column: """Reproject a column of geometry given the CRSs of the source and destination.""" - return _apply_column_function('st_reproject', geometry_col, src_crs, dst_crs) + return _apply_column_function("st_reproject", geometry_col, src_crs, dst_crs) def rf_explode_tiles(*tile_cols: Column_type) -> Column: """Create a row for each cell in Tile.""" - jfcn = RFContext.active().lookup('rf_explode_tiles') + jfcn = 
RFContext.active().lookup("rf_explode_tiles") jcols = [_to_java_column(arg) for arg in tile_cols] return Column(jfcn(RFContext.active().list_to_seq(jcols))) def rf_explode_tiles_sample(sample_frac: float, seed: int, *tile_cols: Column_type) -> Column: """Create a row for a sample of cells in Tile columns.""" - jfcn = RFContext.active().lookup('rf_explode_tiles_sample') + jfcn = RFContext.active().lookup("rf_explode_tiles_sample") jcols = [_to_java_column(arg) for arg in tile_cols] return Column(jfcn(sample_frac, seed, RFContext.active().list_to_seq(jcols))) def rf_with_no_data(tile_col: Column_type, scalar: Union[int, float]) -> Column: """Assign a `NoData` value to the Tiles in the given Column.""" - return _apply_scalar_to_tile('rf_with_no_data', tile_col, scalar) + return _apply_scalar_to_tile("rf_with_no_data", tile_col, scalar) def rf_local_add(left_tile_col: Column_type, rhs: Union[float, int, Column_type]) -> Column: """Add two Tiles, or add a scalar to a Tile""" if isinstance(rhs, (float, int)): rhs = lit(rhs) - return _apply_column_function('rf_local_add', left_tile_col, rhs) + return _apply_column_function("rf_local_add", left_tile_col, rhs) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_add_double(tile_col: Column_type, scalar: float) -> Column: """Add a floating point scalar to a Tile""" - return _apply_scalar_to_tile('rf_local_add_double', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_add_double", tile_col, scalar) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_add_int(tile_col, scalar) -> Column: """Add an integral scalar to a Tile""" - return _apply_scalar_to_tile('rf_local_add_int', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_add_int", tile_col, scalar) def rf_local_subtract(left_tile_col: Column_type, rhs: Union[float, int, Column_type]) -> Column: """Subtract two Tiles, or subtract a scalar from a Tile""" if isinstance(rhs, (float, int)): rhs = lit(rhs) - return _apply_column_function('rf_local_subtract', left_tile_col, rhs) + return _apply_column_function("rf_local_subtract", left_tile_col, rhs) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_subtract_double(tile_col, scalar): """Subtract a floating point scalar from a Tile""" - return _apply_scalar_to_tile('rf_local_subtract_double', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_subtract_double", tile_col, scalar) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_subtract_int(tile_col, scalar): """Subtract an integral scalar from a Tile""" - return _apply_scalar_to_tile('rf_local_subtract_int', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_subtract_int", tile_col, scalar) def rf_local_multiply(left_tile_col: Column_type, rhs: Union[float, int, Column_type]) -> Column: """Multiply two Tiles cell-wise, or multiply Tile cells by a scalar""" if isinstance(rhs, (float, int)): rhs = lit(rhs) - return _apply_column_function('rf_local_multiply', left_tile_col, rhs) + return _apply_column_function("rf_local_multiply", left_tile_col, rhs) 
-@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_multiply_double(tile_col, scalar): """Multiply a Tile by a floating point scalar""" - return _apply_scalar_to_tile('rf_local_multiply_double', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_multiply_double", tile_col, scalar) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_multiply_int(tile_col, scalar): """Multiply a Tile by an integral scalar""" - return _apply_scalar_to_tile('rf_local_multiply_int', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_multiply_int", tile_col, scalar) def rf_local_divide(left_tile_col: Column_type, rhs: Union[float, int, Column_type]) -> Column: """Divide two Tiles cell-wise, or divide a Tile's cell values by a scalar""" if isinstance(rhs, (float, int)): rhs = lit(rhs) - return _apply_column_function('rf_local_divide', left_tile_col, rhs) + return _apply_column_function("rf_local_divide", left_tile_col, rhs) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_divide_double(tile_col, scalar): """Divide a Tile by a floating point scalar""" - return _apply_scalar_to_tile('rf_local_divide_double', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_divide_double", tile_col, scalar) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_divide_int(tile_col, scalar): """Divide a Tile by an integral scalar""" - return _apply_scalar_to_tile('rf_local_divide_int', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_divide_int", tile_col, scalar) def rf_local_less(left_tile_col: Column_type, rhs: Union[float, int, Column_type]) -> Column: """Cellwise less than comparison between two tiles, or with a scalar value""" if isinstance(rhs, (float, int)): rhs = lit(rhs) - return _apply_column_function('rf_local_less', left_tile_col, rhs) + return _apply_column_function("rf_local_less", left_tile_col, rhs) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_less_double(tile_col, scalar): """Return a Tile with values equal 1 if the cell is less than a scalar, otherwise 0""" - return _apply_scalar_to_tile('foo', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_less_double", tile_col, scalar) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_less_int(tile_col, scalar): """Return a Tile with values equal 1 if the cell is less than a scalar, otherwise 0""" - return _apply_scalar_to_tile('rf_local_less_double', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_less_int", tile_col, scalar) def rf_local_less_equal(left_tile_col: Column_type, rhs: Union[float, int, Column_type]) -> Column: """Cellwise less than or equal to comparison between two tiles, or with a scalar value""" if isinstance(rhs, (float, int)): rhs = lit(rhs) - return _apply_column_function('rf_local_less_equal', left_tile_col, rhs) + return
_apply_column_function("rf_local_less_equal", left_tile_col, rhs) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_less_equal_double(tile_col, scalar): """Return a Tile with values equal 1 if the cell is less than or equal to a scalar, otherwise 0""" - return _apply_scalar_to_tile('rf_local_less_equal_double', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_less_equal_double", tile_col, scalar) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_less_equal_int(tile_col, scalar): """Return a Tile with values equal 1 if the cell is less than or equal to a scalar, otherwise 0""" - return _apply_scalar_to_tile('rf_local_less_equal_int', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_less_equal_int", tile_col, scalar) def rf_local_greater(left_tile_col: Column, rhs: Union[float, int, Column_type]) -> Column: """Cellwise greater than comparison between two tiles, or with a scalar value""" if isinstance(rhs, (float, int)): rhs = lit(rhs) - return _apply_column_function('rf_local_greater', left_tile_col, rhs) + return _apply_column_function("rf_local_greater", left_tile_col, rhs) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_greater_double(tile_col, scalar): """Return a Tile with values equal 1 if the cell is greater than a scalar, otherwise 0""" - return _apply_scalar_to_tile('rf_local_greater_double', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_greater_double", tile_col, scalar) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_greater_int(tile_col, scalar): """Return a Tile with values equal 1 if the cell is greater than a scalar, otherwise 0""" - return _apply_scalar_to_tile('rf_local_greater_int', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_greater_int", tile_col, scalar) def rf_local_greater_equal(left_tile_col: Column, rhs: Union[float, int, Column_type]) -> Column: """Cellwise greater than or equal to comparison between two tiles, or with a scalar value""" if isinstance(rhs, (float, int)): rhs = lit(rhs) - return _apply_column_function('rf_local_greater_equal', left_tile_col, rhs) + return _apply_column_function("rf_local_greater_equal", left_tile_col, rhs) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_greater_equal_double(tile_col, scalar): """Return a Tile with values equal 1 if the cell is greater than or equal to a scalar, otherwise 0""" - return _apply_scalar_to_tile('rf_local_greater_equal_double', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_greater_equal_double", tile_col, scalar) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_greater_equal_int(tile_col, scalar): """Return a Tile with values equal 1 if the cell is greater than or equal to a scalar, otherwise 0""" - return _apply_scalar_to_tile('rf_local_greater_equal_int', 
tile_col, scalar) + return _apply_scalar_to_tile("rf_local_greater_equal_int", tile_col, scalar) def rf_local_equal(left_tile_col, rhs: Union[float, int, Column_type]) -> Column: """Cellwise equality comparison between two tiles, or with a scalar value""" if isinstance(rhs, (float, int)): rhs = lit(rhs) - return _apply_column_function('rf_local_equal', left_tile_col, rhs) + return _apply_column_function("rf_local_equal", left_tile_col, rhs) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_equal_double(tile_col, scalar): """Return a Tile with values equal 1 if the cell is equal to a scalar, otherwise 0""" - return _apply_scalar_to_tile('rf_local_equal_double', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_equal_double", tile_col, scalar) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_equal_int(tile_col, scalar): """Return a Tile with values equal 1 if the cell is equal to a scalar, otherwise 0""" - return _apply_scalar_to_tile('rf_local_equal_int', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_equal_int", tile_col, scalar) def rf_local_unequal(left_tile_col, rhs: Union[float, int, Column_type]) -> Column: """Cellwise inequality comparison between two tiles, or with a scalar value""" if isinstance(rhs, (float, int)): rhs = lit(rhs) - return _apply_column_function('rf_local_unequal', left_tile_col, rhs) + return _apply_column_function("rf_local_unequal", left_tile_col, rhs) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_unequal_double(tile_col, scalar): """Return a Tile with values equal 1 if the cell is not equal to a scalar, otherwise 0""" - return _apply_scalar_to_tile('rf_local_unequal_double', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_unequal_double", tile_col, scalar) -@deprecated(deprecated_in='0.9.0', removed_in='1.0.0', current_version=__version__) +@deprecated(deprecated_in="0.9.0", removed_in="1.0.0", current_version=__version__) def rf_local_unequal_int(tile_col, scalar): """Return a Tile with values equal 1 if the cell is not equal to a scalar, otherwise 0""" - return _apply_scalar_to_tile('rf_local_unequal_int', tile_col, scalar) + return _apply_scalar_to_tile("rf_local_unequal_int", tile_col, scalar) def rf_local_no_data(tile_col: Column_type) -> Column: """Return a tile with ones where the input is NoData, otherwise zero.""" - return _apply_column_function('rf_local_no_data', tile_col) + return _apply_column_function("rf_local_no_data", tile_col) def rf_local_data(tile_col: Column_type) -> Column: """Return a tile with zeros where the input is NoData, otherwise one.""" - return _apply_column_function('rf_local_data', tile_col) + return _apply_column_function("rf_local_data", tile_col) def rf_local_is_in(tile_col: Column_type, array: Union[Column_type, List]) -> Column: """Return a tile with cell values of 1 where the `tile_col` cell is in the provided array.""" from pyspark.sql.functions import array as sql_array + if isinstance(array, list): array = sql_array([lit(v) for v in array]) - return _apply_column_function('rf_local_is_in', tile_col, array) + return _apply_column_function("rf_local_is_in", tile_col, array) def 
rf_dimensions(tile_col: Column_type) -> Column: """Query the number of (cols, rows) in a Tile.""" - return _apply_column_function('rf_dimensions', tile_col) + return _apply_column_function("rf_dimensions", tile_col) def rf_tile_to_array_int(tile_col: Column_type) -> Column: """Flattens Tile into an array of integers.""" - return _apply_column_function('rf_tile_to_array_int', tile_col) + return _apply_column_function("rf_tile_to_array_int", tile_col) def rf_tile_to_array_double(tile_col: Column_type) -> Column: """Flattens Tile into an array of doubles.""" - return _apply_column_function('rf_tile_to_array_double', tile_col) + return _apply_column_function("rf_tile_to_array_double", tile_col) def rf_cell_type(tile_col: Column_type) -> Column: """Extract the Tile's cell type""" - return _apply_column_function('rf_cell_type', tile_col) + return _apply_column_function("rf_cell_type", tile_col) def rf_is_no_data_tile(tile_col: Column_type) -> Column: """Report if the Tile is entirely NODDATA cells""" - return _apply_column_function('rf_is_no_data_tile', tile_col) + return _apply_column_function("rf_is_no_data_tile", tile_col) def rf_exists(tile_col: Column_type) -> Column: """Returns true if any cells in the tile are true (non-zero and not NoData)""" - return _apply_column_function('rf_exists', tile_col) + return _apply_column_function("rf_exists", tile_col) def rf_for_all(tile_col: Column_type) -> Column: """Returns true if all cells in the tile are true (non-zero and not NoData).""" - return _apply_column_function('rf_for_all', tile_col) + return _apply_column_function("rf_for_all", tile_col) def rf_agg_approx_histogram(tile_col: Column_type) -> Column: """Compute the full column aggregate floating point histogram""" - return _apply_column_function('rf_agg_approx_histogram', tile_col) + return _apply_column_function("rf_agg_approx_histogram", tile_col) + def rf_agg_approx_quantiles(tile_col, probabilities, relative_error=0.00001): """ @@ -421,41 +456,56 @@ def rf_agg_approx_quantiles(tile_col, probabilities, relative_error=0.00001): :return: An array of values approximately at the specified `probabilities` """ - _jfn = RFContext.active().lookup('rf_agg_approx_quantiles') + _jfn = RFContext.active().lookup("rf_agg_approx_quantiles") _tile_col = _to_java_column(tile_col) return Column(_jfn(_tile_col, probabilities, relative_error)) def rf_agg_stats(tile_col: Column_type) -> Column: """Compute the full column aggregate floating point statistics""" - return _apply_column_function('rf_agg_stats', tile_col) + return _apply_column_function("rf_agg_stats", tile_col) def rf_agg_mean(tile_col: Column_type) -> Column: """Computes the column aggregate mean""" - return _apply_column_function('rf_agg_mean', tile_col) + return _apply_column_function("rf_agg_mean", tile_col) def rf_agg_data_cells(tile_col: Column_type) -> Column: """Computes the number of non-NoData cells in a column""" - return _apply_column_function('rf_agg_data_cells', tile_col) + return _apply_column_function("rf_agg_data_cells", tile_col) def rf_agg_no_data_cells(tile_col: Column_type) -> Column: """Computes the number of NoData cells in a column""" - return _apply_column_function('rf_agg_no_data_cells', tile_col) + return _apply_column_function("rf_agg_no_data_cells", tile_col) + def rf_agg_extent(extent_col): """Compute the aggregate extent over a column""" - return _apply_column_function('rf_agg_extent', extent_col) + return _apply_column_function("rf_agg_extent", extent_col) def rf_agg_reprojected_extent(extent_col, src_crs_col, 
dest_crs): - """Compute the aggregate extent over a column, first projecting from the row CRS to the destination CRS. """ - return Column(RFContext.call('rf_agg_reprojected_extent', _to_java_column(extent_col), _to_java_column(src_crs_col), CRS(dest_crs).__jvm__)) - -def rf_agg_overview_raster(tile_col: Column, cols: int, rows: int, aoi: Extent, - tile_extent_col: Column = None, tile_crs_col: Column = None): + """Compute the aggregate extent over a column, first projecting from the row CRS to the destination CRS.""" + return Column( + RFContext.call( + "rf_agg_reprojected_extent", + _to_java_column(extent_col), + _to_java_column(src_crs_col), + CRS(dest_crs).__jvm__, + ) + ) + + +def rf_agg_overview_raster( + tile_col: Column, + cols: int, + rows: int, + aoi: Extent, + tile_extent_col: Column = None, + tile_crs_col: Column = None, +): """Construct an overview raster of size `cols`x`rows` where data in `proj_raster` intersects the `aoi` bound box in web-mercator. Uses bi-linear sampling method.""" ctx = RFContext.active() @@ -464,175 +514,211 @@ def rf_agg_overview_raster(tile_col: Column, cols: int, rows: int, aoi: Extent, if tile_extent_col is None or tile_crs_col is None: return Column(jfcn(_to_java_column(tile_col), cols, rows, aoi.__jvm__)) else: - return Column(jfcn( - _to_java_column(tile_col), _to_java_column(tile_extent_col), _to_java_column(tile_crs_col), - cols, rows, aoi.__jvm__ - )) + return Column( + jfcn( + _to_java_column(tile_col), + _to_java_column(tile_extent_col), + _to_java_column(tile_crs_col), + cols, + rows, + aoi.__jvm__, + ) + ) + def rf_tile_histogram(tile_col: Column_type) -> Column: """Compute the Tile-wise histogram""" - return _apply_column_function('rf_tile_histogram', tile_col) + return _apply_column_function("rf_tile_histogram", tile_col) def rf_tile_mean(tile_col: Column_type) -> Column: """Compute the Tile-wise mean""" - return _apply_column_function('rf_tile_mean', tile_col) + return _apply_column_function("rf_tile_mean", tile_col) def rf_tile_sum(tile_col: Column_type) -> Column: """Compute the Tile-wise sum""" - return _apply_column_function('rf_tile_sum', tile_col) + return _apply_column_function("rf_tile_sum", tile_col) def rf_tile_min(tile_col: Column_type) -> Column: """Compute the Tile-wise minimum""" - return _apply_column_function('rf_tile_min', tile_col) + return _apply_column_function("rf_tile_min", tile_col) def rf_tile_max(tile_col: Column_type) -> Column: """Compute the Tile-wise maximum""" - return _apply_column_function('rf_tile_max', tile_col) + return _apply_column_function("rf_tile_max", tile_col) def rf_tile_stats(tile_col: Column_type) -> Column: """Compute the Tile-wise floating point statistics""" - return _apply_column_function('rf_tile_stats', tile_col) + return _apply_column_function("rf_tile_stats", tile_col) def rf_render_ascii(tile_col: Column_type) -> Column: """Render ASCII art of tile""" - return _apply_column_function('rf_render_ascii', tile_col) + return _apply_column_function("rf_render_ascii", tile_col) def rf_render_matrix(tile_col: Column_type) -> Column: """Render Tile cell values as numeric values, for debugging purposes""" - return _apply_column_function('rf_render_matrix', tile_col) + return _apply_column_function("rf_render_matrix", tile_col) -def rf_render_png(red_tile_col: Column_type, green_tile_col: Column_type, blue_tile_col: Column_type) -> Column: +def rf_render_png( + red_tile_col: Column_type, green_tile_col: Column_type, blue_tile_col: Column_type +) -> Column: """Converts columns of tiles representing 
RGB channels into a PNG encoded byte array.""" - return _apply_column_function('rf_render_png', red_tile_col, green_tile_col, blue_tile_col) + return _apply_column_function("rf_render_png", red_tile_col, green_tile_col, blue_tile_col) def rf_render_color_ramp_png(tile_col, color_ramp_name): """Converts a column of tiles into a PNG encoded byte array, mapping cell values through the named color ramp.""" - return Column(RFContext.call('rf_render_png', _to_java_column(tile_col), color_ramp_name)) + return Column(RFContext.call("rf_render_png", _to_java_column(tile_col), color_ramp_name)) -def rf_rgb_composite(red_tile_col: Column_type, green_tile_col: Column_type, blue_tile_col: Column_type) -> Column: +def rf_rgb_composite( + red_tile_col: Column_type, green_tile_col: Column_type, blue_tile_col: Column_type +) -> Column: """Converts columns of tiles representing RGB channels into a single RGB packaged tile.""" - return _apply_column_function('rf_rgb_composite', red_tile_col, green_tile_col, blue_tile_col) + return _apply_column_function("rf_rgb_composite", red_tile_col, green_tile_col, blue_tile_col) def rf_no_data_cells(tile_col: Column_type) -> Column: """Count of NODATA cells""" - return _apply_column_function('rf_no_data_cells', tile_col) + return _apply_column_function("rf_no_data_cells", tile_col) def rf_data_cells(tile_col: Column_type) -> Column: """Count of cells with valid data""" - return _apply_column_function('rf_data_cells', tile_col) + return _apply_column_function("rf_data_cells", tile_col) def rf_normalized_difference(left_tile_col: Column_type, right_tile_col: Column_type) -> Column: """Compute the normalized difference of two tiles""" - return _apply_column_function('rf_normalized_difference', left_tile_col, right_tile_col) + return _apply_column_function("rf_normalized_difference", left_tile_col, right_tile_col) def rf_agg_local_max(tile_col: Column_type) -> Column: """Compute the cell-wise/local max operation between Tiles in a column.""" - return _apply_column_function('rf_agg_local_max', tile_col) + return _apply_column_function("rf_agg_local_max", tile_col) def rf_agg_local_min(tile_col: Column_type) -> Column: """Compute the cellwise/local min operation between Tiles in a column.""" - return _apply_column_function('rf_agg_local_min', tile_col) + return _apply_column_function("rf_agg_local_min", tile_col) def rf_agg_local_mean(tile_col: Column_type) -> Column: """Compute the cellwise/local mean operation between Tiles in a column.""" - return _apply_column_function('rf_agg_local_mean', tile_col) + return _apply_column_function("rf_agg_local_mean", tile_col) def rf_agg_local_data_cells(tile_col: Column_type) -> Column: """Compute the cellwise/local count of non-NoData cells for all Tiles in a column.""" - return _apply_column_function('rf_agg_local_data_cells', tile_col) + return _apply_column_function("rf_agg_local_data_cells", tile_col) def rf_agg_local_no_data_cells(tile_col: Column_type) -> Column: """Compute the cellwise/local count of NoData cells for all Tiles in a column.""" - return _apply_column_function('rf_agg_local_no_data_cells', tile_col) + return _apply_column_function("rf_agg_local_no_data_cells", tile_col) def rf_agg_local_stats(tile_col: Column_type) -> Column: """Compute cell-local aggregate descriptive statistics for a column of Tiles.""" - return _apply_column_function('rf_agg_local_stats', tile_col) + return _apply_column_function("rf_agg_local_stats", tile_col) def rf_mask(src_tile_col: Column_type, mask_tile_col: Column_type, inverse: bool = False) -> Column: """Where
the rf_mask (second) tile contains NODATA, replace values in the source (first) tile with NODATA. - If `inverse` is true, replaces values in the source tile with NODATA where the mask tile contains valid data. + If `inverse` is true, replaces values in the source tile with NODATA where the mask tile contains valid data. """ if not inverse: - return _apply_column_function('rf_mask', src_tile_col, mask_tile_col) + return _apply_column_function("rf_mask", src_tile_col, mask_tile_col) else: return rf_inverse_mask(src_tile_col, mask_tile_col) def rf_inverse_mask(src_tile_col: Column_type, mask_tile_col: Column_type) -> Column: """Where the rf_mask (second) tile DOES NOT contain NODATA, replace values in the source - (first) tile with NODATA.""" - return _apply_column_function('rf_inverse_mask', src_tile_col, mask_tile_col) + (first) tile with NODATA.""" + return _apply_column_function("rf_inverse_mask", src_tile_col, mask_tile_col) -def rf_mask_by_value(data_tile: Column_type, mask_tile: Column_type, mask_value: Union[int, float, Column_type], - inverse: bool = False) -> Column: +def rf_mask_by_value( + data_tile: Column_type, + mask_tile: Column_type, + mask_value: Union[int, float, Column_type], + inverse: bool = False, +) -> Column: """Generate a tile with the values from the data tile, but where cells in the masking tile contain the masking - value, replace the data value with NODATA. """ + value, replace the data value with NODATA.""" if isinstance(mask_value, (int, float)): mask_value = lit(mask_value) - jfcn = RFContext.active().lookup('rf_mask_by_value') - - return Column(jfcn(_to_java_column(data_tile), _to_java_column(mask_tile), _to_java_column(mask_value), inverse)) - - -def rf_mask_by_values(data_tile: Column_type, mask_tile: Column_type, - mask_values: Union[List[Union[int, float]], Column_type]) -> Column: + jfcn = RFContext.active().lookup("rf_mask_by_value") + + return Column( + jfcn( + _to_java_column(data_tile), + _to_java_column(mask_tile), + _to_java_column(mask_value), + inverse, + ) + ) + + +def rf_mask_by_values( + data_tile: Column_type, + mask_tile: Column_type, + mask_values: Union[List[Union[int, float]], Column_type], +) -> Column: """Generate a tile with the values from `data_tile`, but where cells in the `mask_tile` are in the `mask_values` - list, replace the value with NODATA. + list, replace the value with NODATA. """ from pyspark.sql.functions import array as sql_array + if isinstance(mask_values, list): mask_values = sql_array([lit(v) for v in mask_values]) - jfcn = RFContext.active().lookup('rf_mask_by_values') + jfcn = RFContext.active().lookup("rf_mask_by_values") col_args = [_to_java_column(c) for c in [data_tile, mask_tile, mask_values]] return Column(jfcn(*col_args)) -def rf_inverse_mask_by_value(data_tile: Column_type, mask_tile: Column_type, - mask_value: Union[int, float, Column_type]) -> Column: +def rf_inverse_mask_by_value( + data_tile: Column_type, mask_tile: Column_type, mask_value: Union[int, float, Column_type] +) -> Column: """Generate a tile with the values from the data tile, but where cells in the masking tile do not contain the - masking value, replace the data value with NODATA.
""" + masking value, replace the data value with NODATA.""" if isinstance(mask_value, (int, float)): mask_value = lit(mask_value) - return _apply_column_function('rf_inverse_mask_by_value', data_tile, mask_tile, mask_value) + return _apply_column_function("rf_inverse_mask_by_value", data_tile, mask_tile, mask_value) -def rf_mask_by_bit(data_tile: Column_type, mask_tile: Column_type, - bit_position: Union[int, Column_type], - value_to_mask: Union[int, float, bool, Column_type]) -> Column: +def rf_mask_by_bit( + data_tile: Column_type, + mask_tile: Column_type, + bit_position: Union[int, Column_type], + value_to_mask: Union[int, float, bool, Column_type], +) -> Column: """Applies a mask using bit values in the `mask_tile`. Working from the right, extract the bit at `bitPosition` from the `maskTile`. In all locations where these are equal to the `valueToMask`, the returned tile is set to NoData, else the original `dataTile` cell value.""" if isinstance(bit_position, int): bit_position = lit(bit_position) if isinstance(value_to_mask, (int, float, bool)): value_to_mask = lit(bool(value_to_mask)) - return _apply_column_function('rf_mask_by_bit', data_tile, mask_tile, bit_position, value_to_mask) - - -def rf_mask_by_bits(data_tile: Column_type, mask_tile: Column_type, start_bit: Union[int, Column_type], - num_bits: Union[int, Column_type], - values_to_mask: Union[Iterable[Union[int, float]], Column_type]) -> Column: + return _apply_column_function( + "rf_mask_by_bit", data_tile, mask_tile, bit_position, value_to_mask + ) + + +def rf_mask_by_bits( + data_tile: Column_type, + mask_tile: Column_type, + start_bit: Union[int, Column_type], + num_bits: Union[int, Column_type], + values_to_mask: Union[Iterable[Union[int, float]], Column_type], +) -> Column: """Applies a mask from blacklisted bit values in the `mask_tile`. Working from the right, the bits from `start_bit` to `start_bit + num_bits` are @ref:[extracted](reference.md#rf_local_extract_bits) from cell values of the `mask_tile`. In all locations where these are in the `mask_values`, the returned tile is set to NoData; otherwise the original `tile` cell value is returned.""" if isinstance(start_bit, int): start_bit = lit(start_bit) @@ -640,55 +726,59 @@ def rf_mask_by_bits(data_tile: Column_type, mask_tile: Column_type, start_bit: U num_bits = lit(num_bits) if isinstance(values_to_mask, (tuple, list)): from pyspark.sql.functions import array + values_to_mask = array([lit(v) for v in values_to_mask]) - return _apply_column_function('rf_mask_by_bits', data_tile, mask_tile, start_bit, num_bits, values_to_mask) + return _apply_column_function( + "rf_mask_by_bits", data_tile, mask_tile, start_bit, num_bits, values_to_mask + ) -def rf_local_extract_bits(tile: Column_type, start_bit: Union[int, Column_type], - num_bits: Union[int, Column_type] = 1) -> Column: +def rf_local_extract_bits( + tile: Column_type, start_bit: Union[int, Column_type], num_bits: Union[int, Column_type] = 1 +) -> Column: """Extract value from specified bits of the cells' underlying binary data. * `startBit` is the first bit to consider, working from the right. It is zero indexed. - * `numBits` is the number of bits to take moving further to the left. 
""" + * `numBits` is the number of bits to take moving further to the left.""" if isinstance(start_bit, int): start_bit = lit(start_bit) if isinstance(num_bits, int): num_bits = lit(num_bits) - return _apply_column_function('rf_local_extract_bits', tile, start_bit, num_bits) + return _apply_column_function("rf_local_extract_bits", tile, start_bit, num_bits) def rf_round(tile_col: Column_type) -> Column: """Round cell values to the nearest integer without changing the cell type""" - return _apply_column_function('rf_round', tile_col) + return _apply_column_function("rf_round", tile_col) def rf_local_min(tile_col, min): """Performs cell-wise minimum two tiles or a tile and a scalar.""" if isinstance(min, (int, float)): min = lit(min) - return _apply_column_function('rf_local_min', tile_col, min) + return _apply_column_function("rf_local_min", tile_col, min) def rf_local_max(tile_col, max): """Performs cell-wise maximum two tiles or a tile and a scalar.""" if isinstance(max, (int, float)): max = lit(max) - return _apply_column_function('rf_local_max', tile_col, max) + return _apply_column_function("rf_local_max", tile_col, max) def rf_local_clamp(tile_col, min, max): - """ Return the tile with its values limited to a range defined by min and max, inclusive. """ + """Return the tile with its values limited to a range defined by min and max, inclusive.""" if isinstance(min, (int, float)): min = lit(min) if isinstance(max, (int, float)): max = lit(max) - return _apply_column_function('rf_local_clamp', tile_col, min, max) + return _apply_column_function("rf_local_clamp", tile_col, min, max) def rf_where(condition, x, y): """Return a tile with cell values chosen from `x` or `y` depending on `condition`. - Operates cell-wise in a similar fashion to Spark SQL `when` and `otherwise`.""" - return _apply_column_function('rf_where', condition, x, y) + Operates cell-wise in a similar fashion to Spark SQL `when` and `otherwise`.""" + return _apply_column_function("rf_where", condition, x, y) def rf_standardize(tile, mean=None, stddev=None): @@ -703,10 +793,12 @@ def rf_standardize(tile, mean=None, stddev=None): if isinstance(stddev, (int, float)): stddev = lit(stddev) if mean is None and stddev is None: - return _apply_column_function('rf_standardize', tile) + return _apply_column_function("rf_standardize", tile) if mean is not None and stddev is not None: - return _apply_column_function('rf_standardize', tile, mean, stddev) - raise ValueError('Either `mean` or `stddev` should both be specified or omitted in call to rf_standardize.') + return _apply_column_function("rf_standardize", tile, mean, stddev) + raise ValueError( + "Either `mean` or `stddev` should both be specified or omitted in call to rf_standardize." + ) def rf_rescale(tile, min=None, max=None): @@ -721,136 +813,187 @@ def rf_rescale(tile, min=None, max=None): if isinstance(max, (int, float)): max = lit(float(max)) if min is None and max is None: - return _apply_column_function('rf_rescale', tile) + return _apply_column_function("rf_rescale", tile) if min is not None and max is not None: - return _apply_column_function('rf_rescale', tile, min, max) - raise ValueError('Either `min` or `max` should both be specified or omitted in call to rf_rescale.') + return _apply_column_function("rf_rescale", tile, min, max) + raise ValueError( + "Either `min` or `max` should both be specified or omitted in call to rf_rescale." 
+ ) def rf_abs(tile_col: Column_type) -> Column: """Compute the absolute value of each cell""" - return _apply_column_function('rf_abs', tile_col) + return _apply_column_function("rf_abs", tile_col) def rf_log(tile_col: Column_type) -> Column: """Performs cell-wise natural logarithm""" - return _apply_column_function('rf_log', tile_col) + return _apply_column_function("rf_log", tile_col) def rf_log10(tile_col: Column_type) -> Column: """Performs cell-wise logartithm with base 10""" - return _apply_column_function('rf_log10', tile_col) + return _apply_column_function("rf_log10", tile_col) def rf_log2(tile_col: Column_type) -> Column: """Performs cell-wise logartithm with base 2""" - return _apply_column_function('rf_log2', tile_col) + return _apply_column_function("rf_log2", tile_col) def rf_log1p(tile_col: Column_type) -> Column: """Performs natural logarithm of cell values plus one""" - return _apply_column_function('rf_log1p', tile_col) + return _apply_column_function("rf_log1p", tile_col) def rf_exp(tile_col: Column_type) -> Column: """Performs cell-wise exponential""" - return _apply_column_function('rf_exp', tile_col) + return _apply_column_function("rf_exp", tile_col) def rf_exp2(tile_col: Column_type) -> Column: """Compute 2 to the power of cell values""" - return _apply_column_function('rf_exp2', tile_col) + return _apply_column_function("rf_exp2", tile_col) def rf_exp10(tile_col: Column_type) -> Column: """Compute 10 to the power of cell values""" - return _apply_column_function('rf_exp10', tile_col) + return _apply_column_function("rf_exp10", tile_col) def rf_expm1(tile_col: Column_type) -> Column: """Performs cell-wise exponential, then subtract one""" - return _apply_column_function('rf_expm1', tile_col) + return _apply_column_function("rf_expm1", tile_col) def rf_sqrt(tile_col: Column_type) -> Column: """Performs cell-wise square root""" - return _apply_column_function('rf_sqrt', tile_col) + return _apply_column_function("rf_sqrt", tile_col) + def rf_identity(tile_col: Column_type) -> Column: """Pass tile through unchanged""" - return _apply_column_function('rf_identity', tile_col) + return _apply_column_function("rf_identity", tile_col) + -def rf_focal_max(tile_col: Column_type, neighborhood: Union[str, Column_type], target: Union[str, Column_type] = 'all') -> Column: +def rf_focal_max( + tile_col: Column_type, + neighborhood: Union[str, Column_type], + target: Union[str, Column_type] = "all", +) -> Column: """Compute the max value in its neighborhood of each cell""" if isinstance(neighborhood, str): neighborhood = lit(neighborhood) if isinstance(target, str): target = lit(target) - return _apply_column_function('rf_focal_max', tile_col, neighborhood, target) + return _apply_column_function("rf_focal_max", tile_col, neighborhood, target) -def rf_focal_mean(tile_col: Column_type, neighborhood: Union[str, Column_type], target: Union[str, Column_type] = 'all') -> Column: + +def rf_focal_mean( + tile_col: Column_type, + neighborhood: Union[str, Column_type], + target: Union[str, Column_type] = "all", +) -> Column: """Compute the mean value in its neighborhood of each cell""" if isinstance(neighborhood, str): neighborhood = lit(neighborhood) if isinstance(target, str): target = lit(target) - return _apply_column_function('rf_focal_mean', tile_col, neighborhood, target) + return _apply_column_function("rf_focal_mean", tile_col, neighborhood, target) + -def rf_focal_median(tile_col: Column_type, neighborhood: Union[str, Column_type], target: Union[str, Column_type] = 'all') -> 
-def rf_focal_median(tile_col: Column_type, neighborhood: Union[str, Column_type], target: Union[str, Column_type] = 'all') -> Column:
+def rf_focal_median(
+    tile_col: Column_type,
+    neighborhood: Union[str, Column_type],
+    target: Union[str, Column_type] = "all",
+) -> Column:
     """Compute the median value in the neighborhood of each cell"""
     if isinstance(neighborhood, str):
         neighborhood = lit(neighborhood)
     if isinstance(target, str):
         target = lit(target)
-    return _apply_column_function('rf_focal_median', tile_col, neighborhood, target)
+    return _apply_column_function("rf_focal_median", tile_col, neighborhood, target)
 
-def rf_focal_min(tile_col: Column_type, neighborhood: Union[str, Column_type], target: Union[str, Column_type] = 'all') -> Column:
+
+def rf_focal_min(
+    tile_col: Column_type,
+    neighborhood: Union[str, Column_type],
+    target: Union[str, Column_type] = "all",
+) -> Column:
     """Compute the min value in the neighborhood of each cell"""
     if isinstance(neighborhood, str):
         neighborhood = lit(neighborhood)
     if isinstance(target, str):
         target = lit(target)
-    return _apply_column_function('rf_focal_min', tile_col, neighborhood, target)
+    return _apply_column_function("rf_focal_min", tile_col, neighborhood, target)
+
 
-def rf_focal_mode(tile_col: Column_type, neighborhood: Union[str, Column_type], target: Union[str, Column_type] = 'all') -> Column:
+def rf_focal_mode(
+    tile_col: Column_type,
+    neighborhood: Union[str, Column_type],
+    target: Union[str, Column_type] = "all",
+) -> Column:
     """Compute the mode value in the neighborhood of each cell"""
     if isinstance(neighborhood, str):
         neighborhood = lit(neighborhood)
     if isinstance(target, str):
         target = lit(target)
-    return _apply_column_function('rf_focal_mode', tile_col, neighborhood, target)
+    return _apply_column_function("rf_focal_mode", tile_col, neighborhood, target)
 
-def rf_focal_std_dev(tile_col: Column_type, neighborhood: Union[str, Column_type], target: Union[str, Column_type] = 'all') -> Column:
+
+def rf_focal_std_dev(
+    tile_col: Column_type,
+    neighborhood: Union[str, Column_type],
+    target: Union[str, Column_type] = "all",
+) -> Column:
     """Compute the standard deviation value in the neighborhood of each cell"""
     if isinstance(neighborhood, str):
         neighborhood = lit(neighborhood)
     if isinstance(target, str):
         target = lit(target)
-    return _apply_column_function('rf_focal_std_dev', tile_col, neighborhood, target)
+    return _apply_column_function("rf_focal_std_dev", tile_col, neighborhood, target)
+
 
-def rf_moransI(tile_col: Column_type, neighborhood: Union[str, Column_type], target: Union[str, Column_type] = 'all') -> Column:
+def rf_moransI(
+    tile_col: Column_type,
+    neighborhood: Union[str, Column_type],
+    target: Union[str, Column_type] = "all",
+) -> Column:
     """Compute Moran's I in the neighborhood of each cell"""
     if isinstance(neighborhood, str):
         neighborhood = lit(neighborhood)
     if isinstance(target, str):
         target = lit(target)
-    return _apply_column_function('rf_focal_moransi', tile_col, neighborhood, target)
+    return _apply_column_function("rf_focal_moransi", tile_col, neighborhood, target)
 
-def rf_aspect(tile_col: Column_type, target: Union[str, Column_type] = 'all') -> Column:
+
+def rf_aspect(tile_col: Column_type, target: Union[str, Column_type] = "all") -> Column:
     """Calculates the aspect of each cell in an elevation raster"""
     if isinstance(target, str):
         target = lit(target)
-    return _apply_column_function('rf_aspect', tile_col, target)
+    return _apply_column_function("rf_aspect", tile_col, target)
+
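For orientation (not part of the diff): the focal functions above all share the same shape, taking a tile column, a neighborhood given as a string or Column, and an optional `target`. A minimal sketch, where `df`, the column names, and the "square-1" neighborhood literal (a GeoTrellis-style neighborhood encoding) are illustrative assumptions:

    from pyrasterframes.rasterfunctions import rf_aspect, rf_focal_mean

    # Smooth each tile with the mean of a 3x3 ("square-1") neighborhood.
    df = df.withColumn("smoothed", rf_focal_mean("proj_raster", "square-1"))
    # Terrain functions such as rf_aspect only need the elevation tile column.
    df = df.withColumn("aspect", rf_aspect("proj_raster"))
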
-def rf_slope(tile_col: Column_type, z_factor: Union[int, float, Column_type], target: Union[str, Column_type] = 'all') -> Column:
+def rf_slope(
+    tile_col: Column_type,
+    z_factor: Union[int, float, Column_type],
+    target: Union[str, Column_type] = "all",
+) -> Column:
     """Calculates the slope of each cell in an elevation raster"""
     if isinstance(z_factor, (int, float)):
         z_factor = lit(z_factor)
     if isinstance(target, str):
         target = lit(target)
-    return _apply_column_function('rf_slope', tile_col, z_factor, target)
+    return _apply_column_function("rf_slope", tile_col, z_factor, target)
 
-def rf_hillshade(tile_col: Column_type, azimuth: Union[int, float, Column_type], altitude: Union[int, float, Column_type], z_factor: Union[int, float, Column_type], target: Union[str, Column_type] = 'all') -> Column:
+
+def rf_hillshade(
+    tile_col: Column_type,
+    azimuth: Union[int, float, Column_type],
+    altitude: Union[int, float, Column_type],
+    z_factor: Union[int, float, Column_type],
+    target: Union[str, Column_type] = "all",
+) -> Column:
     """Calculates the hillshade of each cell in an elevation raster"""
     if isinstance(azimuth, (int, float)):
         azimuth = lit(azimuth)
@@ -860,64 +1003,67 @@ def rf_hillshade(tile_col: Column_type, azimuth: Union[int, float, Column_type],
         z_factor = lit(z_factor)
     if isinstance(target, str):
         target = lit(target)
-    return _apply_column_function('rf_hillshade', tile_col, azimuth, altitude, z_factor, target)
+    return _apply_column_function("rf_hillshade", tile_col, azimuth, altitude, z_factor, target)
+
 
 def rf_resample(tile_col: Column_type, scale_factor: Union[int, float, Column_type]) -> Column:
     """Resample a tile to a different size, based on a scalar factor or a tile whose dimensions to match.
     A scalar less than one will downsample the tile; greater than one will upsample. Uses nearest-neighbor."""
     if isinstance(scale_factor, (int, float)):
         scale_factor = lit(scale_factor)
-    return _apply_column_function('rf_resample', tile_col, scale_factor)
+    return _apply_column_function("rf_resample", tile_col, scale_factor)
 
 
 def rf_crs(tile_col: Column_type) -> Column:
     """Get the CRS of a RasterSource or ProjectedRasterTile"""
-    return _apply_column_function('rf_crs', tile_col)
+    return _apply_column_function("rf_crs", tile_col)
 
 
 def rf_mk_crs(crs_text: str) -> Column:
     """Resolve CRS from text identifier. Supported registries are EPSG, ESRI, WORLD, NAD83, & NAD27.
    An example of a valid CRS name is EPSG:3005."""
-    return Column(_context_call('_make_crs_literal', crs_text))
+    return Column(_context_call("_make_crs_literal", crs_text))
 
 
 def st_extent(geom_col: Column_type) -> Column:
     """Compute the extent/bbox of a Geometry"""
-    return _apply_column_function('st_extent', geom_col)
+    return _apply_column_function("st_extent", geom_col)
 
 
 def rf_extent(proj_raster_col: Column_type) -> Column:
     """Get the extent of a RasterSource or ProjectedRasterTile (a tile with embedded extent and CRS)"""
-    return _apply_column_function('rf_extent', proj_raster_col)
+    return _apply_column_function("rf_extent", proj_raster_col)
 
 
 def rf_tile(proj_raster_col: Column_type) -> Column:
     """Extracts the Tile component of a ProjectedRasterTile (or Tile)."""
-    return _apply_column_function('rf_tile', proj_raster_col)
+    return _apply_column_function("rf_tile", proj_raster_col)
 
 
 def rf_proj_raster(tile, extent, crs):
     """ Construct a `proj_raster` structure from individual CRS, Extent, and Tile columns """
-    return _apply_column_function('rf_proj_raster', tile, extent, crs)
+    return _apply_column_function("rf_proj_raster", tile, extent, crs)
 
 
 def st_geometry(extent_col: Column_type) -> Column:
     """Convert the given extent/bbox to a polygon"""
-    return _apply_column_function('st_geometry', extent_col)
+    return _apply_column_function("st_geometry", extent_col)
 
 
 def rf_geometry(proj_raster_col: Column_type) -> Column:
     """Get the extent of a RasterSource or ProjectedRasterTile as a Geometry"""
-    return _apply_column_function('rf_geometry', proj_raster_col)
+    return _apply_column_function("rf_geometry", proj_raster_col)
 
 
-def rf_xz2_index(geom_col: Column_type, crs_col: Optional[Column_type] = None, index_resolution: int = 18) -> Column:
+def rf_xz2_index(
+    geom_col: Column_type, crs_col: Optional[Column_type] = None, index_resolution: int = 18
+) -> Column:
     """Constructs a XZ2 index in WGS84 from either a Geometry, Extent, ProjectedRasterTile, or RasterSource and its CRS.
-    For details: https://www.geomesa.org/documentation/user/datastores/index_overview.html """
+    For details: https://www.geomesa.org/documentation/user/datastores/index_overview.html"""
 
-    jfcn = RFContext.active().lookup('rf_xz2_index')
+    jfcn = RFContext.active().lookup("rf_xz2_index")
 
     if crs_col is not None:
         return Column(jfcn(_to_java_column(geom_col), _to_java_column(crs_col), index_resolution))
@@ -925,360 +1071,364 @@ def rf_xz2_index(geom_col: Column_type, crs_col: Optional[Column_type] = None, i
         return Column(jfcn(_to_java_column(geom_col), index_resolution))
 
 
-def rf_z2_index(geom_col: Column_type, crs_col: Optional[Column_type] = None, index_resolution: int = 18) -> Column:
+def rf_z2_index(
+    geom_col: Column_type, crs_col: Optional[Column_type] = None, index_resolution: int = 18
+) -> Column:
     """Constructs a Z2 index in WGS84 from either a Geometry, Extent, ProjectedRasterTile, or RasterSource and its CRS.
-    First the native extent is extracted or computed, and then center is used as the indexing location.
-    For details: https://www.geomesa.org/documentation/user/datastores/index_overview.html """
+    First the native extent is extracted or computed, and then its center is used as the indexing location.
+ For details: https://www.geomesa.org/documentation/user/datastores/index_overview.html""" - jfcn = RFContext.active().lookup('rf_z2_index') + jfcn = RFContext.active().lookup("rf_z2_index") if crs_col is not None: return Column(jfcn(_to_java_column(geom_col), _to_java_column(crs_col), index_resolution)) else: return Column(jfcn(_to_java_column(geom_col), index_resolution)) + # ------ GeoMesa Functions ------ + def st_geomFromGeoHash(*args): """""" - return _apply_column_function('st_geomFromGeoHash', *args) + return _apply_column_function("st_geomFromGeoHash", *args) def st_geomFromWKT(*args): """""" - return _apply_column_function('st_geomFromWKT', *args) + return _apply_column_function("st_geomFromWKT", *args) def st_geomFromWKB(*args): """""" - return _apply_column_function('st_geomFromWKB', *args) + return _apply_column_function("st_geomFromWKB", *args) def st_lineFromText(*args): """""" - return _apply_column_function('st_lineFromText', *args) + return _apply_column_function("st_lineFromText", *args) def st_makeBox2D(*args): """""" - return _apply_column_function('st_makeBox2D', *args) + return _apply_column_function("st_makeBox2D", *args) def st_makeBBox(*args): """""" - return _apply_column_function('st_makeBBox', *args) + return _apply_column_function("st_makeBBox", *args) def st_makePolygon(*args): """""" - return _apply_column_function('st_makePolygon', *args) + return _apply_column_function("st_makePolygon", *args) def st_makePoint(*args): """""" - return _apply_column_function('st_makePoint', *args) + return _apply_column_function("st_makePoint", *args) def st_makeLine(*args): """""" - return _apply_column_function('st_makeLine', *args) + return _apply_column_function("st_makeLine", *args) def st_makePointM(*args): """""" - return _apply_column_function('st_makePointM', *args) + return _apply_column_function("st_makePointM", *args) def st_mLineFromText(*args): """""" - return _apply_column_function('st_mLineFromText', *args) + return _apply_column_function("st_mLineFromText", *args) def st_mPointFromText(*args): """""" - return _apply_column_function('st_mPointFromText', *args) + return _apply_column_function("st_mPointFromText", *args) def st_mPolyFromText(*args): """""" - return _apply_column_function('st_mPolyFromText', *args) + return _apply_column_function("st_mPolyFromText", *args) def st_point(*args): """""" - return _apply_column_function('st_point', *args) + return _apply_column_function("st_point", *args) def st_pointFromGeoHash(*args): """""" - return _apply_column_function('st_pointFromGeoHash', *args) + return _apply_column_function("st_pointFromGeoHash", *args) def st_pointFromText(*args): """""" - return _apply_column_function('st_pointFromText', *args) + return _apply_column_function("st_pointFromText", *args) def st_pointFromWKB(*args): """""" - return _apply_column_function('st_pointFromWKB', *args) + return _apply_column_function("st_pointFromWKB", *args) def st_polygon(*args): """""" - return _apply_column_function('st_polygon', *args) + return _apply_column_function("st_polygon", *args) def st_polygonFromText(*args): """""" - return _apply_column_function('st_polygonFromText', *args) + return _apply_column_function("st_polygonFromText", *args) def st_castToPoint(*args): """""" - return _apply_column_function('st_castToPoint', *args) + return _apply_column_function("st_castToPoint", *args) def st_castToPolygon(*args): """""" - return _apply_column_function('st_castToPolygon', *args) + return _apply_column_function("st_castToPolygon", *args) def 
st_castToLineString(*args): """""" - return _apply_column_function('st_castToLineString', *args) + return _apply_column_function("st_castToLineString", *args) def st_byteArray(*args): """""" - return _apply_column_function('st_byteArray', *args) + return _apply_column_function("st_byteArray", *args) def st_boundary(*args): """""" - return _apply_column_function('st_boundary', *args) + return _apply_column_function("st_boundary", *args) def st_coordDim(*args): """""" - return _apply_column_function('st_coordDim', *args) + return _apply_column_function("st_coordDim", *args) def st_dimension(*args): """""" - return _apply_column_function('st_dimension', *args) + return _apply_column_function("st_dimension", *args) def st_envelope(*args): """""" - return _apply_column_function('st_envelope', *args) + return _apply_column_function("st_envelope", *args) def st_exteriorRing(*args): """""" - return _apply_column_function('st_exteriorRing', *args) + return _apply_column_function("st_exteriorRing", *args) def st_geometryN(*args): """""" - return _apply_column_function('st_geometryN', *args) + return _apply_column_function("st_geometryN", *args) def st_geometryType(*args): """""" - return _apply_column_function('st_geometryType', *args) + return _apply_column_function("st_geometryType", *args) def st_interiorRingN(*args): """""" - return _apply_column_function('st_interiorRingN', *args) + return _apply_column_function("st_interiorRingN", *args) def st_isClosed(*args): """""" - return _apply_column_function('st_isClosed', *args) + return _apply_column_function("st_isClosed", *args) def st_isCollection(*args): """""" - return _apply_column_function('st_isCollection', *args) + return _apply_column_function("st_isCollection", *args) def st_isEmpty(*args): """""" - return _apply_column_function('st_isEmpty', *args) + return _apply_column_function("st_isEmpty", *args) def st_isRing(*args): """""" - return _apply_column_function('st_isRing', *args) + return _apply_column_function("st_isRing", *args) def st_isSimple(*args): """""" - return _apply_column_function('st_isSimple', *args) + return _apply_column_function("st_isSimple", *args) def st_isValid(*args): """""" - return _apply_column_function('st_isValid', *args) + return _apply_column_function("st_isValid", *args) def st_numGeometries(*args): """""" - return _apply_column_function('st_numGeometries', *args) + return _apply_column_function("st_numGeometries", *args) def st_numPoints(*args): """""" - return _apply_column_function('st_numPoints', *args) + return _apply_column_function("st_numPoints", *args) def st_pointN(*args): """""" - return _apply_column_function('st_pointN', *args) + return _apply_column_function("st_pointN", *args) def st_x(*args): """""" - return _apply_column_function('st_x', *args) + return _apply_column_function("st_x", *args) def st_y(*args): """""" - return _apply_column_function('st_y', *args) + return _apply_column_function("st_y", *args) def st_asBinary(*args): """""" - return _apply_column_function('st_asBinary', *args) + return _apply_column_function("st_asBinary", *args) def st_asGeoJSON(*args): """""" - return _apply_column_function('st_asGeoJSON', *args) + return _apply_column_function("st_asGeoJSON", *args) def st_asLatLonText(*args): """""" - return _apply_column_function('st_asLatLonText', *args) + return _apply_column_function("st_asLatLonText", *args) def st_asText(*args): """""" - return _apply_column_function('st_asText', *args) + return _apply_column_function("st_asText", *args) def st_geoHash(*args): """""" - 
return _apply_column_function('st_geoHash', *args) + return _apply_column_function("st_geoHash", *args) def st_bufferPoint(*args): """""" - return _apply_column_function('st_bufferPoint', *args) + return _apply_column_function("st_bufferPoint", *args) def st_antimeridianSafeGeom(*args): """""" - return _apply_column_function('st_antimeridianSafeGeom', *args) + return _apply_column_function("st_antimeridianSafeGeom", *args) def st_translate(*args): """""" - return _apply_column_function('st_translate', *args) + return _apply_column_function("st_translate", *args) def st_contains(*args): """""" - return _apply_column_function('st_contains', *args) + return _apply_column_function("st_contains", *args) def st_covers(*args): """""" - return _apply_column_function('st_covers', *args) + return _apply_column_function("st_covers", *args) def st_crosses(*args): """""" - return _apply_column_function('st_crosses', *args) + return _apply_column_function("st_crosses", *args) def st_disjoint(*args): """""" - return _apply_column_function('st_disjoint', *args) + return _apply_column_function("st_disjoint", *args) def st_equals(*args): """""" - return _apply_column_function('st_equals', *args) + return _apply_column_function("st_equals", *args) def st_intersects(*args): """""" - return _apply_column_function('st_intersects', *args) + return _apply_column_function("st_intersects", *args) def st_overlaps(*args): """""" - return _apply_column_function('st_overlaps', *args) + return _apply_column_function("st_overlaps", *args) def st_touches(*args): """""" - return _apply_column_function('st_touches', *args) + return _apply_column_function("st_touches", *args) def st_within(*args): """""" - return _apply_column_function('st_within', *args) + return _apply_column_function("st_within", *args) def st_relate(*args): """""" - return _apply_column_function('st_relate', *args) + return _apply_column_function("st_relate", *args) def st_relateBool(*args): """""" - return _apply_column_function('st_relateBool', *args) + return _apply_column_function("st_relateBool", *args) def st_area(*args): """""" - return _apply_column_function('st_area', *args) + return _apply_column_function("st_area", *args) def st_closestPoint(*args): """""" - return _apply_column_function('st_closestPoint', *args) + return _apply_column_function("st_closestPoint", *args) def st_centroid(*args): """""" - return _apply_column_function('st_centroid', *args) + return _apply_column_function("st_centroid", *args) def st_distance(*args): """""" - return _apply_column_function('st_distance', *args) + return _apply_column_function("st_distance", *args) def st_distanceSphere(*args): """""" - return _apply_column_function('st_distanceSphere', *args) + return _apply_column_function("st_distanceSphere", *args) def st_length(*args): """""" - return _apply_column_function('st_length', *args) + return _apply_column_function("st_length", *args) def st_aggregateDistanceSphere(*args): """""" - return _apply_column_function('st_aggregateDistanceSphere', *args) + return _apply_column_function("st_aggregateDistanceSphere", *args) def st_lengthSphere(*args): """""" - return _apply_column_function('st_lengthSphere', *args) + return _apply_column_function("st_lengthSphere", *args) diff --git a/pyrasterframes/src/main/python/pyrasterframes/rf_context.py b/python/pyrasterframes/rf_context.py similarity index 93% rename from pyrasterframes/src/main/python/pyrasterframes/rf_context.py rename to python/pyrasterframes/rf_context.py index f720bcd37..0a4703428 100644 --- 
a/pyrasterframes/src/main/python/pyrasterframes/rf_context.py +++ b/python/pyrasterframes/rf_context.py @@ -22,21 +22,21 @@ This module contains access to the jvm SparkContext with RasterFrameLayer support. """ -from pyspark import SparkContext -from pyspark.sql import SparkSession +from typing import Any, List, Tuple -from typing import Any, List -from py4j.java_gateway import JavaMember from py4j.java_collections import JavaList, JavaMap -from typing import Tuple +from py4j.java_gateway import JavaMember +from pyspark import SparkContext +from pyspark.sql import SparkSession -__all__ = ['RFContext'] +__all__ = ["RFContext"] class RFContext(object): """ Entrypoint to RasterFrames services """ + def __init__(self, spark_session: SparkSession): self._spark_session = spark_session self._gateway = spark_session.sparkContext._gateway @@ -45,7 +45,7 @@ def __init__(self, spark_session: SparkSession): self._jrfctx = self._jvm.org.locationtech.rasterframes.py.PyRFContext(jsess) def list_to_seq(self, py_list: List[Any]) -> JavaList: - conv = self.lookup('_listToSeq') + conv = self.lookup("_listToSeq") return conv(py_list) def lookup(self, function_name: str) -> JavaMember: @@ -79,9 +79,10 @@ def active(): Get the active Python RFContext and throw an error if it is not enabled for RasterFrames. """ sc = SparkContext._active_spark_context - if not hasattr(sc, '_rf_context'): + if not hasattr(sc, "_rf_context"): raise AttributeError( - "RasterFrames have not been enabled for the active session. Call 'SparkSession.withRasterFrames()'.") + "RasterFrames have not been enabled for the active session. Call 'SparkSession.withRasterFrames()'." + ) return sc._rf_context @staticmethod @@ -95,4 +96,3 @@ def jvm(): Get the active Scala PyRFContext and throw an error if it is not enabled for RasterFrames. 
""" return RFContext.active()._jvm - diff --git a/pyrasterframes/src/main/python/pyrasterframes/rf_ipython.py b/python/pyrasterframes/rf_ipython.py similarity index 75% rename from pyrasterframes/src/main/python/pyrasterframes/rf_ipython.py rename to python/pyrasterframes/rf_ipython.py index ce76147ae..0f4a4e09a 100644 --- a/pyrasterframes/src/main/python/pyrasterframes/rf_ipython.py +++ b/python/pyrasterframes/rf_ipython.py @@ -18,20 +18,26 @@ # SPDX-License-Identifier: Apache-2.0 # from functools import partial +from typing import Optional, Tuple, Union +import numpy as np import pyrasterframes.rf_types -from pyrasterframes.rf_types import Tile -from shapely.geometry.base import BaseGeometry from matplotlib.axes import Axes -import numpy as np from pandas import DataFrame -from typing import Optional, Tuple, Union +from pyrasterframes.rf_types import Tile +from shapely.geometry.base import BaseGeometry _png_header = bytearray([0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]) -def plot_tile(tile: Tile, normalize: bool = True, lower_percentile: float = 1., upper_percentile: float = 99., - axis: Optional[Axes] = None, **imshow_args): +def plot_tile( + tile: Tile, + normalize: bool = True, + lower_percentile: float = 1.0, + upper_percentile: float = 99.0, + axis: Optional[Axes] = None, + **imshow_args, +): """ Display an image of the tile @@ -53,20 +59,22 @@ def plot_tile(tile: Tile, normalize: bool = True, lower_percentile: float = 1., """ if axis is None: import matplotlib.pyplot as plt + axis = plt.gca() arr = tile.cells def normalize_cells(cells: np.ndarray) -> np.ndarray: - assert upper_percentile > lower_percentile, 'invalid upper and lower percentiles {}, {}'.format( - lower_percentile, upper_percentile) + assert ( + upper_percentile > lower_percentile + ), "invalid upper and lower percentiles {}, {}".format(lower_percentile, upper_percentile) sans_mask = np.array(cells) lower = np.nanpercentile(sans_mask, lower_percentile) upper = np.nanpercentile(sans_mask, upper_percentile) cells_clipped = np.clip(cells, lower, upper) return (cells_clipped - lower) / (upper - lower) - axis.set_aspect('equal') + axis.set_aspect("equal") axis.xaxis.set_ticks([]) axis.yaxis.set_ticks([]) @@ -80,13 +88,19 @@ def normalize_cells(cells: np.ndarray) -> np.ndarray: return axis -def tile_to_png(tile: Tile, lower_percentile: float = 1., upper_percentile: float = 99., title: Optional[str] = None, - fig_size: Optional[Tuple[int, int]] = None) -> bytes: - """ Provide image of Tile.""" +def tile_to_png( + tile: Tile, + lower_percentile: float = 1.0, + upper_percentile: float = 99.0, + title: Optional[str] = None, + fig_size: Optional[Tuple[int, int]] = None, +) -> bytes: + """Provide image of Tile.""" if tile.cells is None: return None import io + from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas from matplotlib.figure import Figure @@ -100,13 +114,14 @@ def tile_to_png(tile: Tile, lower_percentile: float = 1., upper_percentile: floa axis = fig.add_subplot(1, 1, 1) plot_tile(tile, True, lower_percentile, upper_percentile, axis=axis) - axis.set_aspect('equal') + axis.set_aspect("equal") axis.xaxis.set_ticks([]) axis.yaxis.set_ticks([]) if title is None: - axis.set_title('{}, {}'.format(tile.dimensions(), tile.cell_type.__repr__()), - fontsize=fig_size[0] * 4) # compact metadata as title + axis.set_title( + "{}, {}".format(tile.dimensions(), tile.cell_type.__repr__()), fontsize=fig_size[0] * 4 + ) # compact metadata as title else: axis.set_title(title, fontsize=fig_size[0] * 4) # compact 
metadata as title
@@ -116,35 +131,40 @@ def tile_to_png(tile: Tile, lower_percentile: float = 1., upper_percentile: floa
 
 def tile_to_html(tile: Tile, fig_size: Optional[Tuple[int, int]] = None) -> str:
-    """ Provide HTML string representation of Tile image."""
+    """Provide HTML string representation of Tile image."""
     import base64
+
     b64_img_html = '<img src="data:image/png;base64,{}"></img>'
     png_bits = tile_to_png(tile, fig_size=fig_size)
-    b64_png = base64.b64encode(png_bits).decode('utf-8').replace('\n', '')
+    b64_png = base64.b64encode(png_bits).decode("utf-8").replace("\n", "")
     return b64_img_html.format(b64_png)
 
 
 def binary_to_html(blob) -> Union[str, bytearray]:
-    """ When using rf_render_png, the result from the JVM is a byte string with a special PNG header.
-    Look for this header and return base64 encoded HTML for Jupyter display
+    """When using rf_render_png, the result from the JVM is a byte string with a special PNG header.
+    Look for this header and return base64 encoded HTML for Jupyter display
     """
     import base64
+
     if blob[:8] == _png_header:
         b64_img_html = '<img src="data:image/png;base64,{}"></img>'
-        b64_png = base64.b64encode(blob).decode('utf-8').replace('\n', '')
+        b64_png = base64.b64encode(blob).decode("utf-8").replace("\n", "")
         return b64_img_html.format(b64_png)
     else:
         return blob
 
 
 def pandas_df_to_html(df: DataFrame) -> Optional[str]:
-    """Provide HTML formatting for pandas.DataFrame with rf_types.Tile in the columns. """
+    """Provide HTML formatting for pandas.DataFrame with rf_types.Tile in the columns."""
     import pandas as pd
+
     # honor the existing options on display
     if not pd.get_option("display.notebook_repr_html"):
         return None
 
-    default_max_colwidth = pd.get_option('display.max_colwidth')  # we'll try to politely put it back
+    default_max_colwidth = pd.get_option(
+        "display.max_colwidth"
+    )  # we'll try to politely put it back
 
     if len(df) == 0:
         return df._repr_html_()
@@ -153,7 +173,9 @@ def pandas_df_to_html(df: DataFrame) -> Optional[str]:
     geom_cols = []
     bytearray_cols = []
     for c in df.columns:
-        if isinstance(df.iloc[0][c], pyrasterframes.rf_types.Tile):  # if the first is a Tile try formatting
+        if isinstance(
+            df.iloc[0][c], pyrasterframes.rf_types.Tile
+        ):  # if the first is a Tile try formatting
             tile_cols.append(c)
         elif isinstance(df.iloc[0][c], BaseGeometry):  # if the first is a Geometry try formatting
             geom_cols.append(c)
@@ -171,7 +193,7 @@ def _safe_geom_to_html(g):
         if isinstance(g, BaseGeometry):
             wkt = g.wkt
             if len(wkt) > default_max_colwidth:
-                return wkt[:default_max_colwidth - 3] + '...'
+                return wkt[: default_max_colwidth - 3] + "..."
             else:
                 return wkt
         else:
@@ -189,37 +211,39 @@ def _safe_bytearray_to_html(b):
     formatter.update({c: _safe_bytearray_to_html for c in bytearray_cols})
 
     # This is needed to avoid our tile being rendered as `<memory at 0x...>`
 
 
 def spark_df_to_markdown(df: DataFrame, num_rows: int = 5, truncate: bool = False) -> str:
     from pyrasterframes import RFContext
+
     return RFContext.active().call("_dfToMarkdown", df._jdf, num_rows, truncate)
 
 
 def spark_df_to_html(df: DataFrame, num_rows: int = 5, truncate: bool = False) -> str:
     from pyrasterframes import RFContext
+
     return RFContext.active().call("_dfToHTML", df._jdf, num_rows, truncate)
 
 
 def _folium_map_formatter(map) -> str:
-    """ inputs a folium.Map object and returns html of rendered map """
+    """Inputs a folium.Map object and returns HTML of the rendered map"""
     import base64
+
     html_source = map.get_root().render()
-    b64_source = base64.b64encode(
-        bytes(html_source.encode('utf-8'))
-    ).decode('utf-8')
+    b64_source = base64.b64encode(bytes(html_source.encode("utf-8"))).decode("utf-8")
    source_blob = '<iframe src="data:text/html;base64,{}"></iframe>'
     return source_blob.format(b64_source)
 
@@ -227,7 +251,7 @@ def _folium_map_formatter(map) -> str:
 try:
     from IPython import get_ipython
-    from IPython.display import display_png, display_markdown, display_html, display
+    from IPython.display import display, display_html, display_markdown, display_png
 
     # modifications to currently running ipython session, if we are in one; these enable nicer visualization for Pandas
     if get_ipython() is not None:
@@ -239,16 +263,16 @@ def _folium_map_formatter(map) -> str:
         formatters = ip.display_formatter.formatters
         # Register custom formatters
         # PNG
-        png_formatter = formatters['image/png']
+        png_formatter = formatters["image/png"]
         png_formatter.for_type(Tile, tile_to_png)
         # HTML
-        html_formatter = formatters['text/html']
+        html_formatter = formatters["text/html"]
         html_formatter.for_type(pandas.DataFrame, pandas_df_to_html)
         html_formatter.for_type(pyspark.sql.DataFrame, spark_df_to_html)
         html_formatter.for_type(Tile, tile_to_html)
         # Markdown. These will likely only affect docs builds.
-        markdown_formatter = formatters['text/markdown']
+        markdown_formatter = formatters["text/markdown"]
         # Pandas doesn't have a markdown
         markdown_formatter.for_type(pandas.DataFrame, pandas_df_to_html)
         markdown_formatter.for_type(pyspark.sql.DataFrame, spark_df_to_markdown)
@@ -266,8 +290,12 @@ def _folium_map_formatter(map) -> str:
         Tile.show = plot_tile
 
         # noinspection PyTypeChecker
-        def _display(df: pyspark.sql.DataFrame, num_rows: int = 5, truncate: bool = False,
-                     mimetype: str = 'text/html') -> ():
+        def _display(
+            df: pyspark.sql.DataFrame,
+            num_rows: int = 5,
+            truncate: bool = False,
+            mimetype: str = "text/html",
+        ) -> ():
             """
             Invoke IPython `display` with specific controls.
             :param num_rows: number of rows to render
@@ -280,7 +308,6 @@ def _display(df: pyspark.sql.DataFrame, num_rows: int = 5, truncate: bool = Fals
             else:
                 display_markdown(spark_df_to_markdown(df, num_rows, truncate), raw=True)
 
-
         # Add enhanced display function
         pyspark.sql.DataFrame.display = _display
 
diff --git a/pyrasterframes/src/main/python/pyrasterframes/rf_types.py b/python/pyrasterframes/rf_types.py
similarity index 76%
rename from pyrasterframes/src/main/python/pyrasterframes/rf_types.py
rename to python/pyrasterframes/rf_types.py
index 9366fe07e..7cb0a4470 100644
--- a/pyrasterframes/src/main/python/pyrasterframes/rf_types.py
+++ b/python/pyrasterframes/rf_types.py
@@ -24,28 +24,43 @@
 the implementations take advantage of the existing Scala functionality.
 The RasterFrameLayer class here provides the PyRasterFrames entry point.
""" +import functools +import math from itertools import product -import functools, math +from typing import List, Tuple +import numpy as np import pyproj +from py4j.java_collections import Sequence +from pyrasterframes.rf_context import RFContext from pyspark import SparkContext -from pyspark.sql import DataFrame, Column -from pyspark.sql.types import (UserDefinedType, StructType, StructField, BinaryType, DoubleType, ShortType, IntegerType, StringType) - from pyspark.ml.param.shared import HasInputCols -from pyspark.ml.wrapper import JavaTransformer from pyspark.ml.util import DefaultParamsReadable, DefaultParamsWritable - -from pyrasterframes.rf_context import RFContext -from pyspark.sql import SparkSession -from py4j.java_collections import Sequence - -import numpy as np - -from typing import List, Tuple - -__all__ = ['RasterFrameLayer', 'Tile', 'TileUDT', 'CellType', 'Extent', - 'CRS', 'CrsUDT', 'RasterSourceUDT', 'TileExploder', 'NoDataFilter'] +from pyspark.ml.wrapper import JavaTransformer +from pyspark.sql import Column, DataFrame, SparkSession +from pyspark.sql.types import ( + BinaryType, + DoubleType, + IntegerType, + ShortType, + StringType, + StructField, + StructType, + UserDefinedType, +) + +__all__ = [ + "RasterFrameLayer", + "Tile", + "TileUDT", + "CellType", + "Extent", + "CRS", + "CrsUDT", + "RasterSourceUDT", + "TileExploder", + "NoDataFilter", +] class cached_property(object): @@ -60,9 +75,10 @@ def __get__(self, obj, type_): obj.__dict__[self.function.__name__] = val return val + class RasterFrameLayer(DataFrame): def __init__(self, jdf: DataFrame, spark_session: SparkSession): - DataFrame.__init__(self, jdf, spark_session._wrapped) + DataFrame.__init__(self, jdf, spark_session) self._jrfctx = spark_session.rasterframes._jrfctx def tile_columns(self) -> List[Column]: @@ -95,6 +111,7 @@ def tile_layer_metadata(self): :return: A dictionary of metadata. 
""" import json + return json.loads(str(self._jrfctx.tileLayerMetadata(self._jdf))) def spatial_join(self, other_df: DataFrame): @@ -162,16 +179,15 @@ def with_spatial_index(self): class RasterSourceUDT(UserDefinedType): @classmethod def sqlType(cls): - return StructType([ - StructField("raster_source_kryo", BinaryType(), False)]) + return StructType([StructField("raster_source_kryo", BinaryType(), False)]) @classmethod def module(cls): - return 'pyrasterframes.rf_types' + return "pyrasterframes.rf_types" @classmethod def scalaUDT(cls): - return 'org.apache.spark.sql.rf.RasterSourceUDT' + return "org.apache.spark.sql.rf.RasterSourceUDT" def needConversion(self): return False @@ -218,15 +234,13 @@ def reproject(self, src_crs, dest_crs): def buffer(self, amount): return Extent( - self.xmin - amount, - self.ymin - amount, - self.xmax + amount, - self.ymax + amount + self.xmin - amount, self.ymin - amount, self.xmax + amount, self.ymax + amount ) def __str__(self): return self.__jvm__.toString() + class CRS(object): # NB: The name `crsProj4` has to match what's used in StandardSerializers.crsSerializers def __init__(self, crsProj4): @@ -235,7 +249,7 @@ def __init__(self, crsProj4): elif isinstance(crsProj4, str): self.crsProj4 = crsProj4 else: - raise ValueError('Unexpected CRS definition type: {}'.format(type(crsProj4))) + raise ValueError("Unexpected CRS definition type: {}".format(type(crsProj4))) @cached_property def __jvm__(self): @@ -256,7 +270,7 @@ def __eq__(self, other): class CellType(object): def __init__(self, cell_type_name): - assert(isinstance(cell_type_name, str)) + assert isinstance(cell_type_name, str) self.cell_type_name = cell_type_name @classmethod @@ -265,38 +279,38 @@ def from_numpy_dtype(cls, np_dtype: np.dtype): @classmethod def bool(cls): - return CellType('bool') + return CellType("bool") @classmethod def int8(cls): - return CellType('int8') + return CellType("int8") @classmethod def uint8(cls): - return CellType('uint8') + return CellType("uint8") @classmethod def int16(cls): - return CellType('int16') + return CellType("int16") @classmethod def uint16(cls): - return CellType('uint16') + return CellType("uint16") @classmethod def int32(cls): - return CellType('int32') + return CellType("int32") @classmethod def float32(cls): - return CellType('float32') + return CellType("float32") @classmethod def float64(cls): - return CellType('float64') + return CellType("float64") def is_raw(self) -> bool: - return self.cell_type_name.endswith('raw') + return self.cell_type_name.endswith("raw") def is_user_defined_no_data(self) -> bool: return "ud" in self.cell_type_name @@ -305,13 +319,13 @@ def is_default_no_data(self) -> bool: return not (self.is_raw() or self.is_user_defined_no_data()) def is_floating_point(self) -> bool: - return self.cell_type_name.startswith('float') + return self.cell_type_name.startswith("float") def base_cell_type_name(self) -> str: if self.is_raw(): return self.cell_type_name[:-3] elif self.is_user_defined_no_data(): - return self.cell_type_name.split('ud')[0] + return self.cell_type_name.split("ud")[0] else: return self.cell_type_name @@ -322,7 +336,7 @@ def no_data_value(self): if self.is_raw(): return None elif self.is_user_defined_no_data(): - num_str = self.cell_type_name.split('ud')[1] + num_str = self.cell_type_name.split("ud")[1] if self.is_floating_point(): return float(num_str) else: @@ -332,21 +346,21 @@ def no_data_value(self): return np.nan else: n = self.base_cell_type_name() - if n == 'uint8' or n == 'uint16': + if n == "uint8" or n == 
"uint16": return 0 - elif n == 'int8': + elif n == "int8": return -128 - elif n == 'int16': + elif n == "int16": return -32768 - elif n == 'int32': + elif n == "int32": return -2147483648 - elif n == 'bool': + elif n == "bool": return None raise Exception("Unable to determine no_data_value from '{}'".format(n)) def to_numpy_dtype(self) -> np.dtype: n = self.base_cell_type_name() - return np.dtype(n).newbyteorder('>') + return np.dtype(n).newbyteorder(">") def with_no_data_value(self, no_data): if self.has_no_data() and self.no_data_value() == no_data: @@ -355,7 +369,7 @@ def with_no_data_value(self, no_data): no_data = str(float(no_data)) else: no_data = str(int(no_data)) - return CellType(self.base_cell_type_name() + 'ud' + no_data) + return CellType(self.base_cell_type_name() + "ud" + no_data) def __eq__(self, other): if type(other) is type(self): @@ -393,22 +407,23 @@ def __init__(self, cells, cell_type=None, grid_bounds=None): # is it a buffer tile? crop it on extraction to preserve the tile behavior if grid_bounds is not None: colmin, rowmin, colmax, rowmax = grid_bounds - self.cells = self.cells[rowmin:(rowmax+1), colmin:(colmax+1)] + self.cells = self.cells[rowmin : (rowmax + 1), colmin : (colmax + 1)] def __eq__(self, other): if type(other) is type(self): - return self.cell_type == other.cell_type and \ - np.ma.allequal(self.cells, other.cells, fill_value=True) + return self.cell_type == other.cell_type and np.ma.allequal( + self.cells, other.cells, fill_value=True + ) else: return False def __str__(self): - return "Tile(dimensions={}, cell_type={}, cells=\n{})" \ - .format(self.dimensions(), self.cell_type, self.cells) + return "Tile(dimensions={}, cell_type={}, cells=\n{})".format( + self.dimensions(), self.cell_type, self.cells + ) def __repr__(self): - return "Tile({}, {})" \ - .format(repr(self.cells), repr(self.cell_type)) + return "Tile({}, {})".format(repr(self.cells), repr(self.cell_type)) def __add__(self, right): if isinstance(right, Tile): @@ -450,7 +465,7 @@ def __matmul__(self, right): return Tile(np.matmul(self.cells, other)) def dimensions(self) -> Tuple[int, int]: - """ Return a list of cols, rows as is conventional in GeoTrellis and RasterFrames.""" + """Return a list of cols, rows as is conventional in GeoTrellis and RasterFrames.""" return [self.cells.shape[1], self.cells.shape[0]] @@ -460,56 +475,59 @@ def sqlType(cls): """ Mirrors `schema` in scala companion object org.apache.spark.sql.rf.TileUDT """ - extent = StructType([ - StructField("xmin",DoubleType(), True), - StructField("ymin",DoubleType(), True), - StructField("xmax",DoubleType(), True), - StructField("ymax",DoubleType(), True) - ]) - grid = StructType([ - StructField("colMin", IntegerType(), True), - StructField("rowMin", IntegerType(), True), - StructField("colMax", IntegerType(), True), - StructField("rowMax", IntegerType() ,True) - ]) - - ref = StructType([ - StructField("source", StructType([ - StructField("raster_source_kryo", BinaryType(), False) - ]),True), - StructField("bandIndex", IntegerType(), True), - StructField("subextent", extent ,True), - StructField("subgrid", grid, True), - ]) - - return StructType([ - StructField("cellType", StringType(), False), - StructField("cols", IntegerType(), False), - StructField("rows", IntegerType(), False), - StructField("cells", BinaryType(), True), - StructField("gridBounds", grid, True), - StructField("ref", ref, True) - ]) + extent = StructType( + [ + StructField("xmin", DoubleType(), True), + StructField("ymin", DoubleType(), True), + 
StructField("xmax", DoubleType(), True), + StructField("ymax", DoubleType(), True), + ] + ) + grid = StructType( + [ + StructField("colMin", IntegerType(), True), + StructField("rowMin", IntegerType(), True), + StructField("colMax", IntegerType(), True), + StructField("rowMax", IntegerType(), True), + ] + ) + + ref = StructType( + [ + StructField( + "source", + StructType([StructField("raster_source_kryo", BinaryType(), False)]), + True, + ), + StructField("bandIndex", IntegerType(), True), + StructField("subextent", extent, True), + StructField("subgrid", grid, True), + ] + ) + + return StructType( + [ + StructField("cellType", StringType(), False), + StructField("cols", IntegerType(), False), + StructField("rows", IntegerType(), False), + StructField("cells", BinaryType(), True), + StructField("gridBounds", grid, True), + StructField("ref", ref, True), + ] + ) @classmethod def module(cls): - return 'pyrasterframes.rf_types' + return "pyrasterframes.rf_types" @classmethod def scalaUDT(cls): - return 'org.apache.spark.sql.rf.TileUDT' + return "org.apache.spark.sql.rf.TileUDT" def serialize(self, tile): cells = bytearray(tile.cells.flatten().tobytes()) dims = tile.dimensions() - return [ - tile.cell_type.cell_type_name, - dims[0], - dims[1], - cells, - None, - None - ] + return [tile.cell_type.cell_type_name, dims[0], dims[1], cells, None, None] def deserialize(self, datum): """ @@ -542,15 +560,18 @@ def deserialize(self, datum): reshaped = as_numpy.reshape((rows, cols)) t = Tile(reshaped, cell_type, datum.gridBounds) except ValueError as e: - raise ValueError({ - "cell_type": cell_type, - "cols": cols, - "rows": rows, - "cell_data.length": len(cell_data_bytes), - "cell_data.type": type(cell_data_bytes), - "cell_data.values": repr(cell_data_bytes), - "grid_bounds": datum.gridBounds - }, e) + raise ValueError( + { + "cell_type": cell_type, + "cols": cols, + "rows": rows, + "cell_data.length": len(cell_data_bytes), + "cell_data.type": type(cell_data_bytes), + "cell_data.values": repr(cell_data_bytes), + "grid_bounds": datum.gridBounds, + }, + e, + ) return t deserialize.__safe_for_unpickling__ = True @@ -569,11 +590,11 @@ def sqlType(cls): @classmethod def module(cls): - return 'pyrasterframes.rf_types' + return "pyrasterframes.rf_types" @classmethod def scalaUDT(cls): - return 'org.apache.spark.sql.rf.CrsUDT' + return "org.apache.spark.sql.rf.CrsUDT" def serialize(self, crs): return crs.proj4_str @@ -594,7 +615,9 @@ class TileExploder(JavaTransformer, DefaultParamsReadable, DefaultParamsWritable def __init__(self): super(TileExploder, self).__init__() - self._java_obj = self._new_java_obj("org.locationtech.rasterframes.ml.TileExploder", self.uid) + self._java_obj = self._new_java_obj( + "org.locationtech.rasterframes.ml.TileExploder", self.uid + ) class NoDataFilter(JavaTransformer, HasInputCols, DefaultParamsReadable, DefaultParamsWritable): @@ -604,8 +627,9 @@ class NoDataFilter(JavaTransformer, HasInputCols, DefaultParamsReadable, Default def __init__(self): super(NoDataFilter, self).__init__() - self._java_obj = self._new_java_obj("org.locationtech.rasterframes.ml.NoDataFilter", self.uid) - + self._java_obj = self._new_java_obj( + "org.locationtech.rasterframes.ml.NoDataFilter", self.uid + ) def setInputCols(self, value): """ diff --git a/python/pyrasterframes/utils.py b/python/pyrasterframes/utils.py new file mode 100644 index 000000000..9a14145ec --- /dev/null +++ b/python/pyrasterframes/utils.py @@ -0,0 +1,75 @@ +# +# This software is licensed under the Apache 2 license, quoted below. 
+#
+# Copyright 2019 Astraea, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# [http://www.apache.org/licenses/LICENSE-2.0]
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
+from typing import Dict, Optional
+
+from pyspark import SparkConf
+from pyspark.sql import SparkSession
+
+from . import RFContext
+
+__all__ = [
+    "create_rf_spark_session",
+    "gdal_version",
+    "build_info",
+    "quiet_logs",
+]
+
+
+def quiet_logs(sc):
+    logger = sc._jvm.org.apache.log4j
+    logger.LogManager.getLogger("geotrellis.raster.gdal").setLevel(logger.Level.ERROR)
+    logger.LogManager.getLogger("akka").setLevel(logger.Level.ERROR)
+
+
+def create_rf_spark_session(master="local[*]", **kwargs: str) -> Optional[SparkSession]:
+    """
+    Create a SparkSession with pyrasterframes enabled and configured.
+    Expects pyrasterframes-assembly-x.x.x.jar to be available on the application jar path.
+    """
+    conf = SparkConf().setAll([(k, kwargs[k]) for k in kwargs])
+
+    spark = (
+        SparkSession.builder.master(master)
+        .appName("RasterFrames")
+        .withKryoSerialization()
+        .config(conf=conf)  # user can override the defaults
+        .getOrCreate()
+    )
+
+    quiet_logs(spark)
+
+    try:
+        spark.withRasterFrames()
+        return spark
+    except TypeError as te:
+        print("Error setting up SparkSession; cannot find the pyrasterframes assembly jar\n", te)
+        return None
+
+
+def gdal_version() -> str:
+    fcn = RFContext.active().lookup("buildInfo")
+    return fcn()["GDAL"]
+
+
+def build_info() -> Dict[str, str]:
+    fcn = RFContext.active().lookup("buildInfo")
+    return fcn()
diff --git a/pyrasterframes/src/main/python/pyrasterframes/version.py b/python/pyrasterframes/version.py
similarity index 95%
rename from pyrasterframes/src/main/python/pyrasterframes/version.py
rename to python/pyrasterframes/version.py
index 53d94f04f..640b246ac 100644
--- a/pyrasterframes/src/main/python/pyrasterframes/version.py
+++ b/python/pyrasterframes/version.py
@@ -20,4 +20,4 @@
 #
 # Translating Java version from version.sbt to PEP440 norms
-__version__: str = '0.10.1.dev0'
+__version__: str = "0.0.0"
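For orientation (not part of the patch), a minimal sketch of the new utils module in use; it assumes the pyrasterframes assembly jar is already on the classpath, and the conf key shown is only an example:

    from pyrasterframes.utils import build_info, create_rf_spark_session

    # Spark conf keys contain dots, so they are passed as an unpacked dict.
    spark = create_rf_spark_session("local[2]", **{"spark.ui.enabled": "false"})
    if spark is not None:  # None signals the assembly jar could not be found
        print(build_info()["GDAL"])  # the same JVM lookup gdal_version() wraps
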
diff --git a/python/tests/ExploderTests.py b/python/tests/ExploderTests.py
new file mode 100644
index 000000000..570918bfe
--- /dev/null
+++ b/python/tests/ExploderTests.py
@@ -0,0 +1,65 @@
+#
+# This software is licensed under the Apache 2 license, quoted below.
+#
+# Copyright 2019 Astraea, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# [http://www.apache.org/licenses/LICENSE-2.0]
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
+from pyrasterframes.rasterfunctions import *
+from pyrasterframes.rf_types import *
+from pyspark.ml import Pipeline, PipelineModel
+from pyspark.ml.feature import VectorAssembler
+from pyspark.sql.functions import *
+
+from pyrasterframes import TileExploder
+
+
+def test_tile_exploder_pipeline_for_prt(spark, img_uri):
+    # NB the tile is a Projected Raster Tile
+    df = spark.read.raster(img_uri)
+    t_col = "proj_raster"
+    assert t_col in df.columns, "proj_raster column not found"
+
+    assembler = VectorAssembler().setInputCols([t_col])
+    pipe = Pipeline().setStages([TileExploder(), assembler])
+    pipe_model = pipe.fit(df)
+    transformed_df = pipe_model.transform(df)
+    assert transformed_df.count() > df.count(), "exploded DataFrame should have more rows than the source"
+
+
+def test_tile_exploder_pipeline_for_tile(spark, img_uri):
+    t_col = "tile"
+    df = spark.read.raster(img_uri).withColumn(t_col, rf_tile("proj_raster")).drop("proj_raster")
+
+    assembler = VectorAssembler().setInputCols([t_col])
+    pipe = Pipeline().setStages([TileExploder(), assembler])
+    pipe_model = pipe.fit(df)
+    transformed_df = pipe_model.transform(df)
+    assert transformed_df.count() > df.count(), "exploded DataFrame should have more rows than the source"
+
+
+def test_tile_exploder_read_write(spark, img_uri):
+    path = "test_tile_exploder_read_write.pipe"
+    df = spark.read.raster(img_uri)
+
+    assembler = VectorAssembler().setInputCols(["proj_raster"])
+    pipe = Pipeline().setStages([TileExploder(), assembler])
+
+    pipe.fit(df).write().overwrite().save(path)
+
+    read_pipe = PipelineModel.load(path)
+    assert len(read_pipe.stages) == 2
+    assert isinstance(read_pipe.stages[0], TileExploder)
diff --git a/python/tests/GeoTiffWriterTests.py b/python/tests/GeoTiffWriterTests.py
new file mode 100644
index 000000000..df42690ed
--- /dev/null
+++ b/python/tests/GeoTiffWriterTests.py
@@ -0,0 +1,82 @@
+#
+# This software is licensed under the Apache 2 license, quoted below.
+#
+# Copyright 2019 Astraea, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# [http://www.apache.org/licenses/LICENSE-2.0]
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+# +# SPDX-License-Identifier: Apache-2.0 +# + +import os +import tempfile + +import pytest +import rasterio + + +@pytest.fixture +def tmpfile(): + file_name = os.path.join(tempfile.gettempdir(), "pyrf-test.tif") + yield file_name + os.remove(file_name) + + +def test_identity_write(spark, img_uri, tmpfile): + rf = spark.read.geotiff(img_uri) + rf_count = rf.count() + assert rf_count > 0 + + rf.write.geotiff(tmpfile) + rf2 = spark.read.geotiff(tmpfile) + assert rf2.count() == rf.count() + + +def test_unstructured_write(spark, img_uri, tmpfile): + rf = spark.read.raster(img_uri) + + rf.write.geotiff(tmpfile, crs="EPSG:32616") + + rf2 = spark.read.raster(tmpfile) + + assert rf2.count() == rf.count() + + with rasterio.open(img_uri) as source: + with rasterio.open(tmpfile) as dest: + assert (dest.width, dest.height) == (source.width, source.height) + assert dest.bounds == source.bounds + assert dest.crs == source.crs + + +def test_unstructured_write_schemaless(spark, img_uri, tmpfile): + # should be able to write a projected raster tile column to path like '/data/foo/file.tif' + from pyrasterframes.rasterfunctions import rf_agg_stats, rf_crs + + rf = spark.read.raster(img_uri) + max = rf.agg(rf_agg_stats("proj_raster").max.alias("max")).first()["max"] + crs = rf.select(rf_crs("proj_raster").alias("crs")).first()["crs"] + + assert not tmpfile.startswith("file://") + + rf.write.geotiff(tmpfile, crs=crs) + + with rasterio.open(tmpfile) as src: + assert src.read().max() == max + + +def test_downsampled_write(spark, img_uri, tmpfile): + rf = spark.read.raster(img_uri) + rf.write.geotiff(tmpfile, crs="EPSG:32616", raster_dimensions=(128, 128)) + + with rasterio.open(tmpfile) as f: + assert (f.width, f.height) == (128, 128) diff --git a/python/tests/GeotrellisTests.py b/python/tests/GeotrellisTests.py new file mode 100644 index 000000000..478185af0 --- /dev/null +++ b/python/tests/GeotrellisTests.py @@ -0,0 +1,64 @@ +# +# This software is licensed under the Apache 2 license, quoted below. +# +# Copyright 2019 Astraea, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# [http://www.apache.org/licenses/LICENSE-2.0] +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+#
+# SPDX-License-Identifier: Apache-2.0
+import pathlib
+import shutil
+import tempfile
+
+import pytest
+
+
+@pytest.fixture()
+def tmpdir():
+    dest = tempfile.mkdtemp()
+    yield pathlib.Path(dest).as_uri()
+    shutil.rmtree(dest, ignore_errors=True)
+
+
+def test_write_geotrellis_layer(spark, img_uri, tmpdir):
+    rf = spark.read.geotiff(img_uri).cache()
+    rf_count = rf.count()
+    assert rf_count > 0
+
+    layer = "gt_layer"
+    zoom = 0
+
+    rf.write.option("layer", layer).option("zoom", zoom).geotrellis(tmpdir)
+
+    rf_gt = spark.read.format("geotrellis").option("layer", layer).option("zoom", zoom).load(tmpdir)
+    rf_gt_count = rf_gt.count()
+    assert rf_gt_count > 0
+
+    _ = rf_gt.take(1)
+
+
+def test_write_geotrellis_multiband_layer(spark, img_rgb_uri, tmpdir):
+    rf = spark.read.geotiff(img_rgb_uri).cache()
+    rf_count = rf.count()
+    assert rf_count > 0
+
+    layer = "gt_multiband_layer"
+    zoom = 0
+
+    rf.write.option("layer", layer).option("zoom", zoom).geotrellis(tmpdir)
+
+    rf_gt = spark.read.format("geotrellis").option("layer", layer).option("zoom", zoom).load(tmpdir)
+    rf_gt_count = rf_gt.count()
+    assert rf_gt_count > 0
+
+    _ = rf_gt.take(1)
diff --git a/python/tests/IpythonTests.py b/python/tests/IpythonTests.py
new file mode 100644
index 000000000..1c2627895
--- /dev/null
+++ b/python/tests/IpythonTests.py
@@ -0,0 +1,84 @@
+#
+# This software is licensed under the Apache 2 license, quoted below.
+#
+# Copyright 2019 Astraea, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# [http://www.apache.org/licenses/LICENSE-2.0]
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
+
+import numpy as np
+import pytest
+from IPython.testing import globalipapp
+from py4j.protocol import Py4JJavaError
+from pyrasterframes.rf_types import *
+from pyspark.sql import Row
+from pyspark.sql.types import StructField, StructType
+
+import pyrasterframes
+
+
+@pytest.fixture(scope="module")
+def ip():
+    globalipapp.start_ipython()
+    yield globalipapp.get_ipython()
+    globalipapp.get_ipython().atexit_operations()
+
+
+@pytest.mark.skip("Pending fix for issue #458")
+def test_all_nodata_tile(spark):
+    # https://github.com/locationtech/rasterframes/issues/458
+
+    df = spark.createDataFrame(
+        [
+            Row(
+                tile=Tile(
+                    np.array([[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], dtype="float64"),
+                    CellType.float64(),
+                )
+            ),
+            Row(tile=None),
+        ],
+        schema=StructType([StructField("tile", TileUDT(), True)]),
+    )
+
+    try:
+        pyrasterframes.rf_ipython.spark_df_to_html(df)
+    except Py4JJavaError:
+        raise Exception("test_all_nodata_tile failed with Py4JJavaError")
+    except Exception:
+        raise Exception("test_all_nodata_tile failed with an unexpected exception")
+
+
+def test_display_extension(ip, df):
+    import pyrasterframes.rf_ipython
+
+    num_rows = 2
+
+    result = {}
+
+    def counter(data, md):
+        nonlocal result
+        result["payload"] = (data, md)
+        result["row_count"] = data.count("<tr")
+
+    ip.mime_renderers["text/html"] = counter
+
+    # ip.mime_renderers['text/markdown'] = lambda a, b: print(a, b)
+
+    df.display(num_rows=num_rows)
+
+    # Plus one for the header row.
+ assert result["row_count"] == num_rows + 1, f"Received: {result['payload']}" diff --git a/pyrasterframes/src/main/python/tests/NoDataFilterTests.py b/python/tests/NoDataFilterTests.py similarity index 54% rename from pyrasterframes/src/main/python/tests/NoDataFilterTests.py rename to python/tests/NoDataFilterTests.py index 169783358..20f41191b 100644 --- a/pyrasterframes/src/main/python/tests/NoDataFilterTests.py +++ b/python/tests/NoDataFilterTests.py @@ -18,34 +18,27 @@ # SPDX-License-Identifier: Apache-2.0 # -from . import TestEnvironment from pyrasterframes.rasterfunctions import * from pyrasterframes.rf_types import * - -from pyspark.ml.feature import VectorAssembler from pyspark.ml import Pipeline, PipelineModel +from pyspark.ml.feature import VectorAssembler from pyspark.sql.functions import * -import unittest - - -class ExploderTests(TestEnvironment): - def test_no_data_filter_read_write(self): - path = 'test_no_data_filter_read_write.pipe' - df = self.spark.read.raster(self.img_uri) \ - .select(rf_tile_mean('proj_raster').alias('mean')) +def test_no_data_filter_read_write(spark, img_uri): + path = "test_no_data_filter_read_write.pipe" + df = spark.read.raster(img_uri).select(rf_tile_mean("proj_raster").alias("mean")) - input_cols = ['mean'] - ndf = NoDataFilter().setInputCols(input_cols) - assembler = VectorAssembler().setInputCols(input_cols) + input_cols = ["mean"] + ndf = NoDataFilter().setInputCols(input_cols) + assembler = VectorAssembler().setInputCols(input_cols) - pipe = Pipeline().setStages([ndf, assembler]) + pipe = Pipeline().setStages([ndf, assembler]) - pipe.fit(df).write().overwrite().save(path) + pipe.fit(df).write().overwrite().save(path) - read_pipe = PipelineModel.load(path) - self.assertEqual(len(read_pipe.stages), 2) - actual_stages_ndf = read_pipe.stages[0].getInputCols() - self.assertEqual(actual_stages_ndf, input_cols) + read_pipe = PipelineModel.load(path) + assert len(read_pipe.stages) == 2 + actual_stages_ndf = read_pipe.stages[0].getInputCols() + assert actual_stages_ndf == input_cols diff --git a/python/tests/PyRasterFramesTests.py b/python/tests/PyRasterFramesTests.py new file mode 100644 index 000000000..f0618a538 --- /dev/null +++ b/python/tests/PyRasterFramesTests.py @@ -0,0 +1,360 @@ +# +# This software is licensed under the Apache 2 license, quoted below. +# +# Copyright 2019 Astraea, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# [http://www.apache.org/licenses/LICENSE-2.0] +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
+
+import os
+
+import numpy as np
+import pandas as pd
+import pyspark.sql.functions as F
+import pytest
+from py4j.protocol import Py4JJavaError
+from pyrasterframes.rasterfunctions import *
+from pyrasterframes.rf_types import *
+from pyspark.sql import Row, SQLContext
+
+
+def test_spark_confs(spark, app_name):
+    assert spark.conf.get("spark.app.name") == app_name
+    assert spark.conf.get("spark.ui.enabled") == "false"
+
+
+def test_is_raw():
+    assert CellType("float32raw").is_raw()
+    assert not CellType("float64ud1234").is_raw()
+    assert not CellType("float32").is_raw()
+    assert CellType("int8raw").is_raw()
+    assert not CellType("uint16d12").is_raw()
+    assert not CellType("int32").is_raw()
+
+
+def test_is_floating_point():
+    assert CellType("float32raw").is_floating_point()
+    assert CellType("float64ud1234").is_floating_point()
+    assert CellType("float32").is_floating_point()
+    assert not CellType("int8raw").is_floating_point()
+    assert not CellType("uint16d12").is_floating_point()
+    assert not CellType("int32").is_floating_point()
+
+
+def test_cell_type_no_data():
+    import math
+
+    assert CellType.bool().no_data_value() is None
+
+    assert CellType.int8().has_no_data()
+    assert CellType.int8().no_data_value() == -128
+
+    assert CellType.uint8().has_no_data()
+    assert CellType.uint8().no_data_value() == 0
+
+    assert CellType.int16().has_no_data()
+    assert CellType.int16().no_data_value() == -32768
+
+    assert CellType.uint16().has_no_data()
+    assert CellType.uint16().no_data_value() == 0
+
+    assert CellType.float32().has_no_data()
+    assert np.isnan(CellType.float32().no_data_value())
+
+    assert CellType("float32ud-98").no_data_value() == -98.0
+    assert CellType("float32ud-98").no_data_value() == -98
+    assert CellType("int32ud-98").no_data_value() == -98.0
+    assert CellType("int32ud-98").no_data_value() == -98
+
+    assert math.isnan(CellType.float64().no_data_value())
+    assert CellType.uint8().no_data_value() == 0
+
+
+def test_cell_type_conversion():
+    for ct in rf_cell_types():
+        assert (
+            ct.to_numpy_dtype() == CellType.from_numpy_dtype(ct.to_numpy_dtype()).to_numpy_dtype()
+        ), "dtype comparison for " + str(ct)
+        if not ct.is_raw():
+            assert ct == CellType.from_numpy_dtype(
+                ct.to_numpy_dtype()
+            ), "GTCellType comparison for " + str(ct)
+
+        else:
+            ct_ud = ct.with_no_data_value(99)
+            assert ct_ud.base_cell_type_name() == repr(
+                CellType.from_numpy_dtype(ct_ud.to_numpy_dtype())
+            ), "GTCellType comparison for " + str(ct_ud)
+
+
+@pytest.fixture(scope="module")
+def tile_data(spark):
+    # convenience so we can assert around Tile() == Tile()
+    t1 = Tile(np.array([[1, 2], [3, 4]]), CellType.int8().with_no_data_value(3))
+    t2 = Tile(np.array([[1, 2], [3, 4]]), CellType.int8().with_no_data_value(1))
+    t3 = Tile(np.array([[1, 2], [-3, 4]]), CellType.int8().with_no_data_value(3))
+
+    df = spark.createDataFrame([Row(t1=t1, t2=t2, t3=t3)])
+
+    return df, t1, t2, t3
+
+
+def test_addition(tile_data):
+
+    df, t1, t2, t3 = tile_data
+
+    e1 = np.ma.masked_equal(np.array([[5, 6], [7, 8]]), 7)
+    assert np.array_equal((t1 + 4).cells, e1)
+
+    e2 = np.ma.masked_equal(np.array([[3, 4], [3, 8]]), 3)
+    r2 = (t1 + t2).cells
+    assert np.ma.allequal(r2, e2)
+
+    col_result = df.select(rf_local_add("t1", "t3").alias("sum")).first()
+    assert col_result.sum == t1 + t3
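+# A numpy-only sketch of the nodata bookkeeping the expectations above lean
+# on: cells equal to the nodata value are masked before arithmetic, and the
+# mask carries through to the result (illustration only):
+def _nodata_propagation_sketch():
+    cells = np.ma.masked_equal(np.array([[1, 2], [3, 4]]), 3)  # nodata = 3
+    summed = cells + 4  # data [[5, 6], [7, 8]], mask unchanged
+    assert summed.mask[1][0]  # the former nodata cell stays masked
+    assert summed.data[0][0] == 5
+
+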
+def test_multiplication(tile_data):
+    df, t1, t2, t3 = tile_data
+
+    e1 = np.ma.masked_equal(np.array([[4, 8], [12, 16]]), 12)
+
+    assert np.array_equal((t1 * 4).cells, e1)
+
+    e2 = np.ma.masked_equal(np.array([[3, 4], [3, 16]]), 3)
+    r2 = (t1 * t2).cells
+    assert np.ma.allequal(r2, e2)
+
+    r3 = df.select(rf_local_multiply("t1", "t3").alias("r3")).first().r3
+    assert r3 == t1 * t3
+
+
+def test_subtraction(tile_data):
+    _, t1, _, _ = tile_data
+
+    t3 = t1 * 4
+    r1 = t3 - t1
+    # note careful construction of mask value and dtype above
+    e1 = Tile(
+        np.ma.masked_equal(
+            np.array([[4 - 1, 8 - 2], [3, 16 - 4]], dtype="int8"),
+            3,
+        )
+    )
+    assert r1 == e1, "{} does not equal {}".format(r1, e1)
+    # put another way
+    assert r1 == t1 * 3, "{} does not equal {}".format(r1, t1 * 3)
+
+
+def test_division(tile_data):
+    _, t1, _, _ = tile_data
+    t3 = t1 * 9
+    r1 = t3 / 9
+    assert np.array_equal(r1.cells, t1.cells), "{} does not equal {}".format(r1, t1)
+
+    r2 = (t1 / t1).cells
+    assert np.array_equal(r2, np.array([[1, 1], [1, 1]], dtype=r2.dtype))
+
+
+def test_matmul(tile_data):
+    _, t1, t2, _ = tile_data
+    r1 = t1 @ t2
+
+    # The behavior of np.matmul with masked arrays is not well documented;
+    # it seems to treat the second argument as if it were not a MaskedArray.
+    e1 = Tile(np.matmul(t1.cells, t2.cells), r1.cell_type)
+
+    assert r1 == e1, "{} was not equal to {}".format(r1, e1)
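+# A numpy-only sketch of the matmul quirk noted above: the masks appear to be
+# ignored and the product is computed from the raw .data of both operands
+# (observed behavior, not a documented numpy guarantee):
+def _masked_matmul_sketch():
+    a = np.ma.masked_equal(np.array([[1, 2], [3, 4]]), 3)
+    b = np.ma.masked_equal(np.array([[1, 2], [3, 4]]), 1)
+    expected = np.matmul(a.data, b.data)  # [[7, 10], [15, 22]]
+    assert np.array_equal(np.asarray(np.matmul(a, b)), expected)
+
+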
+def test_pandas_conversion(spark):
+    # pd.options.display.max_colwidth = 256
+    cell_types = (
+        ct for ct in rf_cell_types() if not (ct.is_raw() or ("bool" in ct.base_cell_type_name()))
+    )
+    tiles = [Tile(np.random.randn(5, 5) * 100, ct) for ct in cell_types]
+    in_pandas = pd.DataFrame({"tile": tiles})
+
+    in_spark = spark.createDataFrame(in_pandas)
+    out_pandas = in_spark.select(rf_identity("tile").alias("tile")).toPandas()
+    assert out_pandas.equals(in_pandas), str(in_pandas) + "\n\n" + str(out_pandas)
+
+
+def test_extended_pandas_ops(spark, rf):
+
+    assert isinstance(rf.sql_ctx, SQLContext)
+
+    # Try to collect `rf`, which is read from a GeoTIFF
+    rf_collect = rf.take(2)
+    assert all([isinstance(row.tile.cells, np.ndarray) for row in rf_collect])
+
+    # Try to create a tile from numpy.
+    assert Tile(np.random.randn(10, 10), CellType.int8()).dimensions() == [10, 10]
+
+    tiles = [Tile(np.random.randn(10, 12), CellType.float64()) for _ in range(3)]
+    to_spark = pd.DataFrame(
+        {
+            "t": tiles,
+            "b": ["a", "b", "c"],
+            "c": [1, 2, 4],
+        }
+    )
+    rf_maybe = spark.createDataFrame(to_spark)
+
+    # rf_maybe.select(rf_render_matrix(rf_maybe.t)).show(truncate=False)
+
+    # Try to do something with it.
+    sums = to_spark.t.apply(lambda a: a.cells.sum()).tolist()
+    maybe_sums = rf_maybe.select(rf_tile_sum(rf_maybe.t).alias("tsum"))
+    maybe_sums = [r.tsum for r in maybe_sums.collect()]
+    np.testing.assert_almost_equal(maybe_sums, sums, 12)
+
+    # Test round trip for an array
+    simple_array = Tile(np.array([[1, 2], [3, 4]]), CellType.float64())
+    to_spark_2 = pd.DataFrame({"t": [simple_array]})
+
+    rf_maybe_2 = spark.createDataFrame(to_spark_2)
+    # print("RasterFrameLayer `show`:")
+    # rf_maybe_2.select(rf_render_matrix(rf_maybe_2.t).alias('t')).show(truncate=False)
+
+    pd_2 = rf_maybe_2.toPandas()
+    array_back_2 = pd_2.iloc[0].t
+    # print("Array collected from toPandas output\n", array_back_2)
+
+    assert isinstance(array_back_2, Tile)
+    np.testing.assert_equal(array_back_2.cells, simple_array.cells)
+
+
+def test_raster_join(spark, img_uri, rf):
+    # re-read the same source
+    rf_prime = spark.read.geotiff(img_uri).withColumnRenamed("tile", "tile2")
+
+    rf_joined = rf.raster_join(rf_prime)
+
+    assert rf_joined.count() == rf.count()
+    assert len(rf_joined.columns) == len(rf.columns) + len(rf_prime.columns) - 2
+
+    rf_joined_2 = rf.raster_join(rf_prime, rf.extent, rf.crs, rf_prime.extent, rf_prime.crs)
+    assert rf_joined_2.count() == rf.count()
+    assert len(rf_joined_2.columns) == len(rf.columns) + len(rf_prime.columns) - 2
+
+    # this will bring arbitrary additional data into the join; garbage result
+    join_expression = rf.extent.xmin == rf_prime.extent.xmin
+    rf_joined_3 = rf.raster_join(
+        rf_prime, rf.extent, rf.crs, rf_prime.extent, rf_prime.crs, join_expression
+    )
+    assert rf_joined_3.count() == rf.count()
+    assert len(rf_joined_3.columns) == len(rf.columns) + len(rf_prime.columns) - 2
+
+    # throws if you don't pass in all expected columns
+    with pytest.raises(AssertionError):
+        rf.raster_join(rf_prime, join_exprs=rf.extent)
+
+
+def test_raster_join_resample_method(spark, resource_dir):
+
+    df = spark.read.raster("file://" + os.path.join(resource_dir, "L8-B4-Elkton-VA.tiff")).select(
+        F.col("proj_raster").alias("tile")
+    )
+    df_prime = spark.read.raster(
+        "file://" + os.path.join(resource_dir, "L8-B4-Elkton-VA-4326.tiff")
+    ).select(F.col("proj_raster").alias("tile2"))
+
+    result_methods = (
+        df.raster_join(
+            df_prime.withColumnRenamed("tile2", "bilinear"), resampling_method="bilinear"
+        )
+        .select(
+            "tile",
+            rf_proj_raster("bilinear", rf_extent("tile"), rf_crs("tile")).alias("bilinear"),
+        )
+        .raster_join(
+            df_prime.withColumnRenamed("tile2", "cubic_spline"),
+            resampling_method="cubic_spline",
+        )
+        .select(rf_local_subtract("bilinear", "cubic_spline").alias("diff"))
+        .agg(rf_agg_stats("diff").alias("stats"))
+        .select("stats.min")
+        .first()
+    )
+
+    assert result_methods[0] > 0.0
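+# A minimal illustration of the Spark 2.x pitfall worked around in the next
+# test: Rows built from keyword arguments sort their fields alphabetically,
+# so an Extent(xmin, ymin, xmax, ymax) struct can reach the JVM with its
+# fields reordered (sketch assumes Spark 2.x behavior):
+def _row_field_ordering_sketch():
+    r = Row(xmin=0.0, ymin=0.0, xmax=40.0, ymax=40.0)
+    assert list(r.asDict().keys()) == ["xmax", "xmin", "ymax", "ymin"]
+
+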
+def test_raster_join_with_null_left_head(spark):
+    # https://github.com/locationtech/rasterframes/issues/462
+
+    ones = np.ones((10, 10), dtype="uint8")
+    t = Tile(ones, CellType.uint8())
+    e = Extent(0.0, 0.0, 40.0, 40.0)
+    c = CRS("EPSG:32611")
+
+    # Note: there's a bug in Spark 2.x whereby the serialization of Extent
+    # reorders the fields, causing deserialization errors on the JVM side.
+    # So we end up manually forcing the ordering with the use of `struct`.
+    # See https://stackoverflow.com/questions/35343525/how-do-i-order-fields-of-my-row-objects-in-spark-python/35343885#35343885
+    left = spark.createDataFrame(
+        [Row(i=1, j="a", t=t, u=t, e=e, c=c), Row(i=1, j="b", t=None, u=t, e=e, c=c)]
+    ).withColumn("e2", F.struct("e.xmin", "e.ymin", "e.xmax", "e.ymax"))
+
+    right = spark.createDataFrame(
+        [
+            Row(i=1, r=Tile(ones, CellType.uint8()), e=e, c=c),
+        ]
+    ).withColumn("e2", F.struct("e.xmin", "e.ymin", "e.xmax", "e.ymax"))
+
+    try:
+        joined = left.raster_join(
+            right,
+            join_exprs=left.i == right.i,
+            left_extent=left.e2,
+            right_extent=right.e2,
+            left_crs=left.c,
+            right_crs=right.c,
+        )
+
+        assert joined.count() == 2
+        # In the case where the head column is null it will be passed thru
+        assert joined.select(F.isnull("t")).filter(F.col("j") == "b").first()[0]
+
+        # The right hand side tile should get dimensions from col `u` however
+        collected = joined.select(
+            rf_dimensions("r").cols.alias("cols"), rf_dimensions("r").rows.alias("rows")
+        ).collect()
+
+        for r in collected:
+            assert 10 == r.rows
+            assert 10 == r.cols
+
+        # If there is no non-null tile on the LHS then the RHS is ill defined
+        joined_no_left_tile = left.drop("u").raster_join(
+            right,
+            join_exprs=left.i == right.i,
+            left_extent=left.e,
+            right_extent=right.e,
+            left_crs=left.c,
+            right_crs=right.c,
+        )
+        assert joined_no_left_tile.count() == 2
+
+        # Tile col from the left side is passed thru as null
+        assert joined_no_left_tile.select(F.isnull("t")).filter(F.col("j") == "b").first()[0]
+        # Because there is no non-null tile col on the left side, the right side is null too
+        assert joined_no_left_tile.select(F.isnull("r")).filter(F.col("j") == "b").first()[0]
+
+    except Py4JJavaError as e:
+        raise Exception("test_raster_join_with_null_left_head failed with Py4JJavaError: " + str(e))
diff --git a/python/tests/RasterFunctionsTests.py b/python/tests/RasterFunctionsTests.py
new file mode 100644
index 000000000..66e7aa705
--- /dev/null
+++ b/python/tests/RasterFunctionsTests.py
@@ -0,0 +1,693 @@
+#
+# This software is licensed under the Apache 2 license, quoted below.
+#
+# Copyright 2019 Astraea, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# [http://www.apache.org/licenses/LICENSE-2.0]
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+# +# SPDX-License-Identifier: Apache-2.0 +# + +import os + +import numpy as np +import pyspark.sql.functions as F +import pytest +from deprecation import fail_if_not_removed +from numpy.testing import assert_allclose, assert_equal +from pyrasterframes.rasterfunctions import * +from pyrasterframes.rf_types import * +from pyrasterframes.rf_types import CellType, Tile +from pyrasterframes.utils import gdal_version +from pyspark.sql import Row + +from .conftest import assert_png, rounded_compare + + +# @pytest.mark.filterwarnings("ignore") +def test_setup(spark): + assert ( + spark.sparkContext.getConf().get("spark.serializer") + == "org.apache.spark.serializer.KryoSerializer" + ) + print("GDAL version", gdal_version()) + + +def test_identify_columns(rf): + cols = rf.tile_columns() + assert len(cols) == 1, "`tileColumns` did not find the proper number of columns." + print("Tile columns: ", cols) + col = rf.spatial_key_column() + assert isinstance(col, Column), "`spatialKeyColumn` was not found" + print("Spatial key column: ", col) + col = rf.temporal_key_column() + assert col is None, "`temporalKeyColumn` should be `None`" + print("Temporal key column: ", col) + + +def test_tile_creation(spark): + + base = spark.createDataFrame([1, 2, 3, 4], "integer") + tiles = base.select( + rf_make_constant_tile(3, 3, 3, "int32"), + rf_make_zeros_tile(3, 3, "int32"), + rf_make_ones_tile(3, 3, CellType.int32()), + ) + tiles.show() + assert tiles.count() == 4 + + +def test_multi_column_operations(rf): + df1 = rf.withColumnRenamed("tile", "t1").as_layer() + df2 = rf.withColumnRenamed("tile", "t2").as_layer() + df3 = df1.spatial_join(df2).as_layer() + df3 = df3.withColumn("norm_diff", rf_normalized_difference("t1", "t2")) + # df3.printSchema() + + aggs = df3.agg( + rf_agg_mean("norm_diff"), + ) + aggs.show() + row = aggs.first() + + assert rounded_compare(row["rf_agg_mean(norm_diff)"], 0) + + +def test_general(rf): + meta = rf.tile_layer_metadata() + assert meta["bounds"] is not None + df = ( + rf.withColumn("dims", rf_dimensions("tile")) + .withColumn("type", rf_cell_type("tile")) + .withColumn("dCells", rf_data_cells("tile")) + .withColumn("ndCells", rf_no_data_cells("tile")) + .withColumn("min", rf_tile_min("tile")) + .withColumn("max", rf_tile_max("tile")) + .withColumn("mean", rf_tile_mean("tile")) + .withColumn("sum", rf_tile_sum("tile")) + .withColumn("stats", rf_tile_stats("tile")) + .withColumn("extent", st_extent("geometry")) + .withColumn("extent_geom1", st_geometry("extent")) + .withColumn("ascii", rf_render_ascii("tile")) + .withColumn("log", rf_log("tile")) + .withColumn("exp", rf_exp("tile")) + .withColumn("expm1", rf_expm1("tile")) + .withColumn("sqrt", rf_sqrt("tile")) + .withColumn("round", rf_round("tile")) + .withColumn("abs", rf_abs("tile")) + ) + + df.first() + + +def test_st_geometry_from_struct(spark): + + df = spark.createDataFrame([Row(xmin=0, ymin=1, xmax=2, ymax=3)]) + df2 = df.select(st_geometry(F.struct(df.xmin, df.ymin, df.xmax, df.ymax)).alias("geom")) + + actual_bounds = df2.first()["geom"].bounds + assert (0.0, 1.0, 2.0, 3.0) == actual_bounds + + +def test_agg_mean(rf): + mean = rf.agg(rf_agg_mean("tile")).first()["rf_agg_mean(tile)"] + assert rounded_compare(mean, 10160) + + +def test_agg_local_mean(spark): + + # this is really testing the nodata propagation in the agg local summation + ct = CellType.int8().with_no_data_value(4) + df = spark.createDataFrame( + [ + Row(tile=Tile(np.array([[1, 2, 3, 4, 5, 6]]), ct)), + Row(tile=Tile(np.array([[1, 2, 4, 3, 5, 6]]), ct)), + ] + ) 
+ + result = df.agg(rf_agg_local_mean("tile").alias("mean")).first().mean + + expected = Tile(np.array([[1.0, 2.0, 3.0, 3.0, 5.0, 6.0]]), CellType.float64()) + assert result == expected + + +def test_aggregations(rf): + aggs = rf.agg( + rf_agg_data_cells("tile"), + rf_agg_no_data_cells("tile"), + rf_agg_stats("tile"), + rf_agg_approx_histogram("tile"), + ) + row = aggs.first() + + # print(row['rf_agg_data_cells(tile)']) + assert row["rf_agg_data_cells(tile)"] == 387000 + assert row["rf_agg_no_data_cells(tile)"] == 1000 + assert row["rf_agg_stats(tile)"].data_cells == row["rf_agg_data_cells(tile)"] + + +@fail_if_not_removed +def test_add_scalar(rf): + # Trivial test to trigger the deprecation failure at the right time. + result: Row = rf.select(rf_local_add_double("tile", 99.9), rf_local_add_int("tile", 42)).first() + assert True + + +def test_agg_approx_quantiles(rf): + agg = rf.agg(rf_agg_approx_quantiles("tile", [0.1, 0.5, 0.9, 0.98])) + result = agg.first()[0] + # expected result from computing in external python process; c.f. scala tests + assert_allclose(result, np.array([7963.0, 10068.0, 12160.0, 14366.0])) + + +def test_sql(spark, rf): + + rf.createOrReplaceTempView("rf_test_sql") + + arith = spark.sql( + """SELECT tile, + rf_local_add(tile, 1) AS add_one, + rf_local_subtract(tile, 1) AS less_one, + rf_local_multiply(tile, 2) AS times_two, + rf_local_divide( + rf_convert_cell_type(tile, "float32"), + 2) AS over_two + FROM rf_test_sql""" + ) + + arith.createOrReplaceTempView("rf_test_sql_1") + arith.show(truncate=False) + stats = spark.sql( + """ + SELECT rf_tile_mean(tile) as base, + rf_tile_mean(add_one) as plus_one, + rf_tile_mean(less_one) as minus_one, + rf_tile_mean(times_two) as double, + rf_tile_mean(over_two) as half, + rf_no_data_cells(tile) as nd + + FROM rf_test_sql_1 + ORDER BY rf_no_data_cells(tile) + """ + ) + stats.show(truncate=False) + stats.createOrReplaceTempView("rf_test_sql_stats") + + compare = spark.sql( + """ + SELECT + plus_one - 1.0 = base as add, + minus_one + 1.0 = base as subtract, + double / 2.0 = base as multiply, + half * 2.0 = base as divide, + nd + FROM rf_test_sql_stats + """ + ) + + expect_row1 = compare.orderBy("nd").first() + + assert expect_row1.subtract + assert expect_row1.multiply + assert expect_row1.divide + assert expect_row1.nd == 0 + assert expect_row1.add + + expect_row2 = compare.orderBy("nd", ascending=False).first() + + assert expect_row2.subtract + assert expect_row2.multiply + assert expect_row2.divide + assert expect_row2.nd > 0 + assert expect_row2.add # <-- Would fail in a case where ND + 1 = 1 + + +def test_explode(rf): + + rf.select("spatial_key", rf_explode_tiles("tile")).show() + # +-----------+------------+---------+-------+ + # |spatial_key|column_index|row_index|tile | + # +-----------+------------+---------+-------+ + # |[2,1] |4 |0 |10150.0| + cell = ( + rf.select(rf.spatial_key_column(), rf_explode_tiles(rf.tile)) + .where(F.col("spatial_key.col") == 2) + .where(F.col("spatial_key.row") == 1) + .where(F.col("column_index") == 4) + .where(F.col("row_index") == 0) + .select(F.col("tile")) + .collect()[0][0] + ) + assert cell == 10150.0 + + # Test the sample version + frac = 0.01 + sample_count = rf.select(rf_explode_tiles_sample(frac, 1872, "tile")).count() + print("Sample count is {}".format(sample_count)) + assert sample_count > 0 + assert sample_count < (frac * 1.1) * 387000 # give some wiggle room + + +def test_mask_by_value(rf): + + # create an artificial mask for values > 25000; masking value will be 4 + 
mask_value = 4
+
+    rf1 = rf.select(
+        rf.tile,
+        rf_local_multiply(
+            rf_convert_cell_type(rf_local_greater(rf.tile, 25000), "uint8"),
+            F.lit(mask_value),
+        ).alias("mask"),
+    )
+    rf2 = rf1.select(
+        rf1.tile, rf_mask_by_value(rf1.tile, rf1.mask, F.lit(mask_value), False).alias("masked")
+    )
+
+    result = rf2.agg(rf_agg_no_data_cells(rf2.tile) < rf_agg_no_data_cells(rf2.masked)).collect()[
+        0
+    ][0]
+    assert result
+
+    # note supplying an `int` here, not a column, as the mask value
+    rf3 = rf1.select(
+        rf1.tile,
+        rf_inverse_mask_by_value(rf1.tile, rf1.mask, mask_value).alias("masked"),
+        rf_mask_by_value(rf1.tile, rf1.mask, mask_value, True).alias("masked2"),
+    )
+    result = rf3.agg(
+        rf_agg_no_data_cells(rf3.tile) < rf_agg_no_data_cells(rf3.masked),
+        rf_agg_no_data_cells(rf3.tile) < rf_agg_no_data_cells(rf3.masked2),
+    ).first()
+    assert result[0]
+    assert result[1]  # inverse mask arg gives equivalent result
+
+    result_equiv_tiles = rf3.select(rf_for_all(rf_local_equal(rf3.masked, rf3.masked2))).first()[0]
+    assert result_equiv_tiles  # inverse fn and inverse arg produce the same Tile
+
+
+def test_mask_by_values(spark):
+
+    tile = Tile(np.random.randint(1, 100, (5, 5)), CellType.uint8())
+    mask_tile = Tile(np.array(range(1, 26), "uint8").reshape(5, 5))
+    expected_diag_nd = Tile(np.ma.masked_array(tile.cells, mask=np.eye(5)))
+
+    df = spark.createDataFrame([Row(t=tile, m=mask_tile)]).select(
+        rf_mask_by_values("t", "m", [0, 6, 12, 18, 24])
+    )  # values on the diagonal
+    result0 = df.first()
+    # assert_equal(result0[0].cells, expected_diag_nd)
+    assert result0[0] == expected_diag_nd
+
+
+def test_mask_bits(spark):
+    t = Tile(42 * np.ones((4, 4), "uint16"), CellType.uint16())
+    # with a variety of known values
+    mask = Tile(
+        np.array(
+            [
+                [1, 1, 2720, 2720],
+                [1, 6816, 6816, 2756],
+                [2720, 2720, 6900, 2720],
+                [2720, 6900, 6816, 1],
+            ]
+        ),
+        CellType("uint16raw"),
+    )
+
+    df = spark.createDataFrame([Row(t=t, mask=mask)])
+
+    # removes fill value 1
+    mask_fill_df = df.select(rf_mask_by_bit("t", "mask", 0, True).alias("mbb"))
+    mask_fill_tile = mask_fill_df.first()["mbb"]
+
+    assert mask_fill_tile.cell_type.has_no_data()
+
+    assert mask_fill_df.select(rf_data_cells("mbb")).first()[0] == 16 - 4
+
+    # mask out the 6816 and 6900 cells (their bits 11-12 decode to 2 or 3)
+    mask_med_hi_cir = (
+        df.withColumn("mask_cir_mh", rf_mask_by_bits("t", "mask", 11, 2, [2, 3]))
+        .first()["mask_cir_mh"]
+        .cells
+    )
+
+    assert mask_med_hi_cir.mask.sum() == 5
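+# The QA-style mask values above decode with plain shift-and-mask arithmetic;
+# a worked sketch of the two-bit field at bit 11 that rf_mask_by_bits is
+# assumed to read (pure Python, illustration only):
+def _bitfield_sketch():
+    def extract(value, start_bit, num_bits):
+        return (value >> start_bit) & ((1 << num_bits) - 1)
+
+    assert extract(6816, 11, 2) == 3  # in [2, 3] -> masked
+    assert extract(6900, 11, 2) == 3  # in [2, 3] -> masked
+    assert extract(2720, 11, 2) == 1  # kept
+    assert extract(1, 11, 2) == 0  # kept; 6816 x3 plus 6900 x2 gives mask.sum() == 5
+
+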
+@pytest.mark.skip("Issue #422 https://github.com/locationtech/rasterframes/issues/422")
+def test_mask_and_deser(spark):
+    # duplicates much of test_mask_bits, but exercises deserialization of the masked tile
+    t = Tile(42 * np.ones((4, 4), "uint16"), CellType.uint16())
+    # with a variety of known values
+    mask = Tile(
+        np.array(
+            [
+                [1, 1, 2720, 2720],
+                [1, 6816, 6816, 2756],
+                [2720, 2720, 6900, 2720],
+                [2720, 6900, 6816, 1],
+            ]
+        ),
+        CellType("uint16raw"),
+    )
+
+    df = spark.createDataFrame([Row(t=t, mask=mask)])
+
+    # removes fill value 1
+    mask_fill_df = df.select(rf_mask_by_bit("t", "mask", 0, True).alias("mbb"))
+    mask_fill_tile = mask_fill_df.first()["mbb"]
+
+    assert mask_fill_tile.cell_type.has_no_data()
+
+    # Unsure why this fails. mask_fill_tile.cells is all 42, unmasked.
+    assert mask_fill_tile.cells.mask.sum() == 4, (
+        f"Expected {16 - 4} data values but got the masked tile: {mask_fill_tile}"
+    )
+
+
+def test_mask(spark):
+
+    np.random.seed(999)
+    # importantly, exclude 0 from the range because that's the nodata value for the `data_tile`'s cell type
+    ma = np.ma.array(
+        np.random.randint(1, 10, (5, 5), dtype="int8"), mask=np.random.rand(5, 5) > 0.7
+    )
+    expected_data_values = ma.compressed().size
+    expected_no_data_values = ma.size - expected_data_values
+    assert expected_data_values > 0, "Make sure random seed is cooperative"
+    assert expected_no_data_values > 0, "Make sure random seed is cooperative"
+
+    data_tile = Tile(np.ones(ma.shape, ma.dtype), CellType.uint8())
+
+    df = spark.createDataFrame([Row(t=data_tile, m=Tile(ma))]).withColumn(
+        "masked_t", rf_mask("t", "m")
+    )
+
+    result = df.select(rf_data_cells("masked_t")).first()[0]
+    assert (
+        result == expected_data_values
+    ), f"Masked tile should have {expected_data_values} data values but found: {df.select('masked_t').first()[0].cells}. Original data: {data_tile.cells} Masked by {ma}"
+
+    nd_result = df.select(rf_no_data_cells("masked_t")).first()[0]
+    assert nd_result == expected_no_data_values
+
+    # deserialization of the tile is correct
+    assert df.select("masked_t").first()[0].cells.compressed().size == expected_data_values
+
+
+def test_extract_bits(spark):
+    one = np.ones((6, 6), "uint8")
+    t = Tile(84 * one)
+    df = spark.createDataFrame([Row(t=t)])
+    result_py_literals = df.select(rf_local_extract_bits("t", 2, 3)).first()[0]
+    # expected value: 84 is binary 1010100; bits 2-4 are 101, i.e. 5
+    assert_equal(result_py_literals.cells, 5 * one)
+
+    result_cols = df.select(rf_local_extract_bits("t", lit(2), lit(3))).first()[0]
+    assert_equal(result_cols.cells, 5 * one)
+
+
+def test_resample(rf):
+
+    result = rf.select(
+        rf_tile_min(
+            rf_local_equal(rf_resample(rf_resample(rf.tile, F.lit(2)), F.lit(0.5)), rf.tile)
+        )
+    ).collect()[0][0]
+
+    assert result == 1  # shorthand for "all values are true"
+
+
+def test_exists_for_all(rf):
+    df = rf.withColumn("should_exist", rf_make_ones_tile(5, 5, "int8")).withColumn(
+        "should_not_exist", rf_make_zeros_tile(5, 5, "int8")
+    )
+
+    should_exist = df.select(rf_exists(df.should_exist).alias("se")).take(1)[0].se
+    assert should_exist
+
+    should_not_exist = df.select(rf_exists(df.should_not_exist).alias("se")).take(1)[0].se
+    assert not should_not_exist
+
+    assert df.select(rf_for_all(df.should_exist).alias("se")).take(1)[0].se
+    assert not df.select(rf_for_all(df.should_not_exist).alias("se")).take(1)[0].se
+
+
+def test_cell_type_in_functions(rf):
+
+    ct = CellType.float32().with_no_data_value(-999)
+
+    df = (
+        rf.withColumn("ct_str", rf_convert_cell_type("tile", ct.cell_type_name))
+        .withColumn("ct", rf_convert_cell_type("tile", ct))
+        .withColumn("make", rf_make_constant_tile(99, 3, 4, CellType.int8()))
+        .withColumn("make2", rf_with_no_data("make", 99))
+    )
+
+    result = df.select("ct", "ct_str", "make", "make2").first()
+
+    assert result["ct"].cell_type == ct
+    assert result["ct_str"].cell_type == ct
+    assert result["make"].cell_type == CellType.int8()
+
+    counts = df.select(
+        rf_no_data_cells("make").alias("nodata1"),
+        rf_data_cells("make").alias("data1"),
+        rf_no_data_cells("make2").alias("nodata2"),
+        rf_data_cells("make2").alias("data2"),
+    ).first()
+
+    assert counts["data1"] == 3 * 4
+    assert counts["nodata1"] == 0
+    assert counts["data2"] == 0
+    assert counts["nodata2"] == 3 * 4
+    assert result["make2"].cell_type == CellType.int8().with_no_data_value(99)
+
+
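+# The "ud" suffix in cell type names encodes the nodata value inline (e.g.
+# CellType("float32ud-98") above); a minimal sketch of the assumed round trip
+# through the name string (illustration only):
+def _cell_type_name_sketch():
+    ct = CellType.float32().with_no_data_value(-999)
+    assert ct.cell_type_name.startswith("float32ud")
+    assert CellType(ct.cell_type_name) == ct
+
+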
+def test_render_composite(spark, resource_dir): + def l8band_uri(band_index): + return "file://" + os.path.join(resource_dir, "L8-B{}-Elkton-VA.tiff".format(band_index)) + + cat = spark.createDataFrame([Row(red=l8band_uri(4), green=l8band_uri(3), blue=l8band_uri(2))]) + rf = spark.read.raster(cat, catalog_col_names=cat.columns) + + # Test composite construction + rgb = rf.select(rf_tile(rf_rgb_composite("red", "green", "blue")).alias("rgb")).first()["rgb"] + + # TODO: how to better test this? + assert isinstance(rgb, Tile) + assert rgb.dimensions() == [186, 169] + + ## Test PNG generation + png_bytes = rf.select(rf_render_png("red", "green", "blue").alias("png")).first()["png"] + # Look for the PNG magic cookie + assert_png(png_bytes) + + +def test_rf_interpret_cell_type_as(spark): + + df = spark.createDataFrame( + [Row(t=Tile(np.array([[1, 3, 4], [5, 0, 3]]), CellType.uint8().with_no_data_value(5)))] + ) + df = df.withColumn("tile", rf_interpret_cell_type_as("t", "uint8ud3")) # threes become ND + result = df.select(rf_tile_sum(rf_local_equal("t", lit(3))).alias("threes")).first()["threes"] + assert result == 2 + + result_5 = df.select(rf_tile_sum(rf_local_equal("t", lit(5))).alias("fives")).first()["fives"] + assert result_5 == 0 + + +def test_rf_local_data_and_no_data(spark): + + nd = 5 + t = Tile(np.array([[1, 3, 4], [nd, 0, 3]]), CellType.uint8().with_no_data_value(nd)) + # note the convert is due to issue #188 + df = ( + spark.createDataFrame([Row(t=t)]) + .withColumn("lnd", rf_convert_cell_type(rf_local_no_data("t"), "uint8")) + .withColumn("ld", rf_convert_cell_type(rf_local_data("t"), "uint8")) + ) + + result = df.first() + result_nd = result["lnd"] + assert_equal(result_nd.cells, t.cells.mask) + + result_d = result["ld"] + assert_equal(result_d.cells, np.invert(t.cells.mask)) + + +def test_rf_local_is_in(spark): + + nd = 5 + t = Tile(np.array([[1, 3, 4], [nd, 0, 3]]), CellType.uint8().with_no_data_value(nd)) + # note the convert is due to issue #188 + df = ( + spark.createDataFrame([Row(t=t)]) + .withColumn("a", F.array(F.lit(3), lit(4))) + .withColumn( + "in2", + rf_convert_cell_type(rf_local_is_in(F.col("t"), F.array(lit(0), lit(4))), "uint8"), + ) + .withColumn("in3", rf_convert_cell_type(rf_local_is_in("t", "a"), "uint8")) + .withColumn( + "in4", + rf_convert_cell_type(rf_local_is_in("t", F.array(lit(0), lit(4), lit(3))), "uint8"), + ) + .withColumn("in_list", rf_convert_cell_type(rf_local_is_in(F.col("t"), [4, 1]), "uint8")) + ) + + result = df.first() + assert result["in2"].cells.sum() == 2 + assert_equal(result["in2"].cells, np.isin(t.cells, np.array([0, 4]))) + assert result["in3"].cells.sum() == 3 + assert result["in4"].cells.sum() == 4 + assert ( + result["in_list"].cells.sum() == 2 + ), "Tile value {} should contain two 1s as: [[1, 0, 1],[0, 0, 0]]".format( + result["in_list"].cells + ) + + +def test_local_min_max_clamp(spark): + tile = Tile(np.random.randint(-20, 20, (10, 10)), CellType.int8()) + min_tile = Tile(np.random.randint(-20, 0, (10, 10)), CellType.int8()) + max_tile = Tile(np.random.randint(0, 20, (10, 10)), CellType.int8()) + + df = spark.createDataFrame([Row(t=tile, mn=min_tile, mx=max_tile)]) + assert_equal( + df.select(rf_local_min("t", "mn")).first()[0].cells, + np.clip(tile.cells, None, min_tile.cells), + ) + + assert_equal(df.select(rf_local_min("t", -5)).first()[0].cells, np.clip(tile.cells, None, -5)) + + assert_equal( + df.select(rf_local_max("t", "mx")).first()[0].cells, + np.clip(tile.cells, max_tile.cells, None), + ) + + 
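+    # Note: an element-wise minimum against a ceiling (or maximum against a
+    # floor) is the same as one-sided clipping, e.g. np.minimum(x, m) equals
+    # np.clip(x, None, m); that equivalence is what the np.clip expectations
+    # in this test lean on.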
assert_equal(df.select(rf_local_max("t", 5)).first()[0].cells, np.clip(tile.cells, 5, None))
+
+    assert_equal(
+        df.select(rf_local_clamp("t", "mn", "mx")).first()[0].cells,
+        np.clip(tile.cells, min_tile.cells, max_tile.cells),
+    )
+
+
+def test_rf_where(spark):
+    cond = Tile(np.random.binomial(1, 0.35, (10, 10)), CellType.uint8())
+    x = Tile(np.random.randint(-20, 10, (10, 10)), CellType.int8())
+    y = Tile(np.random.randint(0, 30, (10, 10)), CellType.int8())
+
+    df = spark.createDataFrame([Row(cond=cond, x=x, y=y)])
+    result = df.select(rf_where("cond", "x", "y")).first()[0].cells
+    assert_equal(result, np.where(cond.cells, x.cells, y.cells))
+
+
+def test_rf_standardize(prdf):
+
+    stats = (
+        prdf.select(rf_agg_stats("proj_raster").alias("stat"))
+        .select("stat.mean", F.sqrt("stat.variance").alias("stddev"))
+        .first()
+    )
+
+    result = (
+        prdf.select(rf_standardize("proj_raster", stats[0], stats[1]).alias("z"))
+        .select(rf_agg_stats("z").alias("z_stat"))
+        .select("z_stat.mean", "z_stat.variance")
+        .first()
+    )
+
+    assert result[0] == pytest.approx(0.0, abs=0.00001)
+    assert result[1] == pytest.approx(1.0, abs=0.00001)
+
+
+def test_rf_standardize_per_tile(spark):
+
+    # 10k samples, so the statistics should be pretty stable
+    x = Tile(np.random.randint(-20, 0, (100, 100)), CellType.int8())
+    df = spark.createDataFrame([Row(x=x)])
+
+    result = (
+        df.select(rf_standardize("x").alias("z"))
+        .select(rf_agg_stats("z").alias("z_stat"))
+        .select("z_stat.mean", "z_stat.variance")
+        .first()
+    )
+
+    assert result[0] == pytest.approx(0.0, abs=0.00001)
+    assert result[1] == pytest.approx(1.0, abs=0.00001)
+
+
+def test_rf_rescale(spark):
+
+    x1 = Tile(np.random.randint(-60, 12, (10, 10)), CellType.int8())
+    x2 = Tile(np.random.randint(15, 122, (10, 10)), CellType.int8())
+    df = spark.createDataFrame([Row(x=x1), Row(x=x2)])
+    # Note there will be some clipping
+    rescaled = df.select(rf_rescale("x", -20, 50).alias("x_prime"), "x")
+    result = rescaled.agg(F.max(rf_tile_min("x_prime")), F.min(rf_tile_max("x_prime"))).first()
+
+    assert (
+        result[0] > 0.0
+    ), f"Expected max tile_min to be > 0 (strictly); but it is {rescaled.select('x', 'x_prime', rf_tile_min('x_prime')).take(2)}"
+
+    assert (
+        result[1] < 1.0
+    ), f"Expected min tile_max to be < 1 (strictly); it is {rescaled.select(rf_tile_max('x_prime')).take(2)}"
+
+
+def test_rf_rescale_per_tile(spark):
+    x1 = Tile(np.random.randint(-20, 42, (10, 10)), CellType.int8())
+    x2 = Tile(np.random.randint(20, 242, (10, 10)), CellType.int8())
+    df = spark.createDataFrame([Row(x=x1), Row(x=x2)])
+    result = (
+        df.select(rf_rescale("x").alias("x_prime"))
+        .agg(rf_agg_stats("x_prime").alias("stat"))
+        .select("stat.min", "stat.max")
+        .first()
+    )
+
+    assert result[0] == 0.0
+    assert result[1] == 1.0
+
+
+def test_rf_agg_overview_raster(prdf):
+    width = 500
+    height = 400
+    agg = prdf.select(rf_agg_extent(rf_extent(prdf.proj_raster)).alias("extent")).first().extent
+    crs = prdf.select(rf_crs(prdf.proj_raster).alias("crs")).first().crs.crsProj4
+    aoi = Extent.from_row(agg)
+    aoi = aoi.reproject(crs, "EPSG:3857")
+    aoi = aoi.buffer(-(aoi.width * 0.2))
+
+    ovr = prdf.select(rf_agg_overview_raster(prdf.proj_raster, width, height, aoi).alias("agg"))
+    png = ovr.select(rf_render_color_ramp_png("agg", "Greyscale64")).first()[0]
+    assert_png(png)
+
+    # with open('/tmp/test_rf_agg_overview_raster.png', 'wb') as f:
+    #     f.write(png)
+
+
+def test_rf_proj_raster(prdf):
+    df = prdf.select(
+        rf_proj_raster(
+            rf_tile("proj_raster"),
+            rf_extent("proj_raster"),
rf_crs("proj_raster") + ).alias("roll_your_own") + ) + assert "extent" in df.schema["roll_your_own"].dataType.fieldNames() diff --git a/python/tests/RasterSourceTest.py b/python/tests/RasterSourceTest.py new file mode 100644 index 000000000..dfaa1c2f8 --- /dev/null +++ b/python/tests/RasterSourceTest.py @@ -0,0 +1,274 @@ +# +# This software is licensed under the Apache 2 license, quoted below. +# +# Copyright 2019 Astraea, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# [http://www.apache.org/licenses/LICENSE-2.0] +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# +# SPDX-License-Identifier: Apache-2.0 +# + +import json +import os.path +import urllib.request +from functools import lru_cache + +import pandas as pd +import pyspark.sql.functions as F +from geopandas import GeoDataFrame +from pyrasterframes.rasterfunctions import * +from pyrasterframes.rf_types import * +from shapely.geometry import Point + + +@lru_cache(maxsize=None) +def get_signed_url(url): + sas_url = f"https://planetarycomputer.microsoft.com/api/sas/v1/sign?href={url}" + with urllib.request.urlopen(sas_url) as response: + signed_url = json.loads(response.read())["href"] + return signed_url + + +def path(scene, band): + + scene_dict = { + 1: "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2022/195/023/LC08_L2SP_195023_20220902_20220910_02_T1/LC08_L2SP_195023_20220902_20220910_02_T1_SR_B{}.TIF", + 2: "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2022/195/022/LC08_L2SP_195022_20220902_20220910_02_T1/LC08_L2SP_195022_20220902_20220910_02_T1_SR_B{}.TIF", + 3: "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2022/196/022/LC08_L2SP_196022_20220418_20220427_02_T1/LC08_L2SP_196022_20220418_20220427_02_T1_SR_B{}.TIF", + } + + assert band in range(1, 12) + assert scene in scene_dict.keys() + p = scene_dict[scene] + return get_signed_url(p.format(band)) + + +def path_pandas_df(): + return pd.DataFrame( + [ + { + "b1": path(1, 1), + "b2": path(1, 2), + "b3": path(1, 3), + "geo": Point(1, 1), + }, + { + "b1": path(2, 1), + "b2": path(2, 2), + "b3": path(2, 3), + "geo": Point(2, 2), + }, + { + "b1": path(3, 1), + "b2": path(3, 2), + "b3": path(3, 3), + "geo": Point(3, 3), + }, + ] + ) + + +def test_handle_lazy_eval(spark): + df = spark.read.raster(path(1, 1)) + ltdf = df.select("proj_raster") + assert ltdf.count() > 0 + assert ltdf.first().proj_raster is not None + + tdf = df.select(rf_tile("proj_raster").alias("pr")) + assert tdf.count() > 0 + assert tdf.first().pr is not None + + +def test_strict_eval(spark, img_uri): + df_lazy = spark.read.raster(img_uri, lazy_tiles=True) + # when doing Show on a lazy tile we will see something like RasterRefTile(RasterRef(JVMGeoTiffRasterSource(... 
+ # use this trick to get the `show` string + show_str_lazy = df_lazy.select("proj_raster")._jdf.showString(1, -1, False) + print(show_str_lazy) + assert "RasterRef" in show_str_lazy + + # again for strict + df_strict = spark.read.raster(img_uri, lazy_tiles=False) + show_str_strict = df_strict.select("proj_raster")._jdf.showString(1, -1, False) + assert "RasterRef" not in show_str_strict + + +def test_prt_functions(spark, img_uri): + df = ( + spark.read.raster(img_uri) + .withColumn("crs", rf_crs("proj_raster")) + .withColumn("ext", rf_extent("proj_raster")) + .withColumn("geom", rf_geometry("proj_raster")) + ) + df.select("crs", "ext", "geom").first() + + +def test_list_of_str(spark): + # much the same as RasterSourceDataSourceSpec here; but using https PDS. Takes about 30s to run + + def l8path(b): + assert b in range(1, 12) + + base = "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2022/196/022/LC08_L2SP_196022_20220418_20220427_02_T1/LC08_L2SP_196022_20220418_20220427_02_T1_SR_B{}.TIF" + return get_signed_url(base.format(b)) + + path_param = [l8path(b) for b in [1, 2, 3]] + tile_size = 512 + + df = spark.read.raster( + path_param, + tile_dimensions=(tile_size, tile_size), + lazy_tiles=True, + ).cache() + + print(df.take(3)) + + # schema is tile_path and tile + # df.printSchema() + assert len(df.columns) == 2 and "proj_raster_path" in df.columns and "proj_raster" in df.columns + + # the most common tile dimensions should be as passed to `options`, showing that options are correctly applied + tile_size_df = ( + df.select( + rf_dimensions(df.proj_raster).rows.alias("r"), + rf_dimensions(df.proj_raster).cols.alias("c"), + ) + .groupby(["r", "c"]) + .count() + .toPandas() + ) + most_common_size = tile_size_df.loc[tile_size_df["count"].idxmax()] + assert most_common_size.r == tile_size and most_common_size.c == tile_size + + # all rows are from a single source URI + path_count = df.groupby(df.proj_raster_path).count() + print(path_count.collect()) + assert path_count.count() == 3 + + +def test_list_of_list_of_str(spark): + lol = [ + [path(1, 1), path(1, 2)], + [path(2, 1), path(2, 2)], + [path(3, 1), path(3, 2)], + ] + df = spark.read.raster(lol) + assert len(df.columns) == 4 # 2 cols of uris plus 2 cols of proj_rasters + assert sorted(df.columns) == sorted( + ["proj_raster_0_path", "proj_raster_1_path", "proj_raster_0", "proj_raster_1"] + ) + uri_df = df.select("proj_raster_0_path", "proj_raster_1_path").distinct() + + # check that various uri's are in the dataframe + assert uri_df.filter(F.col("proj_raster_0_path") == F.lit(path(1, 1))).count() == 1 + + assert ( + uri_df.filter(F.col("proj_raster_0_path") == F.lit(path(1, 1))) + .filter(F.col("proj_raster_1_path") == F.lit(path(1, 2))) + .count() + == 1 + ) + + assert ( + uri_df.filter(F.col("proj_raster_0_path") == F.lit(path(3, 1))) + .filter(F.col("proj_raster_1_path") == F.lit(path(3, 2))) + .count() + == 1 + ) + + +def test_schemeless_string(spark, resource_dir): + + path = os.path.join(resource_dir, "L8-B8-Robinson-IL.tiff") + assert not path.startswith("file://") + assert os.path.exists(path) + df = spark.read.raster(path) + assert df.count() > 0 + + +def test_spark_df_source(spark): + catalog_columns = ["b1", "b2", "b3"] + catalog = spark.createDataFrame(path_pandas_df()) + + df = spark.read.raster( + catalog, + tile_dimensions=(512, 512), + catalog_col_names=catalog_columns, + lazy_tiles=True, # We'll get an OOM error if we try to read 9 scenes all at once! 
+ ) + + assert len(df.columns) == 7 # three bands times {path, tile} plus geo + assert df.select("b1_path").distinct().count() == 3 # as per scene_dict + b1_paths_maybe = df.select("b1_path").distinct().collect() + b1_paths = [path(s, 1) for s in [1, 2, 3]] + assert all([row.b1_path in b1_paths for row in b1_paths_maybe]) + + +def test_pandas_source(spark): + + df = spark.read.raster(path_pandas_df(), catalog_col_names=["b1", "b2", "b3"]) + assert len(df.columns) == 7 # three path cols, three tile cols, and geo + assert "geo" in df.columns + assert df.select("b1_path").distinct().count() == 3 + + +def test_geopandas_source(spark): + + # Same test as test_pandas_source with geopandas + geo_df = GeoDataFrame(path_pandas_df(), crs={"init": "EPSG:4326"}, geometry="geo") + df = spark.read.raster(geo_df, ["b1", "b2", "b3"]) + + assert len(df.columns) == 7 # three path cols, three tile cols, and geo + assert "geo" in df.columns + assert df.select("b1_path").distinct().count() == 3 + + +def test_csv_string(spark): + + s = """metadata,b1,b2 + a,{},{} + b,{},{} + c,{},{} + """.format( + path(1, 1), + path(1, 2), + path(2, 1), + path(2, 2), + path(3, 1), + path(3, 2), + ) + + df = spark.read.raster(s, ["b1", "b2"]) + assert ( + len(df.columns) == 3 + 2 + ) # number of columns in original DF plus cardinality of catalog_col_names + assert len(df.take(1)) # non-empty check + + +def test_catalog_named_arg(spark): + # through version 0.8.1 reading a catalog was via named argument only. + df = spark.read.raster(catalog=path_pandas_df(), catalog_col_names=["b1", "b2", "b3"]) + assert len(df.columns) == 7 # three path cols, three tile cols, and geo + assert df.select("b1_path").distinct().count() == 3 + + +def test_spatial_partitioning(spark): + f = path(1, 1) + df = spark.read.raster(f, spatial_index_partitions=True) + assert "spatial_index" in df.columns + + assert df.rdd.getNumPartitions() == int(spark.conf.get("spark.sql.shuffle.partitions")) + assert spark.read.raster(f, spatial_index_partitions=34).rdd.getNumPartitions() == 34 + assert spark.read.raster(f, spatial_index_partitions="42").rdd.getNumPartitions() == 42 + assert "spatial_index" not in spark.read.raster(f, spatial_index_partitions=False).columns + assert "spatial_index" not in spark.read.raster(f, spatial_index_partitions=0).columns diff --git a/python/tests/UDTTests.py b/python/tests/UDTTests.py new file mode 100644 index 000000000..c4021f346 --- /dev/null +++ b/python/tests/UDTTests.py @@ -0,0 +1,185 @@ +import math + +import numpy as np +import numpy.testing +import pandas +import pyspark.sql.functions as F +from pyproj import CRS as pyCRS +from pyrasterframes.rasterfunctions import * +from pyrasterframes.rf_types import * +from pyspark.sql import DataFrame, Row +from pyspark.sql.types import StructField, StructType + + +def test_mask_no_data(): + t1 = Tile(np.array([[1, 2], [3, 4]]), CellType("int8ud3")) + assert t1.cells.mask[1][0] + assert t1.cells[1][1] is not None + assert len(t1.cells.compressed()) == 3 + + t2 = Tile(np.array([[1.0, 2.0], [float("nan"), 4.0]]), CellType.float32()) + assert len(t2.cells.compressed()) == 3 + assert t2.cells.mask[1][0] + assert t2.cells[1][1] is not None + + +def test_tile_udt_serialization(spark): + + udt = TileUDT() + cell_types = ( + ct for ct in rf_cell_types() if not (ct.is_raw() or ("bool" in ct.base_cell_type_name())) + ) + + for ct in cell_types: + cells = (100 + np.random.randn(3, 3) * 100).astype(ct.to_numpy_dtype()) + + if ct.is_floating_point(): + nd = 33.0 + else: + nd = 33 + + cells[1][1] 
= nd
+        a_tile = Tile(cells, ct.with_no_data_value(nd))
+        round_trip = udt.fromInternal(udt.toInternal(a_tile))
+        assert a_tile == round_trip, "round-trip serialization for " + str(ct)
+
+        schema = StructType([StructField("tile", TileUDT(), False)])
+        df = spark.createDataFrame([{"tile": a_tile}], schema)
+
+        long_trip = df.first()["tile"]
+        assert long_trip == a_tile
+
+
+def test_masked_deser(spark):
+    t = Tile(
+        np.array(
+            [
+                [1, 2, 3],
+                [4, 5, 6],
+                [7, 8, 9],
+            ]
+        ),
+        CellType("uint8"),
+    )
+
+    df = spark.createDataFrame([Row(t=t)])
+    roundtrip = df.select(rf_mask_by_value("t", rf_local_greater("t", lit(6)), 1)).first()[0]
+    assert roundtrip.cells.mask.sum() == 3, f"Expected 3 nodata values but found Tile {roundtrip}"
+
+
+def test_udf_on_tile_type_input(spark, img_uri, rf):
+
+    df = spark.read.raster(img_uri)
+
+    # create a trivial UDF that does something we already do with raster_functions
+    @F.udf("integer")
+    def my_udf(t):
+        a = t.cells
+        return a.size  # same as rf_dimensions.cols * rf_dimensions.rows
+
+    rf_result = rf.select(
+        (rf_dimensions("tile").cols.cast("int") * rf_dimensions("tile").rows.cast("int")).alias(
+            "expected"
+        ),
+        my_udf("tile").alias("result"),
+    ).toPandas()
+
+    numpy.testing.assert_array_equal(rf_result.expected.tolist(), rf_result.result.tolist())
+
+    df_result = df.select(
+        (
+            rf_dimensions(df.proj_raster).cols.cast("int")
+            * rf_dimensions(df.proj_raster).rows.cast("int")
+            - my_udf(rf_tile(df.proj_raster))
+        ).alias("result")
+    ).toPandas()
+
+    numpy.testing.assert_array_equal(np.zeros(len(df_result)), df_result.result.tolist())
+
+
+def test_udf_on_tile_type_output(rf):
+
+    # create a trivial UDF that does something we already do with raster_functions
+    @F.udf(TileUDT())
+    def my_udf(t):
+        import numpy as np
+
+        return Tile(np.log1p(t.cells))
+
+    rf_result = rf.select(
+        rf_tile_max(rf_local_subtract(my_udf(rf.tile), rf_log1p(rf.tile))).alias("expect_zeros")
+    ).collect()
+
+    # almost equal because of different implementations under the hood: C (numpy) versus Java (rf_)
+    numpy.testing.assert_almost_equal(
+        [r["expect_zeros"] for r in rf_result], [0.0 for _ in rf_result], decimal=6
+    )
+
+
+def test_no_data_udf_handling(spark):
+
+    t1 = Tile(np.array([[1, 2], [0, 4]]), CellType.uint8())
+    assert t1.cell_type.to_numpy_dtype() == np.dtype("uint8")
+    e1 = Tile(np.array([[2, 3], [0, 5]]), CellType.uint8())
+    schema = StructType([StructField("tile", TileUDT(), False)])
+    df = spark.createDataFrame([{"tile": t1}], schema)
+
+    @F.udf(TileUDT())
+    def increment(t):
+        return t + 1
+
+    r1 = df.select(increment(df.tile).alias("inc")).first()["inc"]
+    assert r1 == e1
+
+
+def test_udf_np_implicit_type_conversion(spark):
+
+    a1 = np.array([[1, 2], [0, 4]])
+    t1 = Tile(a1, CellType.uint8())
+    exp_array = a1.astype(">f8")
+
+    @F.udf(TileUDT())
+    def times_pi(t):
+        return t * math.pi
+
+    @F.udf(TileUDT())
+    def divide_pi(t):
+        return t / math.pi
+
+    @F.udf(TileUDT())
+    def plus_pi(t):
+        return t + math.pi
+
+    @F.udf(TileUDT())
+    def less_pi(t):
+        return t - math.pi
+
+    df = spark.createDataFrame(pandas.DataFrame([{"tile": t1}]))
+    r1 = df.select(less_pi(divide_pi(times_pi(plus_pi(df.tile))))).first()[0]
+
+    assert np.all(r1.cells == exp_array)
+    assert r1.cells.dtype == exp_array.dtype
+
+
+def test_crs_udt_serialization():
+    udt = CrsUDT()
+
+    crs = CRS(pyCRS.from_epsg(4326).to_proj4())
+
+    roundtrip = udt.fromInternal(udt.toInternal(crs))
+    assert crs == roundtrip
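+# The ">f8" expectation above follows from numpy's promotion rules: integer
+# cells combined with a Python float promote to float64 (the big-endian byte
+# order itself is assumed to come from the UDT wire format). A numpy-only
+# sketch of the promotion (illustration only):
+def _dtype_promotion_sketch():
+    a = np.array([[1, 2], [0, 4]], dtype="uint8")
+    assert (a + math.pi).dtype == np.dtype("float64")
+
+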
+def test_extract_from_raster(spark, img_uri):
+    # the CRS of a raster read should come back as a first-class CrsUDT column
+    rf = spark.read.raster(img_uri)
+    crs: DataFrame = rf.select(rf_crs("proj_raster").alias("crs")).distinct()
+    assert crs.schema.fields[0].dataType == CrsUDT()
+    assert crs.first()["crs"].proj4_str == "+proj=utm +zone=16 +datum=WGS84 +units=m +no_defs "
diff --git a/python/tests/VectorTypesTests.py b/python/tests/VectorTypesTests.py
new file mode 100644
index 000000000..7455a8595
--- /dev/null
+++ b/python/tests/VectorTypesTests.py
@@ -0,0 +1,244 @@
+#
+# This software is licensed under the Apache 2 license, quoted below.
+#
+# Copyright 2019 Astraea, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# [http://www.apache.org/licenses/LICENSE-2.0]
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
+import os
+
+import numpy.testing
+import pandas as pd
+import pyspark.sql.functions as F
+import pytest
+import shapely
+from geomesa_pyspark.types import PointUDT, PolygonUDT
+from pyrasterframes.rasterfunctions import *
+from pyspark.sql import Row
+
+
+@pytest.fixture
+def pandas_df():
+    return pd.DataFrame(
+        {
+            "eye": ["a", "b", "c", "d"],
+            "x": [0.0, 1.0, 2.0, 3.0],
+            "y": [-4.0, -3.0, -2.0, -1.0],
+        }
+    )
+
+
+@pytest.fixture
+def df(spark, pandas_df):
+
+    df = spark.createDataFrame(pandas_df)
+    df = df.withColumn("point_geom", st_point(df.x, df.y))
+    return df.withColumn("poly_geom", st_bufferPoint(df.point_geom, lit(1250.0)))
+
+
+def test_spatial_relations(df, pandas_df):
+
+    # Use python shapely UDT in a UDF
+    @F.udf("double")
+    def area_fn(g):
+        return g.area
+
+    @F.udf("double")
+    def length_fn(g):
+        return g.length
+
+    df = df.withColumn("poly_area", area_fn(df.poly_geom))
+    df = df.withColumn("poly_len", length_fn(df.poly_geom))
+
+    # Return a UDT from a UDF!
+ def some_point(g): + return g.representative_point() + + some_point_udf = F.udf(some_point, PointUDT()) + + df = df.withColumn("any_point", some_point_udf(df.poly_geom)) + # spark-side UDF/UDT are correct + intersect_total = ( + df.agg(F.sum(st_intersects(df.poly_geom, df.any_point).astype("double")).alias("s")) + .collect()[0] + .s + ) + assert intersect_total == df.count() + + # Collect to python driver in shapely UDT + pandas_df_out = df.toPandas() + + # Confirm we get a shapely type back from st_* function and UDF + assert isinstance(pandas_df_out.poly_geom.iloc[0], shapely.geometry.Polygon) + assert isinstance(pandas_df_out.any_point.iloc[0], shapely.geometry.Point) + + # And our spark-side manipulations were correct + xs_correct = pandas_df_out.point_geom.apply(lambda g: g.coords[0][0]) == pandas_df.x + assert all(xs_correct) + + centroid_ys = pandas_df_out.poly_geom.apply(lambda g: g.centroid.coords[0][1]).tolist() + numpy.testing.assert_almost_equal(centroid_ys, pandas_df.y.tolist()) + + # Including from UDF's + numpy.testing.assert_almost_equal( + pandas_df_out.poly_geom.apply(lambda g: g.area).values, pandas_df_out.poly_area.values + ) + numpy.testing.assert_almost_equal( + pandas_df_out.poly_geom.apply(lambda g: g.length).values, pandas_df_out.poly_len.values + ) + + +def test_geometry_udf(rf): + + # simple test that raster contents are not invalid + # create a udf to buffer (the bounds) polygon + def _buffer(g, d): + return g.buffer(d) + + @F.udf("double") + def area(g): + return g.area + + buffer_udf = F.udf(_buffer, PolygonUDT()) + + buf_cells = 10 + with_poly = rf.withColumn( + "poly", buffer_udf(rf.geometry, F.lit(-15 * buf_cells)) + ) # cell res is 15x15 + area = with_poly.select(area("poly") < area("geometry")) + area_result = area.collect() + assert all([r[0] for r in area_result]) + + +def test_rasterize(rf): + @F.udf(PolygonUDT()) + def buffer(g, d): + return g.buffer(d) + + # start with known polygon, the tile extents, **negative buffered** by 10 cells + buf_cells = 10 + with_poly = rf.withColumn( + "poly", buffer(rf.geometry, lit(-15 * buf_cells)) + ) # cell res is 15x15 + + # rasterize value 16 into buffer shape. 
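+    # The polygon was shrunk by `buf_cells` cells on every side, so the
+    # burned-in region should cover (cols - 2 * buf_cells) * (rows - 2 * buf_cells) cells.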
+ cols = 194 # from dims of tile + rows = 250 # from dims of tile + with_raster = with_poly.withColumn( + "rasterized", rf_rasterize("poly", "geometry", lit(16), lit(cols), lit(rows)) + ) + result = with_raster.select( + rf_tile_sum(rf_local_equal_int(with_raster.rasterized, 16)), + rf_tile_sum(with_raster.rasterized), + ) + # + expected_burned_in_cells = (cols - 2 * buf_cells) * (rows - 2 * buf_cells) + assert result.first()[0] == float(expected_burned_in_cells) + assert result.first()[1] == 16.0 * expected_burned_in_cells + + +def test_parse_crs(spark): + df = spark.createDataFrame([Row(id=1)]) + assert df.select(rf_mk_crs("EPSG:4326")).count() == 1 + + +def test_reproject(rf): + reprojected = rf.withColumn( + "reprojected", st_reproject("center", rf_mk_crs("EPSG:4326"), rf_mk_crs("EPSG:3857")) + ) + reprojected.show() + assert reprojected.count() == 8 + + +def test_geojson(spark, resource_dir): + + sample = "file://" + os.path.join(resource_dir, "buildings.geojson") + geo = spark.read.geojson(sample) + geo.show() + assert geo.select("geometry").count() == 8 + + +def test_xz2_index(spark, img_uri, df): + + df1 = df.select(rf_xz2_index(df.poly_geom, rf_crs(F.lit("EPSG:4326"))).alias("index")) + expected = {22858201775, 38132946267, 38166922588, 38180072113} + indexes = {x[0] for x in df1.collect()} + assert indexes == expected + + # Test against proj_raster (has CRS and Extent embedded). + df2 = spark.read.raster(img_uri) + result_one_arg = df2.select(rf_xz2_index("proj_raster").alias("ix")).agg(F.min("ix")).first()[0] + + result_two_arg = ( + df2.select(rf_xz2_index(rf_extent("proj_raster"), rf_crs("proj_raster")).alias("ix")) + .agg(F.min("ix")) + .first()[0] + ) + + assert result_two_arg == result_one_arg + assert result_one_arg == 55179438768 # this is a bit more fragile but less important + + # Custom resolution + df3 = df.select(rf_xz2_index(df.poly_geom, rf_crs(lit("EPSG:4326")), 3).alias("index")) + expected = {21, 36} + indexes = {x[0] for x in df3.collect()} + assert indexes == expected + + +def test_z2_index(df): + df1 = df.select(rf_z2_index(df.poly_geom, rf_crs(lit("EPSG:4326"))).alias("index")) + + expected = {28596898472, 28625192874, 28635062506, 28599712232} + indexes = {x[0] for x in df1.collect()} + assert indexes == expected + + # Custom resolution + df2 = df.select(rf_z2_index(df.poly_geom, rf_crs(lit("EPSG:4326")), 6).alias("index")) + expected = {1704, 1706} + indexes = {x[0] for x in df2.collect()} + assert indexes == expected + + +def test_agg_extent(df): + r = ( + df.select(rf_agg_extent(st_extent("poly_geom")).alias("agg_extent")) + .select("agg_extent.*") + .first() + ) + assert ( + r.asDict() + == Row( + xmin=-0.011268955205879273, + ymin=-4.011268955205879, + xmax=3.0112432169934484, + ymax=-0.9887567830065516, + ).asDict() + ) + + +def test_agg_reprojected_extent(df): + r = df.select( + rf_agg_reprojected_extent(st_extent("poly_geom"), rf_mk_crs("EPSG:4326"), "EPSG:3857") + ).first()[0] + assert ( + r.asDict() + == Row( + xmin=-1254.45435529069, + ymin=-446897.63591665257, + xmax=335210.0615704097, + ymax=-110073.36515944061, + ).asDict() + ) diff --git a/python/tests/__init__.py b/python/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/python/tests/conftest.py b/python/tests/conftest.py new file mode 100644 index 000000000..8c8f29f44 --- /dev/null +++ b/python/tests/conftest.py @@ -0,0 +1,130 @@ +# +# This software is licensed under the Apache 2 license, quoted below. +# +# Copyright 2019 Astraea, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# [http://www.apache.org/licenses/LICENSE-2.0] +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# +# SPDX-License-Identifier: Apache-2.0 +# + +import builtins +import os +from pathlib import Path + +import pytest +from pyrasterframes.rasterfunctions import rf_convert_cell_type +from pyrasterframes.utils import create_rf_spark_session + + +# Setuptools/easy_install doesn't properly set the execute bit on the Spark scripts, +# So this preemptively attempts to do it. +def _chmodit(): + try: + from importlib.util import find_spec + + module_home = find_spec("pyspark").origin + print(module_home) + bin_dir = os.path.join(os.path.dirname(module_home), "bin") + for filename in os.listdir(bin_dir): + try: + os.chmod(os.path.join(bin_dir, filename), mode=0o555, follow_symlinks=True) + except OSError: + pass + except ImportError: + pass + + +_chmodit() + +jar_dir = Path(".") / "dist" +jar_path = next(jar_dir.glob("*assembly*.jar")) + + +@pytest.fixture(scope="session") +def app_name(): + return "PyRasterFrames test suite" + + +@pytest.fixture(scope="session") +def resource_dir(): + here = os.path.dirname(os.path.realpath(__file__)) + return os.path.join(here, "resources") + + +@pytest.fixture(scope="session") +def spark(app_name): + spark_session = create_rf_spark_session( + **{ + "spark.master": "local[*, 2]", + "spark.ui.enabled": "false", + "spark.app.name": app_name, + "spark.jars": jar_path, + #'spark.driver.extraJavaOptions': '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005' + } + ) + spark_session.sparkContext.setLogLevel("ERROR") + + print("Spark Version: " + spark_session.version) + print("Spark Config: " + str(spark_session.sparkContext._conf.getAll())) + + return spark_session + + +@pytest.fixture() +def img_uri(resource_dir): + img_path = os.path.join(resource_dir, "L8-B8-Robinson-IL.tiff") + return "file://" + img_path + + +@pytest.fixture() +def img_rgb_uri(resource_dir): + img_rgb_path = os.path.join(resource_dir, "L8-B4_3_2-Elkton-VA.tiff") + return "file://" + img_rgb_path + + +@pytest.fixture() +def rf(spark, img_uri): + # load something into a rasterframe + rf = spark.read.geotiff(img_uri).with_bounds().with_center() + + # convert the tile cell type to provide for other operations + return ( + rf.withColumn("tile2", rf_convert_cell_type("tile", "float32")) + .drop("tile") + .withColumnRenamed("tile2", "tile") + .as_layer() + ) + + +@pytest.fixture() +def prdf(spark, img_uri): + return spark.read.raster(img_uri) + + +@pytest.fixture() +def df(prdf): + return prdf.withColumn("tile", rf_convert_cell_type("proj_raster", "float32")).drop( + "proj_raster" + ) + + +def assert_png(bytes): + assert bytes[0:8] == bytearray( + [0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A] + ), "png header does not match" + + +def rounded_compare(val1, val2): + print("Comparing {} and {} using round()".format(val1, val2)) + return builtins.round(val1) == builtins.round(val2) diff --git a/python/tests/resources/L8-B2-Elkton-VA.tiff b/python/tests/resources/L8-B2-Elkton-VA.tiff new file mode 100644 index 000000000..b287e5180 Binary 
files /dev/null and b/python/tests/resources/L8-B2-Elkton-VA.tiff differ diff --git a/python/tests/resources/L8-B3-Elkton-VA.tiff b/python/tests/resources/L8-B3-Elkton-VA.tiff new file mode 100644 index 000000000..61a95250c Binary files /dev/null and b/python/tests/resources/L8-B3-Elkton-VA.tiff differ diff --git a/python/tests/resources/L8-B4-Elkton-VA-4326.tiff b/python/tests/resources/L8-B4-Elkton-VA-4326.tiff new file mode 100644 index 000000000..2bc57e255 Binary files /dev/null and b/python/tests/resources/L8-B4-Elkton-VA-4326.tiff differ diff --git a/python/tests/resources/L8-B4-Elkton-VA.tiff b/python/tests/resources/L8-B4-Elkton-VA.tiff new file mode 100644 index 000000000..2534d4bd0 Binary files /dev/null and b/python/tests/resources/L8-B4-Elkton-VA.tiff differ diff --git a/python/tests/resources/L8-B4_3_2-Elkton-VA.tiff b/python/tests/resources/L8-B4_3_2-Elkton-VA.tiff new file mode 100644 index 000000000..c351f5887 Binary files /dev/null and b/python/tests/resources/L8-B4_3_2-Elkton-VA.tiff differ diff --git a/python/tests/resources/L8-B8-Robinson-IL.tiff b/python/tests/resources/L8-B8-Robinson-IL.tiff new file mode 100644 index 000000000..224ec5ac9 Binary files /dev/null and b/python/tests/resources/L8-B8-Robinson-IL.tiff differ diff --git a/python/tests/resources/buildings.geojson b/python/tests/resources/buildings.geojson new file mode 100644 index 000000000..a9eba9fc0 --- /dev/null +++ b/python/tests/resources/buildings.geojson @@ -0,0 +1,899 @@ +{ + "type": "FeatureCollection", + "crs": { + "type": "name", + "properties": { + "name": "urn:ogc:def:crs:EPSG::3968" + } + }, + "bbox": [ + 10453.2340000011, + 137465.4443, + 0, + 203572.226800002, + 261914.089200001, + 518 + ], + "features": [ + { + "type": "Feature", + "properties": { + "OBJECTID": 2560367, + "BLDGHEIGHT": 0, + "NUMSTORIES": 0, + "FEATURECOD": null, + "LASTUPDATE": "2017\/08\/15", + "LASTEDITOR": "VGIN", + "RuleID": 0, + "BUILDINGCL": 0, + "DATASOURCE": "Jurisdiction", + "EDITCOMMEN": null, + "SOURCEFEAT": "23712149512", + "RuleID_1": 1, + "SFIDdupes": 0, + "FIPS": "51680", + "MUNICIPALI": "Lynchburg City", + "SHAPE_Leng": 112.01513810500001, + "SHAPE_Area": 612.03868604499996, + "AREA_SQFT": 6587.9 + }, + "bbox": [ + 23695.017599999904633, + 149496.648900002241135, + 0.0, + 23732.747999999672174, + 149527.1985, + 0.0 + ], + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 23730.124099999666214, + 149514.097699999809265, + 0.0 + ], + [ + 23728.569000001996756, + 149511.027800001204014, + 0.0 + ], + [ + 23732.747999999672174, + 149508.911100000143051, + 0.0 + ], + [ + 23731.134500000625849, + 149505.72520000115037, + 0.0 + ], + [ + 23727.252300001680851, + 149507.691500000655651, + 0.0 + ], + [ + 23725.473999999463558, + 149504.180500000715256, + 0.0 + ], + [ + 23722.397100001573563, + 149505.523900002241135, + 0.0 + ], + [ + 23717.634100001305342, + 149496.648900002241135, + 0.0 + ], + [ + 23696.2179000005126, + 149507.229299999773502, + 0.0 + ], + [ + 23698.083399999886751, + 149510.912300001829863, + 0.0 + ], + [ + 23696.541700001806021, + 149511.693300001323223, + 0.0 + ], + [ + 23695.017599999904633, + 149512.464999999850988, + 0.0 + ], + [ + 23702.387800000607967, + 149527.1985, + 0.0 + ], + [ + 23730.124099999666214, + 149514.097699999809265, + 0.0 + ] + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "OBJECTID": 2561065, + "BLDGHEIGHT": 0, + "NUMSTORIES": 0, + "FEATURECOD": null, + "LASTUPDATE": "2017\/08\/15", + "LASTEDITOR": "VGIN", + "RuleID": 0, + "BUILDINGCL": 
0, + "DATASOURCE": "Jurisdiction", + "EDITCOMMEN": null, + "SOURCEFEAT": "22784149837", + "RuleID_1": 1, + "SFIDdupes": 0, + "FIPS": "51680", + "MUNICIPALI": "Lynchburg City", + "SHAPE_Leng": 140.080104933, + "SHAPE_Area": 572.44039797000005, + "AREA_SQFT": 6161.67 + }, + "bbox": [ + 22778.690500002354383, + 149807.139500003308058, + 0.0, + 22788.61260000243783, + 149867.804400000721216, + 0.0 + ], + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 22788.599300000816584, + 149866.041400000452995, + 0.0 + ], + [ + 22788.586899999529123, + 149864.452200002968311, + 0.0 + ], + [ + 22788.545500002801418, + 149859.174200002104044, + 0.0 + ], + [ + 22788.532500002533197, + 149857.504900000989437, + 0.0 + ], + [ + 22788.493900001049042, + 149852.586800001561642, + 0.0 + ], + [ + 22788.480700001120567, + 149850.917000003159046, + 0.0 + ], + [ + 22788.44030000269413, + 149845.7555, + 0.0 + ], + [ + 22788.427200000733137, + 149844.088899999856949, + 0.0 + ], + [ + 22788.36600000038743, + 149836.274000000208616, + 0.0 + ], + [ + 22788.355200000107288, + 149834.883799999952, + 0.0 + ], + [ + 22788.314600002020597, + 149829.721200000494719, + 0.0 + ], + [ + 22788.302200000733137, + 149828.140100002288818, + 0.0 + ], + [ + 22788.262099999934435, + 149823.015900000929832, + 0.0 + ], + [ + 22788.249400001019239, + 149821.38910000026226, + 0.0 + ], + [ + 22788.20890000090003, + 149816.230100002139807, + 0.0 + ], + [ + 22788.19649999961257, + 149814.641700003296137, + 0.0 + ], + [ + 22788.157099999487, + 149809.631100002676249, + 0.0 + ], + [ + 22788.143800001591444, + 149807.931900002062321, + 0.0 + ], + [ + 22788.137600000947714, + 149807.139500003308058, + 0.0 + ], + [ + 22778.690500002354383, + 149807.213600002229214, + 0.0 + ], + [ + 22778.701400000602007, + 149808.6064000017941, + 0.0 + ], + [ + 22778.724300000816584, + 149811.520500000566244, + 0.0 + ], + [ + 22778.757200002670288, + 149815.718500003218651, + 0.0 + ], + [ + 22778.776700001209974, + 149818.198900002986193, + 0.0 + ], + [ + 22778.80800000205636, + 149822.202100001275539, + 0.0 + ], + [ + 22778.830000001937151, + 149825.011500000953674, + 0.0 + ], + [ + 22778.861900001764297, + 149829.083700001239777, + 0.0 + ], + [ + 22778.882699999958277, + 149831.727000001817942, + 0.0 + ], + [ + 22778.913499999791384, + 149835.65990000218153, + 0.0 + ], + [ + 22778.934599999338388, + 149838.344300001859665, + 0.0 + ], + [ + 22778.967199999839067, + 149842.509800001978874, + 0.0 + ], + [ + 22778.99040000140667, + 149845.455000001937151, + 0.0 + ], + [ + 22779.019000001251698, + 149849.091400001198053, + 0.0 + ], + [ + 22779.041999999433756, + 149852.034200001507998, + 0.0 + ], + [ + 22779.073300000280142, + 149856.03490000218153, + 0.0 + ], + [ + 22779.094000000506639, + 149858.6824000030756, + 0.0 + ], + [ + 22779.124700002372265, + 149862.587100002914667, + 0.0 + ], + [ + 22779.149300001561642, + 149865.729600001126528, + 0.0 + ], + [ + 22779.159200001507998, + 149866.985300000756979, + 0.0 + ], + [ + 22779.165600001811981, + 149867.804400000721216, + 0.0 + ], + [ + 22788.61260000243783, + 149867.730399999767542, + 0.0 + ], + [ + 22788.599300000816584, + 149866.041400000452995, + 0.0 + ] + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "OBJECTID": 2564971, + "BLDGHEIGHT": 0, + "NUMSTORIES": 0, + "FEATURECOD": null, + "LASTUPDATE": "2017\/08\/15", + "LASTEDITOR": "VGIN", + "RuleID": 0, + "BUILDINGCL": 0, + "DATASOURCE": "Jurisdiction", + "EDITCOMMEN": null, + "SOURCEFEAT": "22828150049", + "RuleID_1": 1, + 
"SFIDdupes": 0, + "FIPS": "51680", + "MUNICIPALI": "Lynchburg City", + "SHAPE_Leng": 90.962539370399995, + "SHAPE_Area": 469.83376775699998, + "AREA_SQFT": 5057.23 + }, + "bbox": [ + 22814.135000001639128, + 150033.061800003051758, + 0.0, + 22842.757500000298023, + 150063.555600002408028, + 0.0 + ], + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 22826.411499999463558, + 150063.357500001788139, + 0.0 + ], + [ + 22832.272399999201298, + 150060.221000000834465, + 0.0 + ], + [ + 22832.609200000762939, + 150060.850400000810623, + 0.0 + ], + [ + 22833.580099999904633, + 150062.664800003170967, + 0.0 + ], + [ + 22840.825800001621246, + 150058.787200000137091, + 0.0 + ], + [ + 22842.757500000298023, + 150057.753499999642372, + 0.0 + ], + [ + 22841.060200002044439, + 150054.581800002604723, + 0.0 + ], + [ + 22829.543500002473593, + 150033.061800003051758, + 0.0 + ], + [ + 22814.135000001639128, + 150041.30800000205636, + 0.0 + ], + [ + 22815.89299999922514, + 150044.593100000172853, + 0.0 + ], + [ + 22826.041400000452995, + 150063.555600002408028, + 0.0 + ], + [ + 22826.411499999463558, + 150063.357500001788139, + 0.0 + ] + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "OBJECTID": 2565115, + "BLDGHEIGHT": 0, + "NUMSTORIES": 0, + "FEATURECOD": null, + "LASTUPDATE": "2017\/08\/15", + "LASTEDITOR": "VGIN", + "RuleID": 0, + "BUILDINGCL": 0, + "DATASOURCE": "Jurisdiction", + "EDITCOMMEN": null, + "SOURCEFEAT": "23690149798", + "RuleID_1": 1, + "SFIDdupes": 0, + "FIPS": "51680", + "MUNICIPALI": "Lynchburg City", + "SHAPE_Leng": 666.59224832899997, + "SHAPE_Area": 1754.8776121799999, + "AREA_SQFT": 18889.3 + }, + "bbox": [ + 23608.65260000154376, + 149786.040000002831221, + 0.0, + 23716.468699999153614, + 149934.448400001972914, + 0.0 + ], + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 23677.989900000393391, + 149786.040000002831221, + 0.0 + ], + [ + 23608.65260000154376, + 149905.035600002855062, + 0.0 + ], + [ + 23659.130400002002716, + 149934.448400001972914, + 0.0 + ], + [ + 23709.182300001382828, + 149848.55010000243783, + 0.0 + ], + [ + 23702.254200000315905, + 149844.513300001621246, + 0.0 + ], + [ + 23653.967100001871586, + 149927.382800001651049, + 0.0 + ], + [ + 23613.26410000026226, + 149903.665699999779463, + 0.0 + ], + [ + 23676.700699999928474, + 149794.797200001776218, + 0.0 + ], + [ + 23712.332600001245737, + 149815.559600003063679, + 0.0 + ], + [ + 23716.468699999153614, + 149808.461100000888109, + 0.0 + ], + [ + 23677.989900000393391, + 149786.040000002831221, + 0.0 + ] + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "OBJECTID": 2565175, + "BLDGHEIGHT": 0, + "NUMSTORIES": 0, + "FEATURECOD": null, + "LASTUPDATE": "2017\/08\/15", + "LASTEDITOR": "VGIN", + "RuleID": 0, + "BUILDINGCL": 0, + "DATASOURCE": "Jurisdiction", + "EDITCOMMEN": null, + "SOURCEFEAT": "23677149868", + "RuleID_1": 1, + "SFIDdupes": 0, + "FIPS": "51680", + "MUNICIPALI": "Lynchburg City", + "SHAPE_Leng": 208.79166312800001, + "SHAPE_Area": 756.18697303700003, + "AREA_SQFT": 8139.5 + }, + "bbox": [ + 23649.58049999922514, + 149824.107200000435114, + 0.0, + 23704.862700000405312, + 149911.536400001496077, + 0.0 + ], + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 23698.09180000051856, + 149824.107200000435114, + 0.0 + ], + [ + 23649.58049999922514, + 149907.602500002831221, + 0.0 + ], + [ + 23656.351600002497435, + 149911.536400001496077, + 0.0 + ], + [ + 23704.862700000405312, + 149828.041200000792742, + 
0.0 + ], + [ + 23698.09180000051856, + 149824.107200000435114, + 0.0 + ] + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "OBJECTID": 2565183, + "BLDGHEIGHT": 0, + "NUMSTORIES": 0, + "FEATURECOD": null, + "LASTUPDATE": "2017\/08\/15", + "LASTEDITOR": "VGIN", + "RuleID": 0, + "BUILDINGCL": 0, + "DATASOURCE": "Jurisdiction", + "EDITCOMMEN": null, + "SOURCEFEAT": "23665149861", + "RuleID_1": 1, + "SFIDdupes": 0, + "FIPS": "51680", + "MUNICIPALI": "Lynchburg City", + "SHAPE_Leng": 207.67019654800001, + "SHAPE_Area": 732.90812511800004, + "AREA_SQFT": 7888.93 + }, + "bbox": [ + 23637.763700000941753, + 149817.533100001513958, + 0.0, + 23692.70160000026226, + 149904.5471, + 0.0 + ], + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 23686.116100002080202, + 149817.533100001513958, + 0.0 + ], + [ + 23637.763700000941753, + 149900.719300001859665, + 0.0 + ], + [ + 23644.349199999123812, + 149904.5471, + 0.0 + ], + [ + 23692.70160000026226, + 149821.361000001430511, + 0.0 + ], + [ + 23686.116100002080202, + 149817.533100001513958, + 0.0 + ] + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "OBJECTID": 2565190, + "BLDGHEIGHT": 0, + "NUMSTORIES": 0, + "FEATURECOD": null, + "LASTUPDATE": "2017\/08\/15", + "LASTEDITOR": "VGIN", + "RuleID": 0, + "BUILDINGCL": 0, + "DATASOURCE": "Jurisdiction", + "EDITCOMMEN": null, + "SOURCEFEAT": "23742149880", + "RuleID_1": 1, + "SFIDdupes": 0, + "FIPS": "51680", + "MUNICIPALI": "Lynchburg City", + "SHAPE_Leng": 122.36290404099999, + "SHAPE_Area": 646.52232617100003, + "AREA_SQFT": 6959.08 + }, + "bbox": [ + 23724.983899999409914, + 149858.991399999707937, + 0.0, + 23760.777000002563, + 149899.87780000269413, + 0.0 + ], + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 23738.19200000166893, + 149899.87780000269413, + 0.0 + ], + [ + 23760.777000002563, + 149866.160600002855062, + 0.0 + ], + [ + 23756.139400001615286, + 149863.332299999892712, + 0.0 + ], + [ + 23749.021099999547005, + 149858.991399999707937, + 0.0 + ], + [ + 23739.843000002205372, + 149873.353400003165007, + 0.0 + ], + [ + 23738.434500001370907, + 149872.516200002282858, + 0.0 + ], + [ + 23736.961800001561642, + 149875.730600003153086, + 0.0 + ], + [ + 23733.695100001990795, + 149873.934000000357628, + 0.0 + ], + [ + 23731.586500000208616, + 149877.310600001364946, + 0.0 + ], + [ + 23726.082499999552965, + 149886.124200001358986, + 0.0 + ], + [ + 23728.469000000506639, + 149888.12780000269413, + 0.0 + ], + [ + 23726.007200002670288, + 149892.045700002461672, + 0.0 + ], + [ + 23724.983899999409914, + 149893.5912000015378, + 0.0 + ], + [ + 23727.842800002545118, + 149895.429100003093481, + 0.0 + ], + [ + 23727.374400001019239, + 149896.157600000500679, + 0.0 + ], + [ + 23733.05970000103116, + 149899.811800003051758, + 0.0 + ], + [ + 23734.530200000852346, + 149897.524100001901388, + 0.0 + ], + [ + 23738.19200000166893, + 149899.87780000269413, + 0.0 + ] + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "OBJECTID": 2565193, + "BLDGHEIGHT": 0, + "NUMSTORIES": 0, + "FEATURECOD": null, + "LASTUPDATE": "2017\/08\/15", + "LASTEDITOR": "VGIN", + "RuleID": 0, + "BUILDINGCL": 0, + "DATASOURCE": "Jurisdiction", + "EDITCOMMEN": null, + "SOURCEFEAT": "23654149854", + "RuleID_1": 1, + "SFIDdupes": 0, + "FIPS": "51680", + "MUNICIPALI": "Lynchburg City", + "SHAPE_Leng": 207.14953045, + "SHAPE_Area": 744.31082211199998, + "AREA_SQFT": 8011.66 + }, + "bbox": [ + 23626.108500000089407, + 149810.810000002384186, + 0.0, + 
23681.160199999809265, + 149897.444099999964, + 0.0 + ], + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 23674.452899999916553, + 149810.810000002384186, + 0.0 + ], + [ + 23626.108500000089407, + 149893.523800000548363, + 0.0 + ], + [ + 23632.815799999982119, + 149897.444099999964, + 0.0 + ], + [ + 23681.160199999809265, + 149814.73030000180006, + 0.0 + ], + [ + 23674.452899999916553, + 149810.810000002384186, + 0.0 + ] + ] + ] + ] + } + } + ] +}