diff --git a/.circleci/.dockerignore b/.circleci/.dockerignore
deleted file mode 100644
index dbe9a91d7..000000000
--- a/.circleci/.dockerignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*
-!requirements-conda.txt
-!fix-permissions
diff --git a/.circleci/Dockerfile b/.circleci/Dockerfile
deleted file mode 100644
index f4629597a..000000000
--- a/.circleci/Dockerfile
+++ /dev/null
@@ -1,35 +0,0 @@
-FROM circleci/openjdk:11-jdk
-#LABEL org.opencontainers.image.source=https://github.com/locationtech/rasterframes
-
-USER root
-
-# See: https://docs.conda.io/projects/conda/en/latest/user-guide/install/rpm-debian.html
-RUN \
-  curl -s https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \
-  install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \
-  gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \
-  echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list
-
-RUN \
-  apt-get update && \
-  apt-get install -yq --no-install-recommends conda && \
-  apt-get clean && \
-  rm -rf /var/lib/apt/lists/*
-
-ENV CONDA_DIR=/opt/conda
-ENV PATH=$CONDA_DIR/bin:$PATH
-
-COPY requirements-conda.txt fix-permissions /tmp
-RUN \
-  conda install --quiet --yes --channel=conda-forge --file=/tmp/requirements-conda.txt && \
-  echo "$CONDA_DIR/lib" > /etc/ld.so.conf.d/conda.conf && \
-  ldconfig && \
-  conda clean --all --force-pkgs-dirs --yes --quiet && \
-  sh /tmp/fix-permissions $CONDA_DIR
-
-
-# Work-around for pyproj issue https://github.com/pyproj4/pyproj/issues/415
-ENV PROJ_LIB=/opt/conda/share/proj
-
-USER 3434
-WORKDIR /home/circleci
diff --git a/.circleci/Makefile b/.circleci/Makefile
deleted file mode 100644
index 578140c4e..000000000
--- a/.circleci/Makefile
+++ /dev/null
@@ -1,27 +0,0 @@
-IMAGE_NAME=circleci-openjdk-conda-gdal
-SHA=$(shell git log -n1 --format=format:"%H" | cut -c 1-7)
-VERSION?=$(SHA)
-HOST=docker.io
-REPO=$(HOST)/s22s
-FULL_NAME=$(REPO)/$(IMAGE_NAME):$(VERSION)
-
-.DEFAULT_GOAL := help
-help:
-# http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
-	@echo "Usage: make [target]"
-	@echo "Targets: "
-	@grep -E '^[a-zA-Z0-9_%/-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\t\033[36m%-20s\033[0m %s\n", $$1, $$2}'
-
-all: build push ## Build and then push image
-
-build: ## Build the docker image
-	docker build . -t ${FULL_NAME}
-
-login: ## Login to the docker registry
-	docker login
-
-push: login ## Push docker image to registry
-	docker push ${FULL_NAME}
-
-run: build ## Build image and launch shell
-	docker run --rm -it ${FULL_NAME} bash
diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index 5b832beb6..000000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,249 +0,0 @@
-version: 2.1
-
-orbs:
-  sbt:
-    description: SBT build/test runtime
-    executors:
-      default:
-        docker:
-          - image: s22s/circleci-openjdk-conda-gdal:b8e30ee
-        working_directory: ~/repo
-        environment:
-          SBT_OPTS: "-Xms32M -Xmx2G -XX:+UseStringDeduplication -XX:+UseCompressedOops -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp"
-    commands:
-      setup:
-        description: Setup for sbt build
-        steps:
-          - run:
-              name: Setup sbt
-              command: 'true' # NOOP
-
-      compile:
-        description: Do just the compilation stage to minimize sbt memory footprint
-        steps:
-          - run:
-              name: "Compile Scala via sbt"
-              command: sbt -v -batch compile test:compile it:compile
-
-  python:
-    commands:
-      setup:
-        description: Ensure a minimal python environment is avalable and ready
-        steps:
-          - run:
-              name: Install Python and PIP
-              command: |-
-                python -m pip install --user 'setuptools>=45.2'
-
-      requirements:
-        description: Install packages identified in requirements file
-        steps:
-          - run:
-              name: Install requirements
-              command: /opt/conda/bin/conda install -c conda-forge --yes --file pyrasterframes/src/main/python/requirements-condaforge.txt
-
-
-  rasterframes:
-    commands:
-      setup:
-        steps:
-          - run:
-              name: Enable saving core files
-              command: ulimit -c unlimited -S
-
-      save-artifacts:
-        steps:
-          - run:
-              command: |
-                mkdir -p /tmp/core_dumps
-                ls -lh /tmp
-                cp core.* *.hs /tmp/core_dumps 2> /dev/null || true
-                cp core/* /tmp/core_dumps/ 2> /dev/null || true
-                cp -r /tmp/hsperfdata* /tmp/*.hprof /tmp/core_dumps 2> /dev/null || true
-              when: on_fail
-
-          - store_artifacts:
-              path: /tmp/core_dumps
-
-          - store_test_results:
-              path: core/target/test-reports
-
-          - store_test_results:
-              path: datasource/target/test-reports
-
-          - store_test_results:
-              path: experimental/target/test-reports
-
-      save-doc-artifacts:
-        steps:
-          - run:
-              command: |
-                mkdir -p /tmp/core_dumps
-                cp core.* *.hs /tmp/core_dumps 2> /dev/null || true
-                mkdir -p /tmp/markdown
-                cp /home/circleci/repo/pyrasterframes/target/python/docs/*.md /tmp/markdown 2> /dev/null || true
-              when: on_fail
-
-          - store_artifacts:
-              path: /tmp/core_dumps
-
-          - store_artifacts:
-              path: /tmp/markdown
-
-          - store_artifacts:
-              path: docs/target/site
-              destination: rf-site
-
-      save-cache:
-        steps:
-          - save_cache:
-              key: v4-dependencies--{{ checksum "build.sbt" }}
-              paths:
-                - ~/.ivy2/cache
-                - ~/.sbt
-                - ~/.cache/coursier
-                - ~/.local
-
-      restore-cache:
-        steps:
-          - restore_cache:
-              keys:
-                - v4-dependencies-{{ checksum "build.sbt" }}
-
-jobs:
-  test:
-    executor: sbt/default
-    steps:
-      - checkout
-      - sbt/setup
-      - python/setup
-      - python/requirements
-      - rasterframes/setup
-      - rasterframes/restore-cache
-      - sbt/compile
-
-      - run:
-          name: "Scala Tests: core"
-          command: sbt -v -batch core/test
-
-      - run:
-          name: "Scala Tests: datasource"
-          command: sbt -v -batch datasource/test
-
-      - run:
-          name: "Scala Tests: experimental"
-          command: sbt -v -batch experimental/test
-
-      - run:
-          name: "Create PyRasterFrames package"
-          command: sbt -v -batch pyrasterframes/package
-
-      - run:
-          name: "Python Tests"
-          command: sbt -v -batch pyrasterframes/test
-
-      - rasterframes/save-artifacts
-      - rasterframes/save-cache
-
-  docs:
-    executor: sbt/default
-    steps:
-      - checkout
-      - sbt/setup
-      - python/setup
-      - python/requirements
-      - rasterframes/setup
-      - rasterframes/restore-cache
-      - sbt/compile
-
-      - run:
-          name: Build documentation
-          command: sbt makeSite
-          no_output_timeout: 30m
-
-      - rasterframes/save-doc-artifacts
-      - rasterframes/save-cache
-
-  it:
-    executor: sbt/default
-    steps:
-      - checkout
-      - sbt/setup
-      - rasterframes/setup
-      - rasterframes/restore-cache
-      - sbt/compile
-
-      - run:
-          name: Integration tests
-          command: sbt it:test
-          no_output_timeout: 30m
-
-      - rasterframes/save-artifacts
-      - rasterframes/save-cache
-
-  it-no-gdal:
-    executor: sbt/default
-    steps:
-      - checkout
-      - sbt/setup
-      - rasterframes/setup
-      - rasterframes/restore-cache
-
-      - run:
-          name: Uninstall GDAL
-          command: conda remove gdal -q -y --offline
-
-      - sbt/compile
-
-      - run:
-          name: Integration tests
-          command: sbt it:test
-          no_output_timeout: 30m
-
-      - rasterframes/save-artifacts
-      - rasterframes/save-cache
-
-workflows:
-  version: 2
-  all:
-    jobs:
-      - test
-
-      - it:
-          requires:
-            - test
-          filters:
-            branches:
-              only:
-                - /feature\/.*-it.*/
-                - /it\/.*/
-
-      - it-no-gdal:
-          requires:
-            - test
-          filters:
-            branches:
-              only:
-                - /feature\/.*-it.*/
-                - /it\/.*/
-
-      - docs:
-          filters:
-            branches:
-              only:
-                - /feature\/.*docs.*/
-                - /fix\/.*docs.*/
-                - /docs\/.*/
-
-  weekly:
-    triggers:
-      - schedule:
-          cron: "0 8 4 * *"
-          filters:
-            branches:
-              only:
-                - develop
-    jobs:
-      - test
-      - it
-      - it-no-gdal
diff --git a/.circleci/fix-permissions b/.circleci/fix-permissions
deleted file mode 100755
index d8e14920f..000000000
--- a/.circleci/fix-permissions
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-GID=3434 # circleci
-
-for d in "$@"; do
-  find "$d" \
-    ! \( \
-      -group $GID \
-      -a -perm -g+rwX \
-    \) \
-    -exec chgrp $GID {} \; \
-    -exec chmod g+rwX {} \;
-  # setuid,setgid *on directories only*
-  find "$d" \
-    \( \
-      -type d \
-      -a ! -perm -6000 \
-    \) \
-    -exec chmod +6000 {} \;
-done
diff --git a/.circleci/requirements-conda.txt b/.circleci/requirements-conda.txt
deleted file mode 100644
index a8ebfd56b..000000000
--- a/.circleci/requirements-conda.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-python==3.8
-gdal==3.1.2
-libspatialindex
-rasterio[s3]
-rtree
\ No newline at end of file
diff --git a/.github/actions/collect_artefacts/action.yml b/.github/actions/collect_artefacts/action.yml
new file mode 100644
index 000000000..27575e34f
--- /dev/null
+++ b/.github/actions/collect_artefacts/action.yml
@@ -0,0 +1,10 @@
+name: upload rasterframes artefacts
+description: upload rasterframes artefacts
+runs:
+  using: "composite"
+  steps:
+    - name: upload core dumps
+      uses: actions/upload-artifact@v3
+      with:
+        name: core-dumps
+        path: /tmp/core_dumps
\ No newline at end of file
diff --git a/.github/actions/init-python-env/action.yaml b/.github/actions/init-python-env/action.yaml
new file mode 100644
index 000000000..89f45cfec
--- /dev/null
+++ b/.github/actions/init-python-env/action.yaml
@@ -0,0 +1,40 @@
+name: Setup Python Environment
+
+description: Install Python, Poetry and project dependencies
+
+inputs:
+  python_version:
+    description: 'Version of Python to configure'
+    default: '3.8'
+  poetry_version:
+    description: 'Version of Poetry to configure'
+    default: '1.3.2'
+
+runs:
+  using: "composite"
+  steps:
+    - name: Load cached Poetry installation
+      id: cached-poetry
+      uses: actions/cache@v3
+      with:
+        path: ~/.local # the path depends on the OS, this is linux
+        key: poetry-${{inputs.poetry_version}}-0 # increment to reset cache
+
+    - name: Install Poetry
+      if: steps.cached-poetry.outputs.cache-hit != 'true'
+      uses: snok/install-poetry@v1
+      with:
+        version: ${{ inputs.poetry_version }}
+        virtualenvs-create: true
+        virtualenvs-in-project: true
+
+    - name: Setup Python
+      uses: actions/setup-python@v4
+      with:
+        python-version: ${{ inputs.python_version }}
+        cache: 'poetry'
+
+    - name: Install Poetry project dependencies
+      # if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
+      shell: bash
+      run: make init-python
\ No newline at end of file
diff --git a/.github/actions/init-scala-env/action.yaml b/.github/actions/init-scala-env/action.yaml
new file mode 100644
index 000000000..902f8de40
--- /dev/null
+++ b/.github/actions/init-scala-env/action.yaml
@@ -0,0 +1,10 @@
+name: setup scala
+description: setup scala environment
+runs:
+  using: "composite"
+  steps:
+    - uses: coursier/cache-action@v6
+    - uses: coursier/setup-action@v1
+      with:
+        jvm: zulu:8.0.362
+        apps: sbt
diff --git a/.github/image/.dockerignore b/.github/image/.dockerignore
deleted file mode 100644
index dbe9a91d7..000000000
--- a/.github/image/.dockerignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*
-!requirements-conda.txt
-!fix-permissions
diff --git a/.github/image/Dockerfile b/.github/image/Dockerfile
deleted file mode 100644
index 27cd7a1aa..000000000
--- a/.github/image/Dockerfile
+++ /dev/null
@@ -1,28 +0,0 @@
-FROM adoptopenjdk/openjdk11:debian-slim
-
-# See: https://docs.conda.io/projects/conda/en/latest/user-guide/install/rpm-debian.html
-RUN \
-  apt-get update && \
-  apt-get install -yq gpg && \
-  curl -s https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \
-  install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \
-  gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \
-  echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list && \
-  apt-get update && \
-  apt-get install -yq --no-install-recommends conda && \
-  apt-get clean && \
-  rm -rf /var/lib/apt/lists/*
-
-ENV CONDA_DIR=/opt/conda
-ENV PATH=$CONDA_DIR/bin:$PATH
-
-COPY requirements-conda.txt /tmp
-RUN \
-  conda install --quiet --yes --channel=conda-forge --file=/tmp/requirements-conda.txt && \
-  echo "$CONDA_DIR/lib" > /etc/ld.so.conf.d/conda.conf && \
-  ldconfig && \
-  conda clean --all --force-pkgs-dirs --yes --quiet
-
-# Work-around for pyproj issue https://github.com/pyproj4/pyproj/issues/415
-ENV PROJ_LIB=/opt/conda/share/proj
-
diff --git a/.github/image/Makefile b/.github/image/Makefile
deleted file mode 100644
index 1dab66b65..000000000
--- a/.github/image/Makefile
+++ /dev/null
@@ -1,27 +0,0 @@
-IMAGE_NAME=debian-openjdk-conda-gdal
-SHA=$(shell git log -n1 --format=format:"%H" | cut -c 1-7)
-VERSION?=$(SHA)
-HOST=docker.io
-REPO=$(HOST)/s22s
-FULL_NAME=$(REPO)/$(IMAGE_NAME):$(VERSION)
-
-.DEFAULT_GOAL := help
-help:
-# http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
-	@echo "Usage: make [target]"
-	@echo "Targets: "
-	@grep -E '^[a-zA-Z0-9_%/-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\t\033[36m%-20s\033[0m %s\n", $$1, $$2}'
-
-all: build push ## Build and then push image
-
-build: ## Build the docker image
-	docker build . -t ${FULL_NAME}
-
-login: ## Login to the docker registry
-	docker login
-
-push: login ## Push docker image to registry
-	docker push ${FULL_NAME}
-
-run: build ## Build image and launch shell
-	docker run --rm -it ${FULL_NAME} bash
diff --git a/.github/image/requirements-conda.txt b/.github/image/requirements-conda.txt
deleted file mode 100644
index a8ebfd56b..000000000
--- a/.github/image/requirements-conda.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-python==3.8
-gdal==3.1.2
-libspatialindex
-rasterio[s3]
-rtree
\ No newline at end of file
diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml
deleted file mode 100644
index 97afa087b..000000000
--- a/.github/workflows/build-test.yml
+++ /dev/null
@@ -1,66 +0,0 @@
-name: Build and Test
-
-on:
-  pull_request:
-    branches: ['**']
-  push:
-    branches: ['master', 'develop', 'release/*', 'spark-3.2']
-    tags: [v*]
-  release:
-    types: [published]
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v2
-        with:
-          fetch-depth: 0
-      - uses: coursier/cache-action@v6
-      - name: Setup JDK
-        uses: actions/setup-java@v3
-        with:
-          distribution: temurin
-          java-version: 8
-          cache: sbt
-
-      # Do just the compilation stage to minimize sbt memory footprint
-      - name: Compile
-        run: sbt -v -batch compile test:compile it:compile
-
-      - name: Core tests
-        run: sbt -batch core/test
-
-      - name: Datasource tests
-        run: sbt -batch datasource/test
-
-      - name: Experimental tests
-        run: sbt -batch experimental/test
-
-      ## TODO: Update python build to be PEP 517 compatible
-      # - name: Install Conda dependencies
-      #   run: |
-      #     # $CONDA_DIR is an environment variable pointing to the root of the miniconda directory
-      #     $CONDA_DIR/bin/conda install -c conda-forge --yes --file pyrasterframes/src/main/python/requirements-condaforge.txt
-      # - name: Create PyRasterFrames package
-      #   run: sbt -v -batch pyrasterframes/package
-      # - name: Python tests
-      #   run: sbt -batch pyrasterframes/test
-
-      - name: Collect artifacts
-        if: ${{ failure() }}
-        run: |
-          mkdir -p /tmp/core_dumps
-          ls -lh /tmp
-          cp core.* *.hs /tmp/core_dumps/ 2> /dev/null || true
-          cp ./core/*.log /tmp/core_dumps/ 2> /dev/null || true
-          cp -r /tmp/hsperfdata* /tmp/*.hprof /tmp/core_dumps/ 2> /dev/null || true
-          cp repo/core/core/* /tmp/core_dumps/ 2> /dev/null || true
-
-      - name: Upload core dumps
-        if: ${{ failure() }}
-        uses: actions/upload-artifact@v2
-        with:
-          name: core-dumps
-          path: /tmp/core_dumps
\ No newline at end of file
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 000000000..668b34546
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,122 @@
+name: Continuous Integration
+
+on:
+  pull_request:
+    branches:
+      - '**'
+  push:
+    branches:
+      - '**'
+    tags:
+      - 'v*'
+
+jobs:
+
+  build-scala:
+    runs-on: ubuntu-20.04
+
+    steps:
+      - name: Checkout Repository
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - name: Setup Scala Build Tools
+        uses: ./.github/actions/init-scala-env
+
+      - name: Compile Scala Project
+        run: make compile-scala
+
+      - name: Test Scala Project
+        # python/* branches are not supposed to change scala code, trust them
+        if: ${{ !startsWith(github.event.inputs.from_branch, 'python/') }}
+        run: make test-scala
+
+      - name: Build Spark Assembly
+        shell: bash
+        run: make build-scala
+
+      - name: Cache Spark Assembly
+        uses: actions/cache@v3
+        with:
+          path: ./dist/*
+          key: dist-${{ github.sha }}
+
+  build-python:
+    # scala/* branches are not supposed to change python code, trust them
+    if: ${{ !startsWith(github.event.inputs.from_branch, 'scala/') }}
+    runs-on: ubuntu-20.04
+    needs: build-scala
+
+    strategy:
+      matrix:
+        python: [ "3.8" ]
+
+    steps:
+      - name: Checkout Repository
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - uses: ./.github/actions/init-python-env
+        with:
+          python_version: ${{ matrix.python }}
+
+      - name: Static checks
+        shell: bash
+        run: make lint-python
+
+      - uses: actions/cache@v3
+        with:
+          path: ./dist/*
+          key: dist-${{ github.sha }}
+
+      - name: Run tests
+        shell: bash
+        run: make test-python-quick
+
+  publish:
+    name: Publish Artifacts
+    needs: [ build-scala, build-python ]
+    runs-on: ubuntu-20.04
+    if: (github.event_name != 'pull_request') && startsWith(github.ref, 'refs/tags/v')
+
+    steps:
+      - name: Checkout Repository
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - name: Setup Scala Build Tools
+        uses: ./.github/actions/init-scala-env
+
+      - name: Publish JARs to GitHub Packages
+        shell: bash
+        env:
+          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
+        run: make publish-scala
+
+      - uses: ./.github/actions/init-python-env
+        with:
+          python_version: "3.8"
+
+      - name: Build Python whl
+        shell: bash
+        run: make build-python
+
+
+# TODO: Where does this go, do we need it?
+#      - name: upload artefacts
+#        uses: ./.github/actions/upload_artefacts
+
+# TODO: Where does this go, do we need it?
+#      - uses: actions/cache@v3
+#        with:
+#          path: ./dist/*
+#          key: dist-${{ github.sha }}
+
+# TODO: Where does this go?
+#      - name: upload wheel
+#        working-directory: dist
+#        shell: bash
+#        run:
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 100b78d4f..ddf7b107d 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -1,3 +1,4 @@
+# TODO: This needs refactor
 name: Compile documentation
 
 on:
diff --git a/.gitignore b/.gitignore
index ac5807ecd..b8ce6ce00 100644
--- a/.gitignore
+++ b/.gitignore
@@ -48,3 +48,16 @@ rf-notebook/src/main/notebooks/.ipython
 .bloop
 metals.sbt
 *.parquet/
+
+# Python
+
+.coverage
+.venv
+htmlcov
+dist/
+docs/*.md
+docs/*.ipynb
+__pycache__
+*.pipe/
+.coverage*
+*.jar
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 000000000..9142d0b3c
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,24 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+
+files: ^python/
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.0.1
+    hooks:
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
+  - repo: local
+    hooks:
+      - id: black
+        name: black formatting
+        language: system
+        types: [python]
+        entry: poetry run black
+
+      - id: isort
+        name: isort import sorting
+        language: system
+        types: [python]
+        entry: poetry run isort
+        args: ["--profile", "black"]
diff --git a/Makefile b/Makefile
new file mode 100644
index 000000000..486335119
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,90 @@
+SHELL := /usr/bin/env bash
+
+.PHONY: init test lint build docs notebooks help
+
+help:
+	@echo "init      - Setup the repository"
+	@echo "clean     - clean all compiled python files, build artifacts and virtual envs. Run \`make init\` anew afterwards."
+	@echo "test      - run unit tests"
+	@echo "lint      - run linter and checks"
+	@echo "build     - build wheel"
+	@echo "docs      - build documentations"
+	@echo "help      - this command"
+
+test: test-scala test-python
+
+###############
+# SCALA
+###############
+
+compile-scala:
+	sbt -v -batch compile test:compile it:compile
+
+test-scala: test-core-scala test-datasource-scala test-experimental-scala
+
+test-core-scala:
+	sbt -batch core/test
+
+test-datasource-scala:
+	sbt -batch datasource/test
+
+test-experimental-scala:
+	sbt -batch experimental/test
+
+build-scala:
+	sbt "pyrasterframes/assembly"
+
+clean-scala:
+	sbt clean
+
+publish-scala:
+	sbt publish
+
+################
+# PYTHON
+################
+
+init-python:
+	python -m venv ./.venv
+	./.venv/bin/python -m pip install --upgrade pip
+	poetry self add "poetry-dynamic-versioning[plugin]"
+	poetry install
+	poetry run pre-commit install
+
+test-python: build-scala
+	poetry run pytest -vv python/tests --cov=python/pyrasterframes --cov=python/geomesa_pyspark --cov-report=term-missing
+
+test-python-quick:
+	poetry run pytest -vv python/tests --cov=python/pyrasterframes --cov=python/geomesa_pyspark --cov-report=term-missing
+
+lint-python:
+	poetry run pre-commit run --all-file
+
+build-python: clean-build-python
+	poetry build
+
+docs-python: clean-docs-python
+	poetry run python python/docs/build_docs.py
+
+notebooks-python: clean-notebooks-python
+	poetry run python python/docs/build_docs.py --format notebook
+
+clean-python: clean-build-python clean-test-python clean-venv-python clean-docs-python clean-notebooks-python
+
+clean-build-python:
+	find ./dist -name 'pyrasterframes*.whl' -exec rm -fr {} +
+	find ./dist -name 'pyrasterframes*.tar.gz' -exec rm -fr {} +
+
+clean-test-python:
+	rm -f .coverage
+	rm -fr htmlcov/
+	rm -fr test*.pipe
+
+clean-venv-python:
+	rm -fr .venv/
+
+clean-docs-python:
+	find docs -name '*.md' -exec rm -f {} +
+
+clean-notebooks-python:
+	find docs -name '*.ipynb' -exec rm -f {} +
diff --git a/datasource/src/main/resources/slippy.html b/datasource/src/main/resources/slippy.html
index 96cf2d168..83bd67357 100644
--- a/datasource/src/main/resources/slippy.html
+++ b/datasource/src/main/resources/slippy.html
@@ -23,7 +23,7 @@