From 727761617a930266d3c5bef20d98083f75ed14bb Mon Sep 17 00:00:00 2001
From: Owen
Date: Thu, 24 Aug 2023 17:03:11 +0100
Subject: [PATCH 001/195] Marked existing as old, added new flow basics

---
 .../{document.yml => OLD_document.yml}         |  6 ++-
 ..._sdist.yml => OLD_pythonpublish_sdist.yml}  |  7 ++-
 .github/workflows/{tests.yml => OLD_tests.yml} | 11 ++--
 ...abels.yml => OLD_update-linear-labels.yml}  |  7 ++-
 .github/workflows/code_quality.yml             | 45 ++++++++++++++++
 .github/workflows/documentation.yml            | 52 +++++++++++++++++++
 .github/workflows/release.yml                  | 47 +++++++++++++++++
 7 files changed, 165 insertions(+), 10 deletions(-)
 rename .github/workflows/{document.yml => OLD_document.yml} (96%)
 rename .github/workflows/{pythonpublish_sdist.yml => OLD_pythonpublish_sdist.yml} (92%)
 rename .github/workflows/{tests.yml => OLD_tests.yml} (95%)
 rename .github/workflows/{update-linear-labels.yml => OLD_update-linear-labels.yml} (86%)
 create mode 100644 .github/workflows/code_quality.yml
 create mode 100644 .github/workflows/documentation.yml
 create mode 100644 .github/workflows/release.yml

diff --git a/.github/workflows/document.yml b/.github/workflows/OLD_document.yml
similarity index 96%
rename from .github/workflows/document.yml
rename to .github/workflows/OLD_document.yml
index 6ac125eb5..0c5b3deee 100644
--- a/.github/workflows/document.yml
+++ b/.github/workflows/OLD_document.yml
@@ -1,8 +1,10 @@
 name: Documentation
 
+# on:
+#   push:
+#     branches: ["master"]
+#   workflow_dispatch:
 on:
-  push:
-    branches: ["master"]
   workflow_dispatch:
 
 permissions:
diff --git a/.github/workflows/pythonpublish_sdist.yml b/.github/workflows/OLD_pythonpublish_sdist.yml
similarity index 92%
rename from .github/workflows/pythonpublish_sdist.yml
rename to .github/workflows/OLD_pythonpublish_sdist.yml
index 4d923ddc9..b9f881b9f 100644
--- a/.github/workflows/pythonpublish_sdist.yml
+++ b/.github/workflows/OLD_pythonpublish_sdist.yml
@@ -1,8 +1,11 @@
 name: Upload Python Package (SDIST)
 
+# on:
+#   release:
+#     types: [created]
+#   workflow_dispatch:
+
 on:
-  release:
-    types: [created]
   workflow_dispatch:
 
 jobs:
diff --git a/.github/workflows/tests.yml b/.github/workflows/OLD_tests.yml
similarity index 95%
rename from .github/workflows/tests.yml
rename to .github/workflows/OLD_tests.yml
index 8694d7f97..a73073106 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/OLD_tests.yml
@@ -1,10 +1,13 @@
 name: Testing
 
+# on:
+#   push:
+#     branches:
+#       - master
+#   pull_request:
+
 on:
-  push:
-    branches:
-      - master
-  pull_request:
+  workflow_dispatch:
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
diff --git a/.github/workflows/update-linear-labels.yml b/.github/workflows/OLD_update-linear-labels.yml
similarity index 86%
rename from .github/workflows/update-linear-labels.yml
rename to .github/workflows/OLD_update-linear-labels.yml
index 7dcec5569..171dd36d3 100644
--- a/.github/workflows/update-linear-labels.yml
+++ b/.github/workflows/OLD_update-linear-labels.yml
@@ -1,8 +1,11 @@
 name: "Update Linear Labels"
 
+# on:
+#   pull_request:
+#     branches: [master]
+
 on:
-  pull_request:
-    branches: [master]
+  workflow_dispatch:
 
 jobs:
   update-linear:
diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml
new file mode 100644
index 000000000..1fc079bda
--- /dev/null
+++ b/.github/workflows/code_quality.yml
@@ -0,0 +1,45 @@
+name: pull_request
+run-name: Pull Request
+
+on:
+  pull_request:
+    types: [opened, synchronize, reopened, closed]
+
+
+# Prevent running concurrently
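# (Assuming standard GitHub Actions semantics: runs that share the same
# concurrency group value queue rather than run in parallel, and with
# cancel-in-progress enabled a new push to the same head ref cancels the
# run already in flight, so only the latest push to a PR gets checked.)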
+concurrency:
+  group: ${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+
+  #Check format of code
+  format:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          ref: ${{ github.head_ref || github.ref }}
+      - name: Format code
+        run: python3 deploy/format.py ${{ github.workspace }}
+
+  # Check code for linting errors
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          ref: ${{ github.head_ref || github.ref }}
+      - name: Run tests
+        run: python3 deploy/lint.py ${{ github.workspace }}
+
+  # Run tests
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          ref: ${{ github.head_ref || github.ref }}
+      - name: Run tests
+        run: python3 deploy/test.py ${{ github.workspace }}
+
\ No newline at end of file
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
new file mode 100644
index 000000000..ff7adba5f
--- /dev/null
+++ b/.github/workflows/documentation.yml
@@ -0,0 +1,52 @@
+name: documentation
+run-name: Generate Documentation
+
+on:
+  push:
+    branches:
+      - master
+
+concurrency:
+  group: documentation
+  cancel-in-progress: true
+
+
+permissions:
+  id-token: write # This is required for requesting the JWT
+  contents: read  # This is required for actions/checkout
+
+env:
+  AWS_REGION: eu-west-1
+  AWS_SESSION_NAME: darwinPyDocumentation
+
+jobs:
+  generate-docs:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: [3.8, 3.9, "3.10", "3.11"]
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: ${{ github.workspace }}/scripts/docs-dependencies.sh
+      - name: Parse README
+        run: ${{ github.workspace }}/scripts/docs_parse_readme.sh
+      - name: Generate new docs
+        env:
+          PYTHONPATH: "."
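# (PYTHONPATH is set to the checkout root so Sphinx can import the package
# in place; a minimal sketch of what the generate step below amounts to,
# assuming the sphinx-apidoc/sphinx-build calls used later in this series:
#   PYTHONPATH=. sphinx-apidoc -f -o source darwin darwin/future
#   PYTHONPATH=. sphinx-build -b html source/ docs/ -W
# )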
+ run: ${{ github.workspace }}/scripts/docs_generate_docs.sh + + - name: Setup access to AWS + uses: aws-actions/configure-aws-credentials@v2 + with: + role-to-assume: ${{ secrets.DARWIN_PY_AWS_GITHUB_CICD_ROLE }} + role-session-name: ${{ env.AWS_SESSION_NAME }} + aws-region: ${{ env.AWS_REGION }} + - name: Upload docs to S3 + run: aws s3 cp docs/ s3://darwin-py-sdk.v7labs.com/ --recursive + + \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 000000000..46185e7b1 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,47 @@ +name: created_tag +run-name: Created Tag + +on: + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' + - 'test-*' + +concurrency: + group: created_tag + cancel-in-progress: true + +jobs: + + checkout: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + name: Checkout + with: + ref: ${{ github.head_ref || github.ref }} + + check_master_is_passing: + runs-on: ubuntu-latest + needs: checkout + steps: + - name: Check master is passing + uses: actions/github-script@v3 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const { data: { state } } = await github.repos.getCombinedStatusForRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: 'master' + }); + if (state !== 'success') { + core.setFailed('master branch is not passing, cannot create a release'); + } + + deploy: + needs: [checkout, check_master_is_passing] + runs-on: ubuntu-latest + steps: + - name: Deploy + run: python3 release.py ${{ github.ref }} From c3bccbfd552bd239d6947417afa68c4901df4bcb Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 24 Aug 2023 17:03:27 +0100 Subject: [PATCH 002/195] Bare bones of initial scripts for testing --- deploy/build_documentation.py | 18 ++++++++++++++++++ deploy/deploy.py | 18 ++++++++++++++++++ deploy/docs_dependencies.sh | 17 +++++++++++++++++ deploy/docs_generate_docs.sh | 7 +++++++ deploy/docs_parse_readme.sh | 7 +++++++ deploy/format.py | 18 ++++++++++++++++++ deploy/lint.py | 18 ++++++++++++++++++ deploy/run_e2e_tests.py | 18 ++++++++++++++++++ deploy/run_tests.py | 18 ++++++++++++++++++ deploy/validate_version_and_update.py | 18 ++++++++++++++++++ e2e_tests/teardown_tests.py | 1 + 11 files changed, 158 insertions(+) create mode 100644 deploy/build_documentation.py create mode 100644 deploy/deploy.py create mode 100644 deploy/docs_dependencies.sh create mode 100644 deploy/docs_generate_docs.sh create mode 100644 deploy/docs_parse_readme.sh create mode 100644 deploy/format.py create mode 100644 deploy/lint.py create mode 100644 deploy/run_e2e_tests.py create mode 100644 deploy/run_tests.py create mode 100644 deploy/validate_version_and_update.py create mode 100644 e2e_tests/teardown_tests.py diff --git a/deploy/build_documentation.py b/deploy/build_documentation.py new file mode 100644 index 000000000..307a15823 --- /dev/null +++ b/deploy/build_documentation.py @@ -0,0 +1,18 @@ +#! /usr/bin/env python3 +import logging +import sys + +logger = logging.getLogger(__name__) + +logger.setLevel(logging.INFO) + + +def main() -> None: + # TODO: Implement + logger.info("This function is not yet implemented") + logger.info(f"This file is {__file__}") + logger.info("args: {}".format(sys.argv)) + + +if __name__ == "__main__": + main() diff --git a/deploy/deploy.py b/deploy/deploy.py new file mode 100644 index 000000000..307a15823 --- /dev/null +++ b/deploy/deploy.py @@ -0,0 +1,18 @@ +#! 
/usr/bin/env python3 +import logging +import sys + +logger = logging.getLogger(__name__) + +logger.setLevel(logging.INFO) + + +def main() -> None: + # TODO: Implement + logger.info("This function is not yet implemented") + logger.info(f"This file is {__file__}") + logger.info("args: {}".format(sys.argv)) + + +if __name__ == "__main__": + main() diff --git a/deploy/docs_dependencies.sh b/deploy/docs_dependencies.sh new file mode 100644 index 000000000..1e3e136bc --- /dev/null +++ b/deploy/docs_dependencies.sh @@ -0,0 +1,17 @@ +#!/usr/bin env python3 + +# TODO: refactor as needed + +python -m pip install --upgrade pip +pip install poetry +poetry install --all-extras --no-interaction --no-root +pip install wheel +pip install --upgrade setuptools +pip install --editable ".[test,ml,medical,dev]" +pip install torch torchvision +pip install -U sphinx +# Locking mistune version so m2r works. More info on issue: +# https://github.com/miyakogi/m2r/issues/66 +pip install mistune==0.8.4 # TODO: Mistune is now at version 3, so this is quite old, look into upgrading +pip install m2r # TODO: m2r is deprecated. Find alternative. +pip install sphinx_rtd_theme \ No newline at end of file diff --git a/deploy/docs_generate_docs.sh b/deploy/docs_generate_docs.sh new file mode 100644 index 000000000..a266bbb3e --- /dev/null +++ b/deploy/docs_generate_docs.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +#TODO: refactor as needed + +rm -rf docs/* +sphinx-apidoc -f -o source darwin darwin/future +sphinx-build -b html source/ docs/ -W \ No newline at end of file diff --git a/deploy/docs_parse_readme.sh b/deploy/docs_parse_readme.sh new file mode 100644 index 000000000..07ca87af6 --- /dev/null +++ b/deploy/docs_parse_readme.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +# TODO: refactor as needed + +rm -f README.rst +m2r README.md +mv README.rst source/ \ No newline at end of file diff --git a/deploy/format.py b/deploy/format.py new file mode 100644 index 000000000..307a15823 --- /dev/null +++ b/deploy/format.py @@ -0,0 +1,18 @@ +#! /usr/bin/env python3 +import logging +import sys + +logger = logging.getLogger(__name__) + +logger.setLevel(logging.INFO) + + +def main() -> None: + # TODO: Implement + logger.info("This function is not yet implemented") + logger.info(f"This file is {__file__}") + logger.info("args: {}".format(sys.argv)) + + +if __name__ == "__main__": + main() diff --git a/deploy/lint.py b/deploy/lint.py new file mode 100644 index 000000000..307a15823 --- /dev/null +++ b/deploy/lint.py @@ -0,0 +1,18 @@ +#! /usr/bin/env python3 +import logging +import sys + +logger = logging.getLogger(__name__) + +logger.setLevel(logging.INFO) + + +def main() -> None: + # TODO: Implement + logger.info("This function is not yet implemented") + logger.info(f"This file is {__file__}") + logger.info("args: {}".format(sys.argv)) + + +if __name__ == "__main__": + main() diff --git a/deploy/run_e2e_tests.py b/deploy/run_e2e_tests.py new file mode 100644 index 000000000..307a15823 --- /dev/null +++ b/deploy/run_e2e_tests.py @@ -0,0 +1,18 @@ +#! 
/usr/bin/env python3 +import logging +import sys + +logger = logging.getLogger(__name__) + +logger.setLevel(logging.INFO) + + +def main() -> None: + # TODO: Implement + logger.info("This function is not yet implemented") + logger.info(f"This file is {__file__}") + logger.info("args: {}".format(sys.argv)) + + +if __name__ == "__main__": + main() diff --git a/deploy/run_tests.py b/deploy/run_tests.py new file mode 100644 index 000000000..307a15823 --- /dev/null +++ b/deploy/run_tests.py @@ -0,0 +1,18 @@ +#! /usr/bin/env python3 +import logging +import sys + +logger = logging.getLogger(__name__) + +logger.setLevel(logging.INFO) + + +def main() -> None: + # TODO: Implement + logger.info("This function is not yet implemented") + logger.info(f"This file is {__file__}") + logger.info("args: {}".format(sys.argv)) + + +if __name__ == "__main__": + main() diff --git a/deploy/validate_version_and_update.py b/deploy/validate_version_and_update.py new file mode 100644 index 000000000..307a15823 --- /dev/null +++ b/deploy/validate_version_and_update.py @@ -0,0 +1,18 @@ +#! /usr/bin/env python3 +import logging +import sys + +logger = logging.getLogger(__name__) + +logger.setLevel(logging.INFO) + + +def main() -> None: + # TODO: Implement + logger.info("This function is not yet implemented") + logger.info(f"This file is {__file__}") + logger.info("args: {}".format(sys.argv)) + + +if __name__ == "__main__": + main() diff --git a/e2e_tests/teardown_tests.py b/e2e_tests/teardown_tests.py new file mode 100644 index 000000000..8862d2f8f --- /dev/null +++ b/e2e_tests/teardown_tests.py @@ -0,0 +1 @@ +# TODO: In IO:1336 From 1c5fcd6b68ff989d4955d22001e7a6cfb644e67a Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 25 Aug 2023 16:22:26 +0100 Subject: [PATCH 003/195] Written basic shell scripts for most steps --- .github/workflows/code_quality.yml | 10 +++++--- deploy/build.sh | 27 ++++++++++++++++++++++ deploy/check_python.sh | 25 ++++++++++++++++++++ deploy/format.py | 18 --------------- deploy/format.sh | 19 +++++++++++++++ deploy/install_deps.sh | 37 ++++++++++++++++++++++++++++++ deploy/lint.py | 18 --------------- deploy/lint.sh | 19 +++++++++++++++ deploy/run_e2e_tests.py | 18 --------------- deploy/run_e2e_tests.sh | 4 ++++ deploy/run_tests.py | 18 --------------- deploy/run_tests.sh | 23 +++++++++++++++++++ 12 files changed, 161 insertions(+), 75 deletions(-) create mode 100644 deploy/build.sh create mode 100644 deploy/check_python.sh delete mode 100644 deploy/format.py create mode 100644 deploy/format.sh create mode 100644 deploy/install_deps.sh delete mode 100644 deploy/lint.py create mode 100644 deploy/lint.sh delete mode 100644 deploy/run_e2e_tests.py create mode 100644 deploy/run_e2e_tests.sh delete mode 100644 deploy/run_tests.py create mode 100644 deploy/run_tests.sh diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 1fc079bda..0a7f4a2f7 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -21,7 +21,9 @@ jobs: with: ref: ${{ github.head_ref || github.ref }} - name: Format code - run: python3 deploy/format.py ${{ github.workspace }} + # Lints and formats are called only against the changed files, because other + # files must have come from master + run: bash deploy/format.sh ${{ github.head_ref || github.ref }} # Check code for linting errors lint: @@ -31,7 +33,9 @@ jobs: with: ref: ${{ github.head_ref || github.ref }} - name: Run tests - run: python3 deploy/lint.py ${{ github.workspace }} + # Lints and formats are called 
only against the changed files, because other + # files must have come from master + run: python3 deploy/lint.sh ${{ github.head_ref || github.ref }} # Run tests test: @@ -41,5 +45,5 @@ jobs: with: ref: ${{ github.head_ref || github.ref }} - name: Run tests - run: python3 deploy/test.py ${{ github.workspace }} + run: python3 deploy/test.sh ${{ github.workspace }} \ No newline at end of file diff --git a/deploy/build.sh b/deploy/build.sh new file mode 100644 index 000000000..4036df004 --- /dev/null +++ b/deploy/build.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +# Check python and pip are installed +echo "Check that python3 and pip3 are installed" +./deploy/check_python.sh || exit $? + + +echo "Check that poetry is installed" +if ! command -v poetry &> /dev/null +then + # Try to run install deps script, and if that fails, exit gracefully + echo "Poetry could not be found" + echo "Installing dependencies" + + .deploy/install_deps.sh || exit 1 +fi + +# Check poetry is installed +if ! command -v poetry &> /dev/null +then + echo "Poetry could not be found after dependency install" + exit 2 +fi + +poetry build || exit 3 + + diff --git a/deploy/check_python.sh b/deploy/check_python.sh new file mode 100644 index 000000000..b2407a70e --- /dev/null +++ b/deploy/check_python.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +# Confirm that python 3.8 or higher is installed and pip installed + +# Check python is installed +if ! command -v python3 &> /dev/null +then + echo "Python3 could not be found" + exit 1 +fi + +# Check python version is 3.8 or higher +if [[ $(python3 -c 'import sys; print(sys.version_info >= (3, 8))') != "True" ]] +then + echo "Python version 3.8 or higher is required" + exit 2 +fi + +# Check pip is installed +if ! command -v pip3 &> /dev/null +then + echo "pip3 could not be found" + exit 3 +fi + +exit 0 \ No newline at end of file diff --git a/deploy/format.py b/deploy/format.py deleted file mode 100644 index 307a15823..000000000 --- a/deploy/format.py +++ /dev/null @@ -1,18 +0,0 @@ -#! /usr/bin/env python3 -import logging -import sys - -logger = logging.getLogger(__name__) - -logger.setLevel(logging.INFO) - - -def main() -> None: - # TODO: Implement - logger.info("This function is not yet implemented") - logger.info(f"This file is {__file__}") - logger.info("args: {}".format(sys.argv)) - - -if __name__ == "__main__": - main() diff --git a/deploy/format.sh b/deploy/format.sh new file mode 100644 index 000000000..9e46e479a --- /dev/null +++ b/deploy/format.sh @@ -0,0 +1,19 @@ +#! /usr/bin/env bash +# Check formatting +# This script is intended for CI/CD, but can be run locally +# +# Exit: +# 0 - Success +# 1 - Called with incorrect number of arguments +# 2 - Python3 or dependency not found +# 3 - Black formatting failed + +if [ "$#" -ne 1 ]; then + echo "Usage: $0 " + exit 1 +fi + +echo "Checking formatting of reference: $1" +./check_python.sh || ./install_deps.sh || exit 2 + +git diff --name-only master.."$1" | grep -E '\.py$' | xargs | python3 -m poetry run black --check --diff --no-color . 
|| exit 3 \ No newline at end of file diff --git a/deploy/install_deps.sh b/deploy/install_deps.sh new file mode 100644 index 000000000..026589690 --- /dev/null +++ b/deploy/install_deps.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +# This script installs dependencies for the project +# It is intended for CI/CD, but can be run locally +# It will exit at any point that fails, and will return different exit codes: +# 1 - Python3 not found +# 2 - Python version is not 3.8 or higher +# 3 - pip3 not found +# 4 - Poetry not found after attempted install +# 5 - pip3 upgrade failed +# 6 - Poetry install failed + + +echo "Installing dependencies" + +# Check python is installed +./deploy/check_python.sh || exit $? + +# Check poetry is installed, and install if not +if ! command -v poetry &> /dev/null +then + curl -sSL https://install.python-poetry.org | python3 - +fi + +# Check poetry is installed +if ! command -v poetry &> /dev/null +then + echo "Poetry could not be found" + exit 4 +fi + +# Install dependencies +python3 -m pip install --upgrade pip || exit 5 +python3 -m poetry install --all-extras --no-interaction --no-root || exit 6 + +echo "Dependencies installed" + +exit 0 \ No newline at end of file diff --git a/deploy/lint.py b/deploy/lint.py deleted file mode 100644 index 307a15823..000000000 --- a/deploy/lint.py +++ /dev/null @@ -1,18 +0,0 @@ -#! /usr/bin/env python3 -import logging -import sys - -logger = logging.getLogger(__name__) - -logger.setLevel(logging.INFO) - - -def main() -> None: - # TODO: Implement - logger.info("This function is not yet implemented") - logger.info(f"This file is {__file__}") - logger.info("args: {}".format(sys.argv)) - - -if __name__ == "__main__": - main() diff --git a/deploy/lint.sh b/deploy/lint.sh new file mode 100644 index 000000000..39dc8ff74 --- /dev/null +++ b/deploy/lint.sh @@ -0,0 +1,19 @@ +#! /usr/bin/env bash +# Check PEP8 compliance and other linting +# This script is intended for CI/CD, but can be run locally +# +# Exit: +# 0 - Success +# 1 - Called with incorrect number of arguments +# 2 - Python3 or dependency not found +# 3 - PEP8 compliance failed + +if [ "$#" -ne 1 ]; then + echo "Usage: $0 " + exit 1 +fi + +echo "Checking linting compliance of reference: $1" +./check_python.sh || ./install_deps.sh || exit 2 + +git diff --name-only master.."$1" | grep -E '\.py$' | xargs | python3 -m poetry run flake8 --diff --no-color . || exit 3 \ No newline at end of file diff --git a/deploy/run_e2e_tests.py b/deploy/run_e2e_tests.py deleted file mode 100644 index 307a15823..000000000 --- a/deploy/run_e2e_tests.py +++ /dev/null @@ -1,18 +0,0 @@ -#! /usr/bin/env python3 -import logging -import sys - -logger = logging.getLogger(__name__) - -logger.setLevel(logging.INFO) - - -def main() -> None: - # TODO: Implement - logger.info("This function is not yet implemented") - logger.info(f"This file is {__file__}") - logger.info("args: {}".format(sys.argv)) - - -if __name__ == "__main__": - main() diff --git a/deploy/run_e2e_tests.sh b/deploy/run_e2e_tests.sh new file mode 100644 index 000000000..3301716c7 --- /dev/null +++ b/deploy/run_e2e_tests.sh @@ -0,0 +1,4 @@ +#! /usr/bin/env bash + +echo "Not yet implemented" +exit 1 diff --git a/deploy/run_tests.py b/deploy/run_tests.py deleted file mode 100644 index 307a15823..000000000 --- a/deploy/run_tests.py +++ /dev/null @@ -1,18 +0,0 @@ -#! 
/usr/bin/env python3 -import logging -import sys - -logger = logging.getLogger(__name__) - -logger.setLevel(logging.INFO) - - -def main() -> None: - # TODO: Implement - logger.info("This function is not yet implemented") - logger.info(f"This file is {__file__}") - logger.info("args: {}".format(sys.argv)) - - -if __name__ == "__main__": - main() diff --git a/deploy/run_tests.sh b/deploy/run_tests.sh new file mode 100644 index 000000000..c9564a96d --- /dev/null +++ b/deploy/run_tests.sh @@ -0,0 +1,23 @@ +#! /usr/bin/env bash +# Run unit tests +# This script is intended for CI/CD, but can be run locally +# +# Exit: +# 0 - Success +# 1 - Called with incorrect number of arguments +# 2 - Python3 or dependency not found +# 3 - Unit tests failed + +if [ "$#" -ne 1 ]; then + echo "Usage: $0 " + exit 1 +fi + +echo "Running unit tests in directory: $1" +./check_python.sh || ./install_deps.sh || exit 2 + +python3 -m poetry run pytest --cov="$1" --cov-report=xml --cov-report=term-missing --cov-fail-under=85 "$1" || exit 3 + +echo "Unit tests passed" + +exit 0 From 98671e0c56f8c4507e4b4e2c8ef6f0a2c9225370 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 25 Aug 2023 17:19:59 +0100 Subject: [PATCH 004/195] WIP --- .github/workflows/code_quality.yml | 38 ++++++++++++++++++++++++--- deploy/build.sh | 0 deploy/build_documentation.py | 0 deploy/check_python.sh | 0 deploy/deploy.py | 0 deploy/docs_dependencies.sh | 0 deploy/docs_generate_docs.sh | 0 deploy/docs_parse_readme.sh | 0 deploy/format.sh | 6 +++-- deploy/install_deps.sh | 0 deploy/lint.sh | 6 +++-- deploy/run_e2e_tests.sh | 0 deploy/run_tests.sh | 22 +++++++++++++--- deploy/validate_version_and_update.py | 0 14 files changed, 60 insertions(+), 12 deletions(-) mode change 100644 => 100755 deploy/build.sh mode change 100644 => 100755 deploy/build_documentation.py mode change 100644 => 100755 deploy/check_python.sh mode change 100644 => 100755 deploy/deploy.py mode change 100644 => 100755 deploy/docs_dependencies.sh mode change 100644 => 100755 deploy/docs_generate_docs.sh mode change 100644 => 100755 deploy/docs_parse_readme.sh mode change 100644 => 100755 deploy/format.sh mode change 100644 => 100755 deploy/install_deps.sh mode change 100644 => 100755 deploy/lint.sh mode change 100644 => 100755 deploy/run_e2e_tests.sh mode change 100644 => 100755 deploy/run_tests.sh mode change 100644 => 100755 deploy/validate_version_and_update.py diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 0a7f4a2f7..7eb6d4adf 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -8,7 +8,7 @@ on: # Prevent running concurrently concurrency: - group: ${{ github.head_ref || github.ref }} + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true jobs: @@ -35,15 +35,45 @@ jobs: - name: Run tests # Lints and formats are called only against the changed files, because other # files must have come from master - run: python3 deploy/lint.sh ${{ github.head_ref || github.ref }} + run: bash deploy/lint.sh ${{ github.head_ref || github.ref }} # Run tests test: - runs-on: ubuntu-latest + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + python-version: [3.8, 3.9, "3.10", "3.11"] + poetry-version: ["1.3.1"] + runs-on: ${{ matrix.os }} steps: + #install python - uses: actions/checkout@v2 with: ref: ${{ github.head_ref || github.ref }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + 
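# (The matrix above expands combinatorially: three operating systems, four
# Python versions and one poetry version fan out to twelve test jobs, each
# installing its own interpreter via actions/setup-python.)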
python-version: ${{ matrix.python-version }} + - name: Upgrade pip + run: python -m pip install --upgrade pip + - name: Setup Poetry + uses: abatilo/actions-poetry@v2 + with: + poetry-version: ${{ matrix.poetry-version }} + - name: Install dependencies + run: | + poetry install --no-interaction --no-root --all-extras -vvv + pip install wheel + pip install --upgrade setuptools + pip install --editable ".[test,ml,medical,dev]" + pip install pytest - name: Run tests - run: python3 deploy/test.sh ${{ github.workspace }} + run: | + pytest --junitxml=${{ github.workspace }}/os-${{ matrix.os }}-py${{ matrix.python-version}}-poetry${{ matrix.poetry-version }}-test-results.xml --cov=src --cov-report=xml --cov-report=term-missing --cov-fail-under=100 + - name: Publish test results + uses: EnricoMi/publish-unit-test-result-action@v2 + if: always() + with: + files: ${GITHUB_WORKSPACE}/**/*test-results.xml \ No newline at end of file diff --git a/deploy/build.sh b/deploy/build.sh old mode 100644 new mode 100755 diff --git a/deploy/build_documentation.py b/deploy/build_documentation.py old mode 100644 new mode 100755 diff --git a/deploy/check_python.sh b/deploy/check_python.sh old mode 100644 new mode 100755 diff --git a/deploy/deploy.py b/deploy/deploy.py old mode 100644 new mode 100755 diff --git a/deploy/docs_dependencies.sh b/deploy/docs_dependencies.sh old mode 100644 new mode 100755 diff --git a/deploy/docs_generate_docs.sh b/deploy/docs_generate_docs.sh old mode 100644 new mode 100755 diff --git a/deploy/docs_parse_readme.sh b/deploy/docs_parse_readme.sh old mode 100644 new mode 100755 diff --git a/deploy/format.sh b/deploy/format.sh old mode 100644 new mode 100755 index 9e46e479a..16f81a114 --- a/deploy/format.sh +++ b/deploy/format.sh @@ -8,12 +8,14 @@ # 2 - Python3 or dependency not found # 3 - Black formatting failed +THIS_FILE_DIRECTORY=`dirname "$0"` + if [ "$#" -ne 1 ]; then echo "Usage: $0 " exit 1 fi echo "Checking formatting of reference: $1" -./check_python.sh || ./install_deps.sh || exit 2 +"$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 -git diff --name-only master.."$1" | grep -E '\.py$' | xargs | python3 -m poetry run black --check --diff --no-color . || exit 3 \ No newline at end of file +git diff --name-only master.."$1" | grep -E '\.py$' | xargs | poetry run black --check --diff --no-color . || exit 3 \ No newline at end of file diff --git a/deploy/install_deps.sh b/deploy/install_deps.sh old mode 100644 new mode 100755 diff --git a/deploy/lint.sh b/deploy/lint.sh old mode 100644 new mode 100755 index 39dc8ff74..aaf74a6d2 --- a/deploy/lint.sh +++ b/deploy/lint.sh @@ -8,12 +8,14 @@ # 2 - Python3 or dependency not found # 3 - PEP8 compliance failed +THIS_FILE_DIRECTORY=`dirname "$0"` + if [ "$#" -ne 1 ]; then echo "Usage: $0 " exit 1 fi echo "Checking linting compliance of reference: $1" -./check_python.sh || ./install_deps.sh || exit 2 +"$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 -git diff --name-only master.."$1" | grep -E '\.py$' | xargs | python3 -m poetry run flake8 --diff --no-color . || exit 3 \ No newline at end of file +git diff --name-only master.."$1" | grep -E '\.py$' | xargs | poetry run flake8 --diff --no-color . 
|| exit 3 \ No newline at end of file diff --git a/deploy/run_e2e_tests.sh b/deploy/run_e2e_tests.sh old mode 100644 new mode 100755 diff --git a/deploy/run_tests.sh b/deploy/run_tests.sh old mode 100644 new mode 100755 index c9564a96d..e9bfb49af --- a/deploy/run_tests.sh +++ b/deploy/run_tests.sh @@ -8,15 +8,29 @@ # 2 - Python3 or dependency not found # 3 - Unit tests failed -if [ "$#" -ne 1 ]; then - echo "Usage: $0 " +THIS_FILE_DIRECTORY=`dirname "$0"` + +if [ "$#" -lt 1 ]; then + echo "Usage: $0 [ ]" exit 1 fi +if [ "$#" -gt 1 ]; then + USING_CICD=1 + OS=$3 + PYTHON_VERSION=$2 +fi + echo "Running unit tests in directory: $1" -./check_python.sh || ./install_deps.sh || exit 2 +"$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 + +# Unit test config is in pyproject.toml and pytest.ini - don't set any here as it will only complicate CI/CD +if [ "$USING_CICD" = 1 ]; then + poetry run pytest $1 -vvv --junit-xml=$0/$PYTHON_VERSION-$OS-test_results.xml || exit 3 + exit 0 +fi -python3 -m poetry run pytest --cov="$1" --cov-report=xml --cov-report=term-missing --cov-fail-under=85 "$1" || exit 3 +poetry run pytest $1 || exit 3 echo "Unit tests passed" diff --git a/deploy/validate_version_and_update.py b/deploy/validate_version_and_update.py old mode 100644 new mode 100755 From 21846b7ed9f1ebae04556e93adb0f01aee8ca5fe Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 25 Aug 2023 17:44:45 +0100 Subject: [PATCH 005/195] Correction to scripts --- .github/workflows/code_quality.yml | 25 +++++++++++++++++++++---- deploy/format.sh | 12 +++++++++--- deploy/lint.sh | 15 ++++++++++++--- 3 files changed, 42 insertions(+), 10 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 7eb6d4adf..39b97409f 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -20,10 +20,18 @@ jobs: - uses: actions/checkout@v2 with: ref: ${{ github.head_ref || github.ref }} + - name: Get changed files + id: changed_files + run: | + if ${{ github.event_name == 'pull_request' }}; then + echo "changed_files=$(git diff --name-only -r HEAD^1 HEAD | xargs)" >> $GITHUB_OUTPUT + else + echo "changed_files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | xargs)" >> $GITHUB_OUTPUT + fi - name: Format code # Lints and formats are called only against the changed files, because other # files must have come from master - run: bash deploy/format.sh ${{ github.head_ref || github.ref }} + run: bash deploy/format.sh ${{ steps.changed_files.outputs.changed_files }} # Check code for linting errors lint: @@ -32,10 +40,18 @@ jobs: - uses: actions/checkout@v2 with: ref: ${{ github.head_ref || github.ref }} - - name: Run tests + - name: Get changed files + id: changed_files + run: | + if ${{ github.event_name == 'pull_request' }}; then + echo "changed_files=$(git diff --name-only -r HEAD^1 HEAD | xargs)" >> $GITHUB_OUTPUT + else + echo "changed_files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | xargs)" >> $GITHUB_OUTPUT + fi + - name: Run linter # Lints and formats are called only against the changed files, because other # files must have come from master - run: bash deploy/lint.sh ${{ github.head_ref || github.ref }} + run: bash deploy/lint.sh ${{ steps.changed_files.outputs.changed_files }} # Run tests test: @@ -73,7 +89,8 @@ jobs: pytest --junitxml=${{ github.workspace }}/os-${{ matrix.os }}-py${{ matrix.python-version}}-poetry${{ matrix.poetry-version 
}}-test-results.xml --cov=src --cov-report=xml --cov-report=term-missing --cov-fail-under=100 - name: Publish test results uses: EnricoMi/publish-unit-test-result-action@v2 - if: always() + # Only run on windows + if: matrix.os == 'windows-latest' with: files: ${GITHUB_WORKSPACE}/**/*test-results.xml \ No newline at end of file diff --git a/deploy/format.sh b/deploy/format.sh index 16f81a114..635ad4337 100755 --- a/deploy/format.sh +++ b/deploy/format.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash # Check formatting -# This script is intended for CI/CD, but can be run locally +# This script is intended for CI/CD only # # Exit: # 0 - Success @@ -9,13 +9,19 @@ # 3 - Black formatting failed THIS_FILE_DIRECTORY=`dirname "$0"` +FILES_CHANGED="$1" + +if ! $CI; then + echo "This script is intended for CI/CD only" + exit 1 +fi if [ "$#" -ne 1 ]; then echo "Usage: $0 " exit 1 fi -echo "Checking formatting of reference: $1" +echo "Checking formatting of files: $1" "$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 -git diff --name-only master.."$1" | grep -E '\.py$' | xargs | poetry run black --check --diff --no-color . || exit 3 \ No newline at end of file +echo "$1" | grep -E '\.py$' | xargs | poetry run black --check --diff --no-color . || exit 3 \ No newline at end of file diff --git a/deploy/lint.sh b/deploy/lint.sh index aaf74a6d2..e5e53a83f 100755 --- a/deploy/lint.sh +++ b/deploy/lint.sh @@ -9,13 +9,22 @@ # 3 - PEP8 compliance failed THIS_FILE_DIRECTORY=`dirname "$0"` +FILES_CHANGED="$1" + +if ! $CI; then + echo "This script is intended for CI/CD only" + exit 1 +fi + +echo "Checking linting compliance of reference: $2 against $1" + if [ "$#" -ne 1 ]; then - echo "Usage: $0 " + echo "Usage: $0 " exit 1 fi -echo "Checking linting compliance of reference: $1" +echo "Checking linting compliance of files: $FILES_CHANGED" "$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 -git diff --name-only master.."$1" | grep -E '\.py$' | xargs | poetry run flake8 --diff --no-color . || exit 3 \ No newline at end of file +echo "$FILES_CHANGED" | grep -E '\.py$' | xargs | poetry run flake8 --diff --no-color . 
|| exit 3
\ No newline at end of file

From 6668eacf1b68ff989d4955d22001e7a6cfb644e67a Mon Sep 17 00:00:00 2001
From: Owen
Date: Tue, 29 Aug 2023 11:56:06 +0100
Subject: [PATCH 006/195] Script that confirms release needs doing

---
 deploy/confirm_main_branch_deployability.py | 170 ++++++++++++++++++++
 pyproject.toml                              |   3 +
 2 files changed, 173 insertions(+)
 create mode 100644 deploy/confirm_main_branch_deployability.py

diff --git a/deploy/confirm_main_branch_deployability.py b/deploy/confirm_main_branch_deployability.py
new file mode 100644
index 000000000..fdc968e9a
--- /dev/null
+++ b/deploy/confirm_main_branch_deployability.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python3
+
+import json
+import logging
+import sys
+from datetime import datetime, timezone
+from enum import IntFlag, auto
+from os import environ
+from subprocess import PIPE, Popen
+from typing import List, Tuple
+
+logger = logging.getLogger(__name__)
+logger.addHandler(logging.StreamHandler(sys.stdout))
+logger.setLevel(logging.DEBUG) if environ.get("DEBUG") else logger.setLevel(logging.INFO)
+
+
+# Set up default constants
+DEFAULT_BRANCH = environ.get("DEFAULT_BRANCH", "master")
+DEFAULT_RELEASE_DAY = environ.get("DEFAULT_RELEASE_DAY", "Tuesday")
+
+
+class ExitCodes(IntFlag):
+    """
+    Exit codes for the script
+    """
+
+    SUCCESS = auto()
+    GETTING_LAST_RELEASE_TAG_THREW_EXITCODE = auto()
+    COULD_NOT_PARSE_LAST_RELEASE_TAG = auto()
+    GETTING_RELEASE_METADATA_THREW_EXITCODE = auto()
+    COULD_NOT_PARSE_RELEASE_METADATA = auto()
+    UNEXPECTED_STRUCTURE_TO_RELEASE_METADATA = auto()
+    GIT_DIFF_THREW_EXITCODE = auto()
+    NO_CHANGES_SINCE_LAST_RELEASE = 128
+
+
+def printl(*args: str) -> None:
+    logger.info(" ".join([str(arg) for arg in args]))
+
+
+def _run_command(command: str, *args: str) -> Tuple[str, int]:
+    """
+    Runs a command and returns its stdout and exit code
+    (similar to subprocess.run, but with PIPE set up front for easier
+    parsing, which subprocess.run makes needlessly complex)
+
+    Parameters
+    ----------
+    command: str
+        The command to run, e.g. "ls"
+    args: List[str]
+        Additional arguments to pass to the command, e.g. ["-l"]
+
+    Returns
+    -------
+    Tuple[str, int]
+        The stdout and exit code of the command
+
+    """
+
+    process = Popen([command, *args], stdout=PIPE, stderr=PIPE)
+    output, error = process.communicate()
+    return output.decode("utf-8"), int(process.returncode)
+
+
+def _exit(message: str, exit_code: ExitCodes) -> None:
+    """
+    Exits the script with an exit code and message
+
+    Parameters
+    ----------
+    message: str
+        The message to print
+    exit_code: ExitCodes
+        The exit code to exit with
+
+    """
+
+    logger.error(message)
+    exit(exit_code.value)
+
+
+def _get_most_recent_release_tag() -> str:
+    """
+    Gets the last release tag from the repo
+    """
+
+    output, error = _run_command("gh", "release", "list", "--limit", "1")
+    assert error == 0, _exit("Failed to get last release tag", ExitCodes.GETTING_LAST_RELEASE_TAG_THREW_EXITCODE)
+
+    release_tag = str(output).split()[0]
+    assert release_tag, _exit("No release tag found", ExitCodes.COULD_NOT_PARSE_LAST_RELEASE_TAG)
+
+    return release_tag
+
+
+def _get_most_recent_release_timestamp(release_tag: str) -> Tuple[str, datetime]:
+    """
+    Gets the last release timestamp from the repo
+    """
+    output, error = _run_command("gh", "release", "view", release_tag, "--json", "name,publishedAt")
+    assert error == 0, _exit("Failed to get last release timestamp", ExitCodes.GETTING_RELEASE_METADATA_THREW_EXITCODE)
+
+    json_output = {}
+    try:
+        json_output = json.loads(output)
+    except json.JSONDecodeError:
+        _exit("Could not parse release metadata", ExitCodes.COULD_NOT_PARSE_RELEASE_METADATA)
+
+    assert "name" in json_output and "publishedAt" in json_output, _exit(
+        "Expected release name and timestamp in metadata", ExitCodes.UNEXPECTED_STRUCTURE_TO_RELEASE_METADATA
+    )
+
+    return json_output["name"], datetime.fromisoformat(json_output["publishedAt"].replace("Z", "+00:00"))
+
+
+def _get_changes_since_last_release(last_release_timestamp: datetime) -> List[str]:
+    """
+    Gets the changes since the last release
+    """
+    SECONDS_IN_A_DAY = 86400
+    seconds_since_last_release: int = int(
+        (
+            datetime.utcnow().astimezone(timezone.utc) - last_release_timestamp.astimezone(timezone.utc)
+        ).total_seconds()  # Whose idea was it to create timedelta.seconds _and_ datetime.total_seconds
+    )
+    gitref_to_compare = "{}@{{{} seconds ago}}".format(DEFAULT_BRANCH, seconds_since_last_release)
+
+    print(
+        f"It's been {seconds_since_last_release} seconds since the last release, about {int(seconds_since_last_release / SECONDS_IN_A_DAY)} days ago"
+    )
+    printl(f"Getting changes since {gitref_to_compare}")
+
+    output, error = _run_command("git", "diff", DEFAULT_BRANCH, gitref_to_compare, "--name-only")
+    assert error == 0, _exit("Failed to get changes since last release", ExitCodes.GIT_DIFF_THREW_EXITCODE)
+
+    files_changed = output.split("\n")
+
+    return [f for f in files_changed if f]
+
+
+def main() -> None:
+    printl("Testing main branch for deployability")
+    printl("This tests whether any changes have been made since the last scheduled deploy")
+
+    printl("Getting most recent release tag")
+    last_release_tag = _get_most_recent_release_tag()
+
+    printl("Getting last release timestamp")
+    last_release_tag, last_release_timestamp = _get_most_recent_release_timestamp(last_release_tag)
+
+    printl(f"Last release timestamp: {last_release_timestamp}")
+    printl(f"Last release tag: {last_release_tag}")
+
+    printl("Getting changes since last release")
+    changes_since_last_release = _get_changes_since_last_release(last_release_timestamp)
+
+    if not changes_since_last_release:
+        printl("No changes since
last release, exiting") + exit(ExitCodes.NO_CHANGES_SINCE_LAST_RELEASE) + + printl(f"Changes since last release ({len(changes_since_last_release)}):") + for i, change in enumerate(changes_since_last_release): + printl(f" {i}: {change}") + + printl("All done, exiting") + exit(ExitCodes.SUCCESS) + + +if __name__ == "__main__": + main() diff --git a/pyproject.toml b/pyproject.toml index c0e44590f..9192df8ca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,9 @@ warn_untyped_fields = true [tool.black] line-length = 160 +[tool.ruff] +line-length = 160 + [tool.flake8] max-line-length = 160 ignore = ["E203", "W503", "E402"] From bb8d0ecfb00b2aa0c6e1a0ac1b39754658aaa6f8 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 29 Aug 2023 14:39:53 +0100 Subject: [PATCH 007/195] Python versioning scripts WIP --- deploy/check_version_validity.py | 0 deploy/increase_version.py | 237 +++++++++++++++++++++++++++++++ 2 files changed, 237 insertions(+) create mode 100644 deploy/check_version_validity.py create mode 100644 deploy/increase_version.py diff --git a/deploy/check_version_validity.py b/deploy/check_version_validity.py new file mode 100644 index 000000000..e69de29bb diff --git a/deploy/increase_version.py b/deploy/increase_version.py new file mode 100644 index 000000000..425e985b3 --- /dev/null +++ b/deploy/increase_version.py @@ -0,0 +1,237 @@ +#!/usr/bin/env python3 + +import argparse +import math +from ast import Not +from dataclasses import dataclass +from os import environ +from pathlib import Path +from statistics import mean +from typing import Tuple + +from requests import get +from toml import loads + +DARWIN_PYPI_INFO_PAGE = environ.get("PYPY_INFO_PAGE", "https://pypi.org/pypi/darwin-py/json") + + +@dataclass +class Version: + major: int + minor: int + patch: int + + _changed = False + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Version): + return False + return self.major == other.major and self.minor == other.minor and self.patch == other.patch + + def __gt__(self, other: object) -> bool: + if not isinstance(other, Version): + return False + return ( + (self.major == other.major) + and (self.minor == other.minor) + and (self.patch > other.patch) + or (self.major == other.major) + and (self.minor > other.minor) + and (self.patch >= other.patch) + or (self.major > other.major) + and (self.minor >= other.minor) + and (self.patch >= other.patch) + ) + + def __lt__(self, other: object) -> bool: + if not isinstance(other, Version): + return False + return ( + (self.major == other.major) + and (self.minor == other.minor) + and (self.patch < other.patch) + or (self.major == other.major) + and (self.minor < other.minor) + and (self.patch <= other.patch) + or (self.major < other.major) + and (self.minor <= other.minor) + and (self.patch <= other.patch) + ) + + def __sub__(self, other: object) -> Tuple[int, int, int]: + if not isinstance(other, Version): + return NotImplemented + + return ( + int(abs(self.major - other.major)), + int(abs(self.minor - other.minor)), + int(abs(self.patch - other.patch)), + ) + + def copy(self) -> "Version": + return Version(self.major, self.minor, self.patch) + + def was_changed(self) -> bool: + return self._changed + + def increment_major(self) -> None: + self.major += 1 + self._changed = True + + def increment_minor(self) -> None: + self.minor += 1 + self._changed = True + + def increment_patch(self) -> None: + self.patch += 1 + self._changed = True + + def __str__(self) -> str: + return f"{self.major}.{self.minor}.{self.patch}" + + 
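# (Note: the clause-chained comparisons above do not give a total semver
# ordering; __gt__ requires minor >= and patch >= even when the major
# version already differs, so Version(2, 0, 0) > Version(1, 9, 9) comes out
# False. Comparing (major, minor, patch) tuples lexicographically avoids
# this, e.g. (2, 0, 0) > (1, 9, 9) is True. A quick shell sanity check of
# the expected ordering, assuming GNU sort is available:
#   printf '1.9.9\n2.0.0\n' | sort -V | tail -n 1   # prints 2.0.0
# )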
+def confirm(question: str) -> bool: + while True: + answer = input(f"{question} [y/n]: ").lower().strip() + if answer in ["y", "yes"]: + return True + elif answer in ["n", "no"]: + return False + else: + print("Invalid input, type 'y' or 'n'") + + +def _get_version() -> Version: + from darwin.version import __version__ + + major, minor, patch = __version__.split(".") + + if not major or not minor or not patch: + raise ValueError("Version not found in darwin.version module") + + return Version(int(major), int(minor), int(patch)) + + +def _get_pyproject_version() -> Version: + pyproject_dir = Path(__file__).parent.parent + pyproject_file = pyproject_dir / "pyproject.toml" + + if not pyproject_file.exists(): + raise FileNotFoundError("pyproject.toml not found") + + with open(pyproject_file, "r") as f: + toml_content = loads(f.read()) + version = toml_content["tool"]["poetry"]["version"] + + if not version: + raise ValueError("Version not found in pyproject.toml") + + major, minor, patch = version.split(".") + if not major or not minor or not patch: + raise ValueError("Version not found in pyproject.toml") + + return Version(int(major), int(minor), int(patch)) + + +def _get_pypi_version(force: bool) -> Version: + response = get(DARWIN_PYPI_INFO_PAGE) + + if not response.ok: + print("PYPI connection not available, sanity checking for PyPi unavailable") + if not force: + if not confirm("Continue without PyPi sanity check?"): + exit(1) + + try: + version_in_pypi = response.json()["info"]["version"] + except KeyError: + raise ValueError("Version not found in PyPI") + + major, minor, patch = version_in_pypi.split(".") + if not major or not minor or not patch: + raise ValueError("Version not found in PyPI") + + return Version(int(major), int(minor), int(patch)) + + +def _sanity_check(version: Version, pyproject_version: Version, pypi_version: Version, force: bool) -> None: + if version != pyproject_version: + raise ValueError("Version in darwin.version module and pyproject.toml do not match") + + # pypi version should be either equal to or one greater + difference_between_versions = version - pypi_version + if difference_between_versions not in [(0, 0, 0), (0, 0, 1), (0, 1, 0), (1, 0, 0)]: + print(f"Version in PyPI is not equal to or one greater than local version: {version} != {pypi_version}") + print("Your local version is probably too old, check your version number") + + if not force or confirm("Continue with updating version number?"): + exit(1) + + print("Pypi version was out of date, this was bypassed.") + + print("Versions are in sync, sanity check passed") + + +def _update_version(new_version: Version, force: bool) -> None: + raise NotImplementedError + + +def _update_pyproject_version(new_version: Version, force: bool) -> None: + raise NotImplementedError + + +def main() -> None: + parser = argparse.ArgumentParser(description="Increase version number") + parser.add_argument("-v", "--version", action="store_true", help="show version number and exit", default=True) + parser.add_argument("-M", "--major", action="store_true", help="increase major version") + parser.add_argument("-m", "--minor", action="store_true", help="increase minor version") + parser.add_argument("-p", "--patch", action="store_true", help="increase patch version") + parser.add_argument("-f", "--force", action="store_true", help="force actions, do not ask for confirmation") + + args = parser.parse_args() + + force_actions = False + + if args.force: + print("Force mode enabled, no confirmation will be asked") + force_actions 
= True + + if args.major and args.minor and args.patch: + print("Cannot increase major, minor and patch at the same time. Specify only one of these.") + exit(2) + + # Constants so that these are not mutated by mistake + LOCAL_VERSION = _get_version() + PYPROJECT_VERSION = _get_pyproject_version() + PYPI_VERSION = _get_pypi_version(force_actions) + + if args.version: + print(f"Current version in darwin.version module: {str(LOCAL_VERSION)}") + print(f"Current version in pyproject.toml: {str(PYPROJECT_VERSION)}") + print(f"Current version in PyPI: {str(PYPI_VERSION)}") + + _sanity_check(LOCAL_VERSION, PYPROJECT_VERSION, PYPI_VERSION, force_actions) + + new_version = LOCAL_VERSION.copy() + + if args.major: + new_version.increment_major() + + if args.minor: + new_version.increment_minor() + + if args.patch: + new_version.increment_patch() + + if new_version.was_changed() and ( + force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?") + ): + _update_version(new_version, force_actions) + _update_pyproject_version(new_version, force_actions) + print(f"Version updated successfully to {str(new_version)}") + else: + print("Version not updated") + + +if __name__ == "__main__": + main() From f17b69f50f1273610fff3141d52db19b7e00e0c8 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 29 Aug 2023 17:08:03 +0100 Subject: [PATCH 008/195] Attempt at fix for changed files filter --- .github/workflows/code_quality.yml | 12 ++---------- deploy/format.sh | 2 ++ 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 39b97409f..527c045c0 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -23,11 +23,7 @@ jobs: - name: Get changed files id: changed_files run: | - if ${{ github.event_name == 'pull_request' }}; then - echo "changed_files=$(git diff --name-only -r HEAD^1 HEAD | xargs)" >> $GITHUB_OUTPUT - else - echo "changed_files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | xargs)" >> $GITHUB_OUTPUT - fi + changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) - name: Format code # Lints and formats are called only against the changed files, because other # files must have come from master @@ -43,11 +39,7 @@ jobs: - name: Get changed files id: changed_files run: | - if ${{ github.event_name == 'pull_request' }}; then - echo "changed_files=$(git diff --name-only -r HEAD^1 HEAD | xargs)" >> $GITHUB_OUTPUT - else - echo "changed_files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | xargs)" >> $GITHUB_OUTPUT - fi + changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) - name: Run linter # Lints and formats are called only against the changed files, because other # files must have come from master diff --git a/deploy/format.sh b/deploy/format.sh index 635ad4337..895925d20 100755 --- a/deploy/format.sh +++ b/deploy/format.sh @@ -11,6 +11,8 @@ THIS_FILE_DIRECTORY=`dirname "$0"` FILES_CHANGED="$1" +echo "Checking formatting of reference: $1" + if ! 
$CI; then echo "This script is intended for CI/CD only" exit 1 From 5a4aede3ed633c6c4ccfd1cbd06c539057edde4d Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 29 Aug 2023 17:41:53 +0100 Subject: [PATCH 009/195] Bash refactor --- deploy/format.sh | 36 ++++++++++++++++++++++++++++++------ deploy/lint.sh | 34 ++++++++++++++++++++++++++++++---- 2 files changed, 60 insertions(+), 10 deletions(-) diff --git a/deploy/format.sh b/deploy/format.sh index 895925d20..2aaf19561 100755 --- a/deploy/format.sh +++ b/deploy/format.sh @@ -9,21 +9,45 @@ # 3 - Black formatting failed THIS_FILE_DIRECTORY=`dirname "$0"` -FILES_CHANGED="$1" - -echo "Checking formatting of reference: $1" +FILES_CHANGED="$@" if ! $CI; then echo "This script is intended for CI/CD only" exit 1 fi -if [ "$#" -ne 1 ]; then - echo "Usage: $0 " +if [ "$#" -lt 1 ]; then + echo "Usage: $0 ......" exit 1 fi echo "Checking formatting of files: $1" "$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 -echo "$1" | grep -E '\.py$' | xargs | poetry run black --check --diff --no-color . || exit 3 \ No newline at end of file +EXIT_CODE=0 + +for file in $FILES_CHANGED; do + if [[ $file == *"__init__.py"* ]]; then + echo "Skipping __init__.py file: $file" + continue + fi + if [[ $file != *.py ]]; then + echo "Skipping non-python file: $file" + continue + fi + + echo "Checking black formatting of file: $file" + poetry run black --check --diff --no-color $file + if $!; then + echo "Black formatting failed for file: $file" + EXIT_CODE=$! + fi +done + +if [[ $EXIT_CODE -eq 0 ]]; then + echo "Black formatting passed" +else + echo "Black formatting failed" +fi + +exit $EXIT_CODE \ No newline at end of file diff --git a/deploy/lint.sh b/deploy/lint.sh index e5e53a83f..b3f423caf 100755 --- a/deploy/lint.sh +++ b/deploy/lint.sh @@ -9,22 +9,48 @@ # 3 - PEP8 compliance failed THIS_FILE_DIRECTORY=`dirname "$0"` -FILES_CHANGED="$1" +FILES_CHANGED="$@" if ! $CI; then echo "This script is intended for CI/CD only" exit 1 fi -echo "Checking linting compliance of reference: $2 against $1" +echo "Checking linting compliance" -if [ "$#" -ne 1 ]; then +if [ "$#" -lt 1 ]; then echo "Usage: $0 " exit 1 fi +EXIT_CODE=0 + echo "Checking linting compliance of files: $FILES_CHANGED" "$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 -echo "$FILES_CHANGED" | grep -E '\.py$' | xargs | poetry run flake8 --diff --no-color . || exit 3 \ No newline at end of file +for file in $FILES_CHANGED; do + if [[ $file == *"__init__.py"* ]]; then + echo "Skipping __init__.py file: $file" + continue + fi + if [[ $file != *.py ]]; then + echo "Skipping non-python file: $file" + continue + fi + + echo "Checking flake8 compliance of file: $file" + poetry run flake8 $file + if $!; then + echo "Flake8 check failed for file: $file" + EXIT_CODE=$1! 
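# (A note on the status checks in these loops: in bash "$?" holds the exit
# status of the last command, "$!" is the PID of the last background job,
# and "$1" is the first positional argument, so "if $!; then" and
# "EXIT_CODE=$1!" likely intend "$?". A minimal sketch of the intended
# capture, assuming the same poetry environment:
#   poetry run flake8 "$file"
#   status=$?
#   [ "$status" -ne 0 ] && EXIT_CODE="$status"
# )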
+ fi +done + +if [[ $EXIT_CODE -eq 0 ]]; then + echo "Flake8 check passed" +else + echo "Flake8 check failed" +fi + +exit $EXIT_CODE \ No newline at end of file From 413f600c2c592fc7d812917ba8ac91092046bcbb Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 09:46:48 +0100 Subject: [PATCH 010/195] Update flows --- .github/workflows/code_quality.yml | 6 ++++-- .github/workflows/documentation.yml | 9 +++++---- .github/workflows/release.yml | 4 +++- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 527c045c0..f066d7be1 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -10,9 +10,11 @@ on: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true - + +env: + GH_TOKEN: ${{ secrets.GH_TOKEN }} + jobs: - #Check format of code format: runs-on: ubuntu-latest diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index ff7adba5f..62763a9a9 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -10,15 +10,16 @@ concurrency: group: documentation cancel-in-progress: true +env: + GH_TOKEN: ${{ secrets.GH_TOKEN }} + AWS_REGION: eu-west-1 + AWS_SESSION_NAME: darwinPyDocumentation + permissions: id-token: write # This is required for requesting the JWT contents: read # This is required for actions/checkout -env: - AWS_REGION: eu-west-1 - AWS_SESSION_NAME: darwinPyDocumentation - jobs: generate-docs: runs-on: ubuntu-latest diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 46185e7b1..21b8b97eb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,8 +11,10 @@ concurrency: group: created_tag cancel-in-progress: true -jobs: +env: + GH_TOKEN: ${{ secrets.GH_TOKEN }} +jobs: checkout: runs-on: ubuntu-latest steps: From 5111954551b402039c18a0524dce82b5bb84e64d Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 10:52:02 +0100 Subject: [PATCH 011/195] Fixing Bash scripts and testing --- .github/workflows/code_quality.yml | 108 ++++++++++++++--------------- deploy/format.sh | 58 +++++++++++++--- 2 files changed, 101 insertions(+), 65 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index f066d7be1..874a5a7a7 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -13,7 +13,7 @@ concurrency: env: GH_TOKEN: ${{ secrets.GH_TOKEN }} - + jobs: #Check format of code format: @@ -32,59 +32,59 @@ jobs: run: bash deploy/format.sh ${{ steps.changed_files.outputs.changed_files }} # Check code for linting errors - lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - with: - ref: ${{ github.head_ref || github.ref }} - - name: Get changed files - id: changed_files - run: | - changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) - - name: Run linter - # Lints and formats are called only against the changed files, because other - # files must have come from master - run: bash deploy/lint.sh ${{ steps.changed_files.outputs.changed_files }} + # lint: + # runs-on: ubuntu-latest + # steps: + # - uses: actions/checkout@v2 + # with: + # ref: ${{ github.head_ref || github.ref }} + # - name: Get changed files + # id: changed_files + # run: | + # changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) + # - name: Run linter + # # Lints and formats are called only 
against the changed files, because other + # # files must have come from master + # run: bash deploy/lint.sh ${{ steps.changed_files.outputs.changed_files }} - # Run tests - test: - strategy: - matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.8, 3.9, "3.10", "3.11"] - poetry-version: ["1.3.1"] - runs-on: ${{ matrix.os }} - steps: - #install python - - uses: actions/checkout@v2 - with: - ref: ${{ github.head_ref || github.ref }} + # # Run tests + # test: + # strategy: + # matrix: + # os: [ubuntu-latest, macos-latest, windows-latest] + # python-version: [3.8, 3.9, "3.10", "3.11"] + # poetry-version: ["1.3.1"] + # runs-on: ${{ matrix.os }} + # steps: + # #install python + # - uses: actions/checkout@v2 + # with: + # ref: ${{ github.head_ref || github.ref }} - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Upgrade pip - run: python -m pip install --upgrade pip - - name: Setup Poetry - uses: abatilo/actions-poetry@v2 - with: - poetry-version: ${{ matrix.poetry-version }} - - name: Install dependencies - run: | - poetry install --no-interaction --no-root --all-extras -vvv - pip install wheel - pip install --upgrade setuptools - pip install --editable ".[test,ml,medical,dev]" - pip install pytest - - name: Run tests - run: | - pytest --junitxml=${{ github.workspace }}/os-${{ matrix.os }}-py${{ matrix.python-version}}-poetry${{ matrix.poetry-version }}-test-results.xml --cov=src --cov-report=xml --cov-report=term-missing --cov-fail-under=100 - - name: Publish test results - uses: EnricoMi/publish-unit-test-result-action@v2 - # Only run on windows - if: matrix.os == 'windows-latest' - with: - files: ${GITHUB_WORKSPACE}/**/*test-results.xml + # - name: Set up Python ${{ matrix.python-version }} + # uses: actions/setup-python@v2 + # with: + # python-version: ${{ matrix.python-version }} + # - name: Upgrade pip + # run: python -m pip install --upgrade pip + # - name: Setup Poetry + # uses: abatilo/actions-poetry@v2 + # with: + # poetry-version: ${{ matrix.poetry-version }} + # - name: Install dependencies + # run: | + # poetry install --no-interaction --no-root --all-extras -vvv + # pip install wheel + # pip install --upgrade setuptools + # pip install --editable ".[test,ml,medical,dev]" + # pip install pytest + # - name: Run tests + # run: | + # pytest --junitxml=${{ github.workspace }}/os-${{ matrix.os }}-py${{ matrix.python-version}}-poetry${{ matrix.poetry-version }}-test-results.xml --cov=src --cov-report=xml --cov-report=term-missing --cov-fail-under=100 + # - name: Publish test results + # uses: EnricoMi/publish-unit-test-result-action@v2 + # # Only run on windows + # if: matrix.os == 'windows-latest' + # with: + # files: ${GITHUB_WORKSPACE}/**/*test-results.xml \ No newline at end of file diff --git a/deploy/format.sh b/deploy/format.sh index 2aaf19561..6901ccc98 100755 --- a/deploy/format.sh +++ b/deploy/format.sh @@ -11,6 +11,7 @@ THIS_FILE_DIRECTORY=`dirname "$0"` FILES_CHANGED="$@" +# Input checks if ! 
$CI; then echo "This script is intended for CI/CD only" exit 1 @@ -21,33 +22,68 @@ if [ "$#" -lt 1 ]; then exit 1 fi -echo "Checking formatting of files: $1" +# Introduction +echo +echo "** Checking formatting **" +echo +echo "These files were changed in this diff:" +echo $FILES_CHANGED | tr " " "\n" +echo +echo "** Checking formatting of files **" +echo + +# Check dependencies "$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 EXIT_CODE=0 - +NUMBER_OF_PYTHON_FILES=0 +SKIPPED_FILES=0 +NONEXISTENT_FILES=0 for file in $FILES_CHANGED; do + if [ ! -f "$file" ]; then + SKIPPED_FILES=$((SKIPPED_FILES+1)) + NONEXISTENT_FILES=$((NONEXISTENT_FILES+1)) + continue + fi + if [[ $file == *"__init__.py"* ]]; then - echo "Skipping __init__.py file: $file" + SKIPPED_FILES=$((SKIPPED_FILES+1)) continue fi + if [[ $file != *.py ]]; then - echo "Skipping non-python file: $file" + SKIPPED_FILES=$((SKIPPED_FILES+1)) continue fi - - echo "Checking black formatting of file: $file" + + echo "> Checking black formatting of file: $file" poetry run black --check --diff --no-color $file + NUMBER_OF_PYTHON_FILES=$((NUMBER_OF_PYTHON_FILES+1)) if $!; then - echo "Black formatting failed for file: $file" + echo ">...Black formatting failed for file: $file" EXIT_CODE=$! fi + echo done +echo "Done." +echo "Checked $NUMBER_OF_PYTHON_FILES python files" +echo "Skipped $SKIPPED_FILES files" +echo "Skipped $NONEXISTENT_FILES files that do not exist" + +if [[ $NUMBER_OF_PYTHON_FILES -eq 0 ]]; then + echo "No checkable python files found in input." + exit 0 +fi + if [[ $EXIT_CODE -eq 0 ]]; then - echo "Black formatting passed" + if [[ $NUMBER_OF_PYTHON_FILES -eq 0 ]]; then + echo "No checkable python files found in input." + else + echo "Black formatting passed" + fi + exit 0 else echo "Black formatting failed" -fi - -exit $EXIT_CODE \ No newline at end of file + exit 3 +fi \ No newline at end of file From a338e808e65f1a8f732923062aed908dbc45fc4e Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 16:32:12 +0100 Subject: [PATCH 012/195] Github token change --- .github/workflows/code_quality.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 874a5a7a7..31b6d7dda 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -10,14 +10,13 @@ on: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true - -env: - GH_TOKEN: ${{ secrets.GH_TOKEN }} jobs: #Check format of code format: runs-on: ubuntu-latest + env: + GH_TOKEN: ${{ secrets.GH_TOKEN }} steps: - uses: actions/checkout@v2 with: From 407e086c10c4a074991c9dca8f9ec270fad85bc1 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 16:37:01 +0100 Subject: [PATCH 013/195] Moved token to use github namespace --- .github/workflows/code_quality.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 31b6d7dda..74ea6115a 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -10,6 +10,9 @@ on: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true + +env: + GH_TOKEN: ${{ github.token }} jobs: #Check format of code From ba4591ff4b01724879e0bbf118d50484245db6c3 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 16:39:34 +0100 Subject: [PATCH 014/195] 
Output list of changed files --- .github/workflows/code_quality.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 74ea6115a..0d1cef985 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -19,7 +19,7 @@ jobs: format: runs-on: ubuntu-latest env: - GH_TOKEN: ${{ secrets.GH_TOKEN }} + GH_TOKEN: ${{ github.token }} steps: - uses: actions/checkout@v2 with: @@ -28,6 +28,8 @@ jobs: id: changed_files run: | changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) + - name: Print changed files + run: echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" - name: Format code # Lints and formats are called only against the changed files, because other # files must have come from master From ce703c2466981835ec68b4fedff153fb1a07857b Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 16:49:37 +0100 Subject: [PATCH 015/195] Testing passing around variables --- .github/workflows/code_quality.yml | 44 ++++++++++++++++++++++++------ deploy/format.sh | 2 ++ 2 files changed, 37 insertions(+), 9 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 0d1cef985..8dcffd9a7 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -16,10 +16,8 @@ env: jobs: #Check format of code - format: + get_changed_files: runs-on: ubuntu-latest - env: - GH_TOKEN: ${{ github.token }} steps: - uses: actions/checkout@v2 with: @@ -28,12 +26,40 @@ jobs: id: changed_files run: | changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) - - name: Print changed files - run: echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" - - name: Format code - # Lints and formats are called only against the changed files, because other - # files must have come from master - run: bash deploy/format.sh ${{ steps.changed_files.outputs.changed_files }} + - name: Echo changed files + run: | + (echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n") >> $GITHUB_OUTPUT + - name: Set environment variable + run: | + echo "CHANGED_FILES=${{ steps.changed_files.outputs.changed_files }}" >> $GITHUB_ENV + + test_retrieve_env: + runs-on: ubuntu-latest + needs: get_changed_files + steps: + - name: Echo changed files + run: | + echo ${{ env.CHANGED_FILES }} + + # format: + # runs-on: ubuntu-latest + # env: + # GH_TOKEN: ${{ github.token }} + # steps: + # - uses: actions/checkout@v2 + # with: + # ref: ${{ github.head_ref || github.ref }} + # - name: Get changed files + # id: changed_files + # run: | + # changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) + + # - name: Print changed files + # run: (echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n") >> $GITHUB_OUTPUT + # - name: Format code + # # Lints and formats are called only against the changed files, because other + # # files must have come from master + # run: bash deploy/format.sh ${{ steps.changed_files.outputs.changed_files }} # Check code for linting errors # lint: diff --git a/deploy/format.sh b/deploy/format.sh index 6901ccc98..59413f421 100755 --- a/deploy/format.sh +++ b/deploy/format.sh @@ -19,6 +19,8 @@ fi if [ "$#" -lt 1 ]; then echo "Usage: $0 ......" 
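# A minimal sketch of the argument-guard idiom this hunk extends (variable
# names illustrative, not from the script): "$#" holds the count of
# positional arguments and "$@" expands to each argument as a separate word,
# so echoing the call back makes a usage failure self-diagnosing.
if [ "$#" -lt 1 ]; then
    echo "Usage: $0 <file>..." >&2    # send usage errors to stderr
    echo "Called with: $*" >&2        # $* joins the arguments for display
    exit 1
fi
# When forwarding the list on, quote "$@" so filenames containing spaces
# survive word-splitting; an unquoted $@ re-splits them.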
+ echo + echo "Called with $@" exit 1 fi From bd5873aa140826e67bf9a99f29c0b0f73000b40a Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 16:55:16 +0100 Subject: [PATCH 016/195] Changing echo types --- .github/workflows/code_quality.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 8dcffd9a7..3a2935209 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -28,7 +28,7 @@ jobs: changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) - name: Echo changed files run: | - (echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n") >> $GITHUB_OUTPUT + echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" - name: Set environment variable run: | echo "CHANGED_FILES=${{ steps.changed_files.outputs.changed_files }}" >> $GITHUB_ENV @@ -39,7 +39,7 @@ jobs: steps: - name: Echo changed files run: | - echo ${{ env.CHANGED_FILES }} + echo "$CHANGED_FILES" # format: # runs-on: ubuntu-latest From 4cfcaadd09fdf5245f0b01910f759d909d5e8cf4 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 17:00:24 +0100 Subject: [PATCH 017/195] Trying a different approach --- .github/workflows/code_quality.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 3a2935209..2120d7cc7 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -25,7 +25,7 @@ jobs: - name: Get changed files id: changed_files run: | - changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) + changed_files=gh pr diff ${{ github.event.number }} --name-only | xargs - name: Echo changed files run: | echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" From 891dec4fa8f4e6fc5ee3606fe0901106c1c85790 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 17:06:24 +0100 Subject: [PATCH 018/195] Check gh cli version --- .github/workflows/code_quality.yml | 36 ++++++++++++++++-------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 2120d7cc7..8ecbd98b7 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -22,24 +22,26 @@ jobs: - uses: actions/checkout@v2 with: ref: ${{ github.head_ref || github.ref }} - - name: Get changed files - id: changed_files - run: | - changed_files=gh pr diff ${{ github.event.number }} --name-only | xargs - - name: Echo changed files - run: | - echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" - - name: Set environment variable - run: | - echo "CHANGED_FILES=${{ steps.changed_files.outputs.changed_files }}" >> $GITHUB_ENV + - name: GH CLI Version + run: gh --version + # - name: Get changed files + # id: changed_files + # run: | + # changed_files=gh pr diff ${{ github.event.number }} --name-only | xargs + # - name: Echo changed files + # run: | + # echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" + # - name: Set environment variable + # run: | + # echo "CHANGED_FILES=${{ steps.changed_files.outputs.changed_files }}" >> $GITHUB_ENV - test_retrieve_env: - runs-on: ubuntu-latest - needs: get_changed_files - steps: - - name: Echo changed files - run: | - echo "$CHANGED_FILES" + # test_retrieve_env: + # runs-on: ubuntu-latest + # needs: get_changed_files + # steps: + # - name: Echo changed files + # run: | + # echo 
"$CHANGED_FILES" # format: # runs-on: ubuntu-latest From 467bfd39c8fe62ceac5565cc1ebfa20169bc0e9d Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 17:10:25 +0100 Subject: [PATCH 019/195] Fix for command wrapping --- .github/workflows/code_quality.yml | 34 +++++++++++++++--------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 8ecbd98b7..c596432ca 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -24,24 +24,24 @@ jobs: ref: ${{ github.head_ref || github.ref }} - name: GH CLI Version run: gh --version - # - name: Get changed files - # id: changed_files - # run: | - # changed_files=gh pr diff ${{ github.event.number }} --name-only | xargs - # - name: Echo changed files - # run: | - # echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" - # - name: Set environment variable - # run: | - # echo "CHANGED_FILES=${{ steps.changed_files.outputs.changed_files }}" >> $GITHUB_ENV + - name: Get changed files + id: changed_files + run: | + changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` + - name: Echo changed files + run: | + echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" + - name: Set environment variable + run: | + echo "CHANGED_FILES=${{ steps.changed_files.outputs.changed_files }}" >> $GITHUB_ENV - # test_retrieve_env: - # runs-on: ubuntu-latest - # needs: get_changed_files - # steps: - # - name: Echo changed files - # run: | - # echo "$CHANGED_FILES" + test_retrieve_env: + runs-on: ubuntu-latest + needs: get_changed_files + steps: + - name: Echo changed files + run: | + echo "$CHANGED_FILES" # format: # runs-on: ubuntu-latest From e389a7031e28e26e2a14b5c4a38a53316c01321c Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 17:13:39 +0100 Subject: [PATCH 020/195] Setting step output --- .github/workflows/code_quality.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index c596432ca..67ea00ff7 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -28,6 +28,7 @@ jobs: id: changed_files run: | changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` + echo "::set-output name=changed_files::$changed_files" - name: Echo changed files run: | echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" From 10cd78a89009767bdc05adbb216d80093e0e33b0 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 17:20:27 +0100 Subject: [PATCH 021/195] Set env var --- .github/workflows/code_quality.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 67ea00ff7..a00417a45 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -15,7 +15,6 @@ env: GH_TOKEN: ${{ github.token }} jobs: - #Check format of code get_changed_files: runs-on: ubuntu-latest steps: @@ -34,7 +33,7 @@ jobs: echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" - name: Set environment variable run: | - echo "CHANGED_FILES=${{ steps.changed_files.outputs.changed_files }}" >> $GITHUB_ENV + echo "export CHANGED_FILES=${{ steps.changed_files.outputs.changed_files }}" >> $GITHUB_ENV test_retrieve_env: runs-on: ubuntu-latest From 93fac8eb9f6dd61971dba23412bb9b66ebe7bfad Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 17:26:07 +0100 Subject: [PATCH 
022/195] Different approach to passing vars --- .github/workflows/code_quality.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index a00417a45..5e760035b 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -17,6 +17,8 @@ env: jobs: get_changed_files: runs-on: ubuntu-latest + outputs: + changed_files: ${{ steps.changed_files.outputs.changed_files }} steps: - uses: actions/checkout@v2 with: @@ -27,13 +29,10 @@ jobs: id: changed_files run: | changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` - echo "::set-output name=changed_files::$changed_files" + echo "changed_files=$changed_files" >> $GITHUB_OUTPUT - name: Echo changed files run: | echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" - - name: Set environment variable - run: | - echo "export CHANGED_FILES=${{ steps.changed_files.outputs.changed_files }}" >> $GITHUB_ENV test_retrieve_env: runs-on: ubuntu-latest @@ -41,7 +40,7 @@ jobs: steps: - name: Echo changed files run: | - echo "$CHANGED_FILES" + echo "${{ needs.get_changed_files.outputs.changed_files }}" | tr " " "\n" # format: # runs-on: ubuntu-latest From 5a79f498eecb486d52847ec260c65491b4b717d6 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 30 Aug 2023 17:29:29 +0100 Subject: [PATCH 023/195] Format and lint should work now --- .github/workflows/code_quality.yml | 69 +++++++++++++----------------- 1 file changed, 29 insertions(+), 40 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 5e760035b..6aae4a4df 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -16,6 +16,7 @@ env: jobs: get_changed_files: + name: Get changed files runs-on: ubuntu-latest outputs: changed_files: ${{ steps.changed_files.outputs.changed_files }} @@ -33,50 +34,38 @@ jobs: - name: Echo changed files run: | echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" - - test_retrieve_env: + + format: + needs: get_changed_files runs-on: ubuntu-latest + env: + GH_TOKEN: ${{ github.token }} + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ github.head_ref || github.ref }} + + - name: Format code + # Lints and formats are called only against the changed files, because other + # files must have come from master + run: bash deploy/format.sh ${{ needs.get_changed_files.outputs.changed_files }} + + + lint: needs: get_changed_files + runs-on: ubuntu-latest steps: - - name: Echo changed files + - uses: actions/checkout@v2 + with: + ref: ${{ github.head_ref || github.ref }} + - name: Get changed files + id: changed_files run: | - echo "${{ needs.get_changed_files.outputs.changed_files }}" | tr " " "\n" - - # format: - # runs-on: ubuntu-latest - # env: - # GH_TOKEN: ${{ github.token }} - # steps: - # - uses: actions/checkout@v2 - # with: - # ref: ${{ github.head_ref || github.ref }} - # - name: Get changed files - # id: changed_files - # run: | - # changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) - - # - name: Print changed files - # run: (echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n") >> $GITHUB_OUTPUT - # - name: Format code - # # Lints and formats are called only against the changed files, because other - # # files must have come from master - # run: bash deploy/format.sh ${{ steps.changed_files.outputs.changed_files }} - - # Check code for linting errors - # lint: - # runs-on: 
ubuntu-latest - # steps: - # - uses: actions/checkout@v2 - # with: - # ref: ${{ github.head_ref || github.ref }} - # - name: Get changed files - # id: changed_files - # run: | - # changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) - # - name: Run linter - # # Lints and formats are called only against the changed files, because other - # # files must have come from master - # run: bash deploy/lint.sh ${{ steps.changed_files.outputs.changed_files }} + changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) + - name: Run linter + # Lints and formats are called only against the changed files, because other + # files must have come from master + run: bash deploy/lint.sh ${{ needs.get_changed_files.outputs.changed_files }} # # Run tests # test: From cce8fffef97174a20b43558176096b7537a30f66 Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 10:00:13 +0100 Subject: [PATCH 024/195] Refactor of bash scripts --- deploy/format.sh | 43 ++++++++++++++++++------------- deploy/lint.sh | 66 +++++++++++++++++++++++++++++++++++------------- 2 files changed, 74 insertions(+), 35 deletions(-) diff --git a/deploy/format.sh b/deploy/format.sh index 59413f421..0f5b37b47 100755 --- a/deploy/format.sh +++ b/deploy/format.sh @@ -16,7 +16,6 @@ if ! $CI; then echo "This script is intended for CI/CD only" exit 1 fi - if [ "$#" -lt 1 ]; then echo "Usage: $0 ......" echo @@ -37,55 +36,63 @@ echo # Check dependencies "$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 -EXIT_CODE=0 -NUMBER_OF_PYTHON_FILES=0 -SKIPPED_FILES=0 -NONEXISTENT_FILES=0 + +black_failed_files="" +number_of_python_files=0 +skipped_files=0 +skipped_init_files=0 +nonexistent_files=0 + for file in $FILES_CHANGED; do if [ ! -f "$file" ]; then - SKIPPED_FILES=$((SKIPPED_FILES+1)) - NONEXISTENT_FILES=$((NONEXISTENT_FILES+1)) + skipped_files=$((skipped_files+1)) + nonexistent_files=$((nonexistent_files+1)) continue fi if [[ $file == *"__init__.py"* ]]; then - SKIPPED_FILES=$((SKIPPED_FILES+1)) + skipped_files=$((skipped_files+1)) continue fi if [[ $file != *.py ]]; then - SKIPPED_FILES=$((SKIPPED_FILES+1)) + skipped_files=$((skipped_files+1)) continue fi echo "> Checking black formatting of file: $file" + number_of_python_files=$((number_of_python_files+1)) + poetry run black --check --diff --no-color $file - NUMBER_OF_PYTHON_FILES=$((NUMBER_OF_PYTHON_FILES+1)) if $!; then echo ">...Black formatting failed for file: $file" - EXIT_CODE=$! + black_failed_files="$black_failed_files $file" fi echo done echo "Done." -echo "Checked $NUMBER_OF_PYTHON_FILES python files" -echo "Skipped $SKIPPED_FILES files" -echo "Skipped $NONEXISTENT_FILES files that do not exist" +echo "Checked $number_of_python_files python files" +echo "Skipped $skipped_files files" +echo "Skipped $nonexistent_files files that do not exist" -if [[ $NUMBER_OF_PYTHON_FILES -eq 0 ]]; then +if [[ $number_of_python_files -eq 0 ]]; then echo "No checkable python files found in input." exit 0 fi -if [[ $EXIT_CODE -eq 0 ]]; then - if [[ $NUMBER_OF_PYTHON_FILES -eq 0 ]]; then +if [[ $black_failed_files -eq "" ]]; then + if [[ $number_of_python_files -eq 0 ]]; then echo "No checkable python files found in input." 
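# A caveat on the success test above, sketched with the same variable name:
# inside [[ ]], -eq is an arithmetic comparison, so
# [[ $black_failed_files -eq "" ]] only behaves while the variable happens to
# be empty; once it holds a path such as "foo.py" (an illustrative value),
# the arithmetic evaluation raises an error. The conventional
# string-emptiness test is -z:
if [[ -z "$black_failed_files" ]]; then
    echo "Black formatting passed"
else
    echo "Black formatting failed for:$black_failed_files"
fi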
else echo "Black formatting passed" fi exit 0 else - echo "Black formatting failed" + echo "** Black formatting failed **" + echo "These files failed black formatting:" + echo + echo $black_failed_files | tr " " "\n" + echo exit 3 fi \ No newline at end of file diff --git a/deploy/lint.sh b/deploy/lint.sh index b3f423caf..690bab3f1 100755 --- a/deploy/lint.sh +++ b/deploy/lint.sh @@ -11,46 +11,78 @@ THIS_FILE_DIRECTORY=`dirname "$0"` FILES_CHANGED="$@" +# Input checks if ! $CI; then echo "This script is intended for CI/CD only" exit 1 fi - -echo "Checking linting compliance" - - if [ "$#" -lt 1 ]; then - echo "Usage: $0 " + echo "Usage: $0 ......" + echo + echo "Called with $@" exit 1 fi -EXIT_CODE=0 +# Introduction +echo +echo "Checking linting compliance" +echo +echo "These files were changed in this diff:" +echo $FILES_CHANGED | tr " " "\n" +echo +echo "** Checking formatting of files **" +echo -echo "Checking linting compliance of files: $FILES_CHANGED" "$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 +flake8_failed_files="" +number_of_python_files=0 +skipped_files=0 +skipped_init_files=0 +nonexistent_files=0 + for file in $FILES_CHANGED; do + if [ ! -f "$file" ]; then + skipped_files=$((skipped_files+1)) + nonexistent_files=$((nonexistent_files+1)) + continue + fi if [[ $file == *"__init__.py"* ]]; then - echo "Skipping __init__.py file: $file" + skipped_files=$((skipped_files+1)) continue fi if [[ $file != *.py ]]; then - echo "Skipping non-python file: $file" + skipped_files=$((skipped_files+1)) continue fi - - echo "Checking flake8 compliance of file: $file" + + echo "> Checking flake8 compliance of file: $file" + number_of_python_files=$((number_of_python_files+1)) + poetry run flake8 $file if $!; then echo "Flake8 check failed for file: $file" - EXIT_CODE=$1! + flake8_failed_files="$flake8_failed_files $file" fi done -if [[ $EXIT_CODE -eq 0 ]]; then - echo "Flake8 check passed" -else - echo "Flake8 check failed" +if [[ $number_of_python_files -eq 0 ]]; then + echo "No checkable python files found in input." + exit 0 fi -exit $EXIT_CODE \ No newline at end of file +if [[ $flake8_failed_files -eq "" ]]; then + if [[ $number_of_python_files -eq 0 ]]; then + echo "No checkable python files found in input." 
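# Two notes at this point in lint.sh, sketched with this script's own
# variables. First, the failure checks here use "if $!; then", but $! is the
# PID of the last background job, not an exit status, so it never reflects
# whether flake8 passed. Second, the messages just below still say "Black
# formatting", a leftover from format.sh (the series later deletes both
# scripts wholesale). The usual way to collect failures from a foreground
# command:
if ! poetry run flake8 "$file"; then
    flake8_failed_files="$flake8_failed_files $file"
fi
# or, equivalently:
poetry run flake8 "$file" || flake8_failed_files+=" $file"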
+ else + echo "Black formatting passed" + fi + exit 0 +else + echo "** Black formatting failed **" + echo "These files failed black formatting:" + echo + echo $flake8_failed_files | tr " " "\n" + echo + exit 3 +fi \ No newline at end of file From ec1ddc55669e0abdebba02b0af453b8fe2c7987f Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 10:20:36 +0100 Subject: [PATCH 025/195] Re-enabled testing stage --- .github/workflows/code_quality.yml | 59 ++++++++++-------------------- deploy/format.sh | 1 - deploy/increase_version.py | 3 -- deploy/lint.sh | 1 - deploy/run_tests.sh | 5 +-- 5 files changed, 22 insertions(+), 47 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 6aae4a4df..cdc5afde5 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -36,6 +36,7 @@ jobs: echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" format: + name: Check format of python needs: get_changed_files runs-on: ubuntu-latest env: @@ -52,6 +53,7 @@ jobs: lint: + name: Lint python needs: get_changed_files runs-on: ubuntu-latest steps: @@ -67,44 +69,23 @@ jobs: # files must have come from master run: bash deploy/lint.sh ${{ needs.get_changed_files.outputs.changed_files }} - # # Run tests - # test: - # strategy: - # matrix: - # os: [ubuntu-latest, macos-latest, windows-latest] - # python-version: [3.8, 3.9, "3.10", "3.11"] - # poetry-version: ["1.3.1"] - # runs-on: ${{ matrix.os }} - # steps: - # #install python - # - uses: actions/checkout@v2 - # with: - # ref: ${{ github.head_ref || github.ref }} + # Run tests + test: + name: Run Testing matrix + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + python-version: [3.8, 3.9, "3.10", "3.11"] + runs-on: ${{ matrix.os }} + steps: + #install python + - uses: actions/checkout@v2 + with: + ref: ${{ github.head_ref || github.ref }} + - name: Run tests + run: | + bash deploy/run_tests.sh ${{ github.workspace }} ${{ matrix.python-version }} ${{ matrix.os }} + - name: Publish test results + uses: EnricoMi/publish-unit-test-result-action@v2 - # - name: Set up Python ${{ matrix.python-version }} - # uses: actions/setup-python@v2 - # with: - # python-version: ${{ matrix.python-version }} - # - name: Upgrade pip - # run: python -m pip install --upgrade pip - # - name: Setup Poetry - # uses: abatilo/actions-poetry@v2 - # with: - # poetry-version: ${{ matrix.poetry-version }} - # - name: Install dependencies - # run: | - # poetry install --no-interaction --no-root --all-extras -vvv - # pip install wheel - # pip install --upgrade setuptools - # pip install --editable ".[test,ml,medical,dev]" - # pip install pytest - # - name: Run tests - # run: | - # pytest --junitxml=${{ github.workspace }}/os-${{ matrix.os }}-py${{ matrix.python-version}}-poetry${{ matrix.poetry-version }}-test-results.xml --cov=src --cov-report=xml --cov-report=term-missing --cov-fail-under=100 - # - name: Publish test results - # uses: EnricoMi/publish-unit-test-result-action@v2 - # # Only run on windows - # if: matrix.os == 'windows-latest' - # with: - # files: ${GITHUB_WORKSPACE}/**/*test-results.xml \ No newline at end of file diff --git a/deploy/format.sh b/deploy/format.sh index 0f5b37b47..223b56b5a 100755 --- a/deploy/format.sh +++ b/deploy/format.sh @@ -1,6 +1,5 @@ #! 
/usr/bin/env bash # Check formatting -# This script is intended for CI/CD only # # Exit: # 0 - Success diff --git a/deploy/increase_version.py b/deploy/increase_version.py index 425e985b3..0951a5db6 100644 --- a/deploy/increase_version.py +++ b/deploy/increase_version.py @@ -1,12 +1,9 @@ #!/usr/bin/env python3 import argparse -import math -from ast import Not from dataclasses import dataclass from os import environ from pathlib import Path -from statistics import mean from typing import Tuple from requests import get diff --git a/deploy/lint.sh b/deploy/lint.sh index 690bab3f1..58c10516f 100755 --- a/deploy/lint.sh +++ b/deploy/lint.sh @@ -1,6 +1,5 @@ #! /usr/bin/env bash # Check PEP8 compliance and other linting -# This script is intended for CI/CD, but can be run locally # # Exit: # 0 - Success diff --git a/deploy/run_tests.sh b/deploy/run_tests.sh index e9bfb49af..94c0026b2 100755 --- a/deploy/run_tests.sh +++ b/deploy/run_tests.sh @@ -1,6 +1,5 @@ #! /usr/bin/env bash # Run unit tests -# This script is intended for CI/CD, but can be run locally # # Exit: # 0 - Success @@ -15,7 +14,7 @@ if [ "$#" -lt 1 ]; then exit 1 fi -if [ "$#" -gt 1 ]; then +if [ "$#" -eq 3 ]; then USING_CICD=1 OS=$3 PYTHON_VERSION=$2 @@ -26,7 +25,7 @@ echo "Running unit tests in directory: $1" # Unit test config is in pyproject.toml and pytest.ini - don't set any here as it will only complicate CI/CD if [ "$USING_CICD" = 1 ]; then - poetry run pytest $1 -vvv --junit-xml=$0/$PYTHON_VERSION-$OS-test_results.xml || exit 3 + poetry run pytest $1 -vvv --junit-xml=$THIS_FILE_DIRECTORY/$PYTHON_VERSION-$OS-test_results.xml || exit 3 exit 0 fi From 7283cbaa8903a1803411e9bcf84c48888012001a Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 10:33:07 +0100 Subject: [PATCH 026/195] Get absolute paths in bash scripts --- deploy/format.sh | 12 +++++++++++- deploy/lint.sh | 34 ++++++++++++++++++++++------------ 2 files changed, 33 insertions(+), 13 deletions(-) diff --git a/deploy/format.sh b/deploy/format.sh index 223b56b5a..63a87e673 100755 --- a/deploy/format.sh +++ b/deploy/format.sh @@ -7,7 +7,7 @@ # 2 - Python3 or dependency not found # 3 - Black formatting failed -THIS_FILE_DIRECTORY=`dirname "$0"` +THIS_FILE_DIRECTORY=$(dirname`realpath "$0"`) FILES_CHANGED="$@" # Input checks @@ -94,4 +94,14 @@ else echo $black_failed_files | tr " " "\n" echo exit 3 +fi echo "Black formatting passed" + fi + exit 0 +else + echo "** Black formatting failed **" + echo "These files failed black formatting:" + echo + echo $black_failed_files | tr " " "\n" + echo + exit 3 fi \ No newline at end of file diff --git a/deploy/lint.sh b/deploy/lint.sh index 58c10516f..fd4f44cc8 100755 --- a/deploy/lint.sh +++ b/deploy/lint.sh @@ -1,16 +1,16 @@ #! /usr/bin/env bash -# Check PEP8 compliance and other linting +# Check formatting # # Exit: # 0 - Success # 1 - Called with incorrect number of arguments # 2 - Python3 or dependency not found -# 3 - PEP8 compliance failed +# 3 - Black formatting failed -THIS_FILE_DIRECTORY=`dirname "$0"` +THIS_FILE_DIRECTORY=$(dirname`realpath "$0"`) FILES_CHANGED="$@" -# Input checks +# Input checks if ! 
$CI; then echo "This script is intended for CI/CD only" exit 1 @@ -24,7 +24,7 @@ fi # Introduction echo -echo "Checking linting compliance" +echo "** Checking formatting **" echo echo "These files were changed in this diff:" echo $FILES_CHANGED | tr " " "\n" @@ -32,9 +32,11 @@ echo echo "** Checking formatting of files **" echo +# Check dependencies "$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 -flake8_failed_files="" + +black_failed_files="" number_of_python_files=0 skipped_files=0 skipped_init_files=0 @@ -46,31 +48,39 @@ for file in $FILES_CHANGED; do nonexistent_files=$((nonexistent_files+1)) continue fi + if [[ $file == *"__init__.py"* ]]; then skipped_files=$((skipped_files+1)) continue fi + if [[ $file != *.py ]]; then skipped_files=$((skipped_files+1)) continue fi - echo "> Checking flake8 compliance of file: $file" + echo "> Checking black formatting of file: $file" number_of_python_files=$((number_of_python_files+1)) - poetry run flake8 $file + poetry run black --check --diff --no-color $file if $!; then - echo "Flake8 check failed for file: $file" - flake8_failed_files="$flake8_failed_files $file" + echo ">...Black formatting failed for file: $file" + black_failed_files="$black_failed_files $file" fi + echo done +echo "Done." +echo "Checked $number_of_python_files python files" +echo "Skipped $skipped_files files" +echo "Skipped $nonexistent_files files that do not exist" + if [[ $number_of_python_files -eq 0 ]]; then echo "No checkable python files found in input." exit 0 fi -if [[ $flake8_failed_files -eq "" ]]; then +if [[ $black_failed_files -eq "" ]]; then if [[ $number_of_python_files -eq 0 ]]; then echo "No checkable python files found in input." else @@ -81,7 +91,7 @@ else echo "** Black formatting failed **" echo "These files failed black formatting:" echo - echo $flake8_failed_files | tr " " "\n" + echo $black_failed_files | tr " " "\n" echo exit 3 fi \ No newline at end of file From 611814fcb4cdaa62bd1e4a5ec67cfc52db08e023 Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 10:42:26 +0100 Subject: [PATCH 027/195] Ensure poetry is installed --- .github/workflows/code_quality.yml | 6 ++++-- .gitignore | 4 +++- deploy/check_poetry.sh | 9 +++++++++ deploy/check_python.sh | 1 + deploy/format.sh | 2 +- deploy/lint.sh | 2 +- deploy/run_tests.sh | 8 +++++--- 7 files changed, 24 insertions(+), 8 deletions(-) create mode 100644 deploy/check_poetry.sh diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index cdc5afde5..55eee1b9b 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -74,8 +74,10 @@ jobs: name: Run Testing matrix strategy: matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.8, 3.9, "3.10", "3.11"] + # os: [ubuntu-latest, macos-latest, windows-latest] + os: [ubuntu-latest] + # python-version: [3.8, 3.9, "3.10", "3.11"] + python-version: [3.8] runs-on: ${{ matrix.os }} steps: #install python diff --git a/.gitignore b/.gitignore index b384a53d1..fe08529ab 100644 --- a/.gitignore +++ b/.gitignore @@ -179,4 +179,6 @@ scripts/ .ruff_cache/ !darwin/future/tests/data_objects/workflow/data -!tests/darwin/dataset/data \ No newline at end of file +!tests/darwin/dataset/data + +test_results.xml \ No newline at end of file diff --git a/deploy/check_poetry.sh b/deploy/check_poetry.sh new file mode 100644 index 000000000..4f3922819 --- /dev/null +++ b/deploy/check_poetry.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +# Check poetry is 
installed +if ! command -v poetry &> /dev/null +then + echo "Poetry could not be found" + exit 4 +fi + +exit 0 \ No newline at end of file diff --git a/deploy/check_python.sh b/deploy/check_python.sh index b2407a70e..eeff4538d 100755 --- a/deploy/check_python.sh +++ b/deploy/check_python.sh @@ -22,4 +22,5 @@ then exit 3 fi +echo "Confirmed Python and pip are installed" exit 0 \ No newline at end of file diff --git a/deploy/format.sh b/deploy/format.sh index 63a87e673..a20068aa7 100755 --- a/deploy/format.sh +++ b/deploy/format.sh @@ -33,7 +33,7 @@ echo "** Checking formatting of files **" echo # Check dependencies -"$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 +"$THIS_FILE_DIRECTORY"/check_poetry.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 black_failed_files="" diff --git a/deploy/lint.sh b/deploy/lint.sh index fd4f44cc8..e7d4d62c7 100755 --- a/deploy/lint.sh +++ b/deploy/lint.sh @@ -33,7 +33,7 @@ echo "** Checking formatting of files **" echo # Check dependencies -"$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 +"$THIS_FILE_DIRECTORY"/check_poetry.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 black_failed_files="" diff --git a/deploy/run_tests.sh b/deploy/run_tests.sh index 94c0026b2..2dd595cec 100755 --- a/deploy/run_tests.sh +++ b/deploy/run_tests.sh @@ -7,7 +7,9 @@ # 2 - Python3 or dependency not found # 3 - Unit tests failed -THIS_FILE_DIRECTORY=`dirname "$0"` +THIS_FILE_DIRECTORY=$(dirname`realpath "$0"`) + + if [ "$#" -lt 1 ]; then echo "Usage: $0 [ ]" @@ -21,11 +23,11 @@ if [ "$#" -eq 3 ]; then fi echo "Running unit tests in directory: $1" -"$THIS_FILE_DIRECTORY"/check_python.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 +"$THIS_FILE_DIRECTORY"/check_poetry.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 # Unit test config is in pyproject.toml and pytest.ini - don't set any here as it will only complicate CI/CD if [ "$USING_CICD" = 1 ]; then - poetry run pytest $1 -vvv --junit-xml=$THIS_FILE_DIRECTORY/$PYTHON_VERSION-$OS-test_results.xml || exit 3 + poetry run pytest $THIS_FILE_DIRECTORY -vvv --junit-xml=$THIS_FILE_DIRECTORY/$PYTHON_VERSION-$OS-test_results.xml || exit 3 exit 0 fi From b134b7cab09f8b0be1ce06b7a5dbee8b02f2d7b4 Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 12:57:00 +0100 Subject: [PATCH 028/195] Updated test script --- .gitignore | 2 +- deploy/check_poetry.sh | 0 deploy/check_version_validity.py | 0 deploy/confirm_main_branch_deployability.py | 0 deploy/format.sh | 2 +- deploy/increase_version.py | 0 deploy/lint.sh | 2 +- deploy/run_tests.sh | 34 ++++++++++++++------- 8 files changed, 26 insertions(+), 14 deletions(-) mode change 100644 => 100755 deploy/check_poetry.sh mode change 100644 => 100755 deploy/check_version_validity.py mode change 100644 => 100755 deploy/confirm_main_branch_deployability.py mode change 100644 => 100755 deploy/increase_version.py diff --git a/.gitignore b/.gitignore index fe08529ab..ad7dcfc48 100644 --- a/.gitignore +++ b/.gitignore @@ -181,4 +181,4 @@ scripts/ !darwin/future/tests/data_objects/workflow/data !tests/darwin/dataset/data -test_results.xml \ No newline at end of file +*test_results.xml \ No newline at end of file diff --git a/deploy/check_poetry.sh b/deploy/check_poetry.sh old mode 100644 new mode 100755 diff --git a/deploy/check_version_validity.py b/deploy/check_version_validity.py old mode 100644 new mode 100755 diff --git a/deploy/confirm_main_branch_deployability.py 
b/deploy/confirm_main_branch_deployability.py old mode 100644 new mode 100755 diff --git a/deploy/format.sh b/deploy/format.sh index a20068aa7..38a114c73 100755 --- a/deploy/format.sh +++ b/deploy/format.sh @@ -7,7 +7,7 @@ # 2 - Python3 or dependency not found # 3 - Black formatting failed -THIS_FILE_DIRECTORY=$(dirname`realpath "$0"`) +THIS_FILE_DIRECTORY=$(dirname `realpath "$0"`) FILES_CHANGED="$@" # Input checks diff --git a/deploy/increase_version.py b/deploy/increase_version.py old mode 100644 new mode 100755 diff --git a/deploy/lint.sh b/deploy/lint.sh index e7d4d62c7..f7bef178c 100755 --- a/deploy/lint.sh +++ b/deploy/lint.sh @@ -7,7 +7,7 @@ # 2 - Python3 or dependency not found # 3 - Black formatting failed -THIS_FILE_DIRECTORY=$(dirname`realpath "$0"`) +THIS_FILE_DIRECTORY=$(dirname `realpath "$0"`) FILES_CHANGED="$@" # Input checks diff --git a/deploy/run_tests.sh b/deploy/run_tests.sh index 2dd595cec..199f208af 100755 --- a/deploy/run_tests.sh +++ b/deploy/run_tests.sh @@ -4,34 +4,46 @@ # Exit: # 0 - Success # 1 - Called with incorrect number of arguments -# 2 - Python3 or dependency not found -# 3 - Unit tests failed +# 2 - Test directory does not exist +# 3 - Python3 or dependency not found +# 4 - Unit tests failed -THIS_FILE_DIRECTORY=$(dirname`realpath "$0"`) +THIS_FILE_DIRECTORY=$(dirname `realpath "$0"`) +TEST_DIRECTORY=`realpath "$THIS_FILE_DIRECTORY"/../tests` - - -if [ "$#" -lt 1 ]; then - echo "Usage: $0 [ ]" +if [ "$#" -gt 3 ]; then + echo "Usage: $0 [ ]" + echo + echo "Called with $@" exit 1 fi if [ "$#" -eq 3 ]; then + echo "Called with directory and versions, so assuming CI/CD" USING_CICD=1 + TEST_DIRECTORY=$1 OS=$3 PYTHON_VERSION=$2 +else + echo "Called without directory and versions, so calculating test directory" fi -echo "Running unit tests in directory: $1" -"$THIS_FILE_DIRECTORY"/check_poetry.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 +if [ ! 
-d "$TEST_DIRECTORY" ]; then + echo "Test directory does not exist" + exit 2 +fi + +echo "Running unit tests in directory: $TEST_DIRECTORY" + +"$THIS_FILE_DIRECTORY"/check_poetry.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 3 # Unit test config is in pyproject.toml and pytest.ini - don't set any here as it will only complicate CI/CD if [ "$USING_CICD" = 1 ]; then - poetry run pytest $THIS_FILE_DIRECTORY -vvv --junit-xml=$THIS_FILE_DIRECTORY/$PYTHON_VERSION-$OS-test_results.xml || exit 3 + poetry run pytest $TEST_DIRECTORY -vvv --junit-xml=$THIS_FILE_DIRECTORY/$PYTHON_VERSION-$OS-test_results.xml || exit 3 exit 0 fi -poetry run pytest $1 || exit 3 +poetry run pytest $TEST_DIRECTORY || exit 4 echo "Unit tests passed" From d11a0710cffb3f69309525889127bd09753697cb Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 14:56:42 +0100 Subject: [PATCH 029/195] Extract tests into own file for use elsewhere --- .github/workflows/code_quality.yml | 22 ++-------------- .github/workflows/tests.yml | 41 ++++++++++++++++++++++++++++++ 2 files changed, 43 insertions(+), 20 deletions(-) create mode 100644 .github/workflows/tests.yml diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 55eee1b9b..b7209b8d4 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -69,25 +69,7 @@ jobs: # files must have come from master run: bash deploy/lint.sh ${{ needs.get_changed_files.outputs.changed_files }} - # Run tests - test: - name: Run Testing matrix - strategy: - matrix: - # os: [ubuntu-latest, macos-latest, windows-latest] - os: [ubuntu-latest] - # python-version: [3.8, 3.9, "3.10", "3.11"] - python-version: [3.8] - runs-on: ${{ matrix.os }} - steps: - #install python - - uses: actions/checkout@v2 - with: - ref: ${{ github.head_ref || github.ref }} - - name: Run tests - run: | - bash deploy/run_tests.sh ${{ github.workspace }} ${{ matrix.python-version }} ${{ matrix.os }} - - name: Publish test results - uses: EnricoMi/publish-unit-test-result-action@v2 + run_tests: + uses: ./.github/workflows/tests.yml \ No newline at end of file diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 000000000..63df59280 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,41 @@ +# Run tests +name: tests +run-name: Tests + +on: + workflow_dispatch: + +jobs: + test: + name: "Run Testing OS: ${{ matrix.os }} Python: ${{ matrix.python-version }}" + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + # python-version: [3.8, 3.9, "3.10", "3.11"] + python-version: [3.8] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ github.head_ref || github.ref }} + - name: Install Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install poetry on POSIX + if: runner.os == 'Linux' || runner.os == 'macOS' + run: | + curl -sSL https://install.python-poetry.org | python3 - + - name: Install Poetry on Windows + if: runner.os == 'Windows' + run: | + curl -sSL https://install.python-poetry.org | python3 - + - name: Run pytest + uses: pavelzw/pytest-action@v2 + with: + verbose: true + emoji: true + job-summary: true + custom-arguments: '-q' + click-to-expand: true + report-title: 'Test Report' \ No newline at end of file From ca152b770ef03312103f4a502758224a19333946 Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 15:01:03 +0100 Subject: [PATCH 030/195] Make tests.yml 
reusable --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 63df59280..1ee77ab9a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -3,7 +3,7 @@ name: tests run-name: Tests on: - workflow_dispatch: + workflow_call: jobs: test: From 33a51ae4c445f81b2a1eaf3cdfc89e3426439cb4 Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 15:01:46 +0100 Subject: [PATCH 031/195] Improve run --- .github/workflows/code_quality.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index b7209b8d4..e42981d39 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -70,6 +70,8 @@ jobs: run: bash deploy/lint.sh ${{ needs.get_changed_files.outputs.changed_files }} run_tests: + name: Run tests + needs: [format, lint] uses: ./.github/workflows/tests.yml \ No newline at end of file From 7a313fed11abdacae89666fbf6d56bd9493376c7 Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 15:07:38 +0100 Subject: [PATCH 032/195] Work out the issue with the poetry binary --- .github/workflows/code_quality.yml | 1 - deploy/format.sh | 1 + deploy/lint.sh | 1 + 3 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index e42981d39..04e319264 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -71,7 +71,6 @@ jobs: run_tests: name: Run tests - needs: [format, lint] uses: ./.github/workflows/tests.yml \ No newline at end of file diff --git a/deploy/format.sh b/deploy/format.sh index 38a114c73..328ed678e 100755 --- a/deploy/format.sh +++ b/deploy/format.sh @@ -35,6 +35,7 @@ echo # Check dependencies "$THIS_FILE_DIRECTORY"/check_poetry.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 +which poetry black_failed_files="" number_of_python_files=0 diff --git a/deploy/lint.sh b/deploy/lint.sh index f7bef178c..8aeb4240d 100755 --- a/deploy/lint.sh +++ b/deploy/lint.sh @@ -35,6 +35,7 @@ echo # Check dependencies "$THIS_FILE_DIRECTORY"/check_poetry.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 +which poetry black_failed_files="" number_of_python_files=0 From 5b32995776fa9fc0746b3264e0a3110d22707b39 Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 15:10:53 +0100 Subject: [PATCH 033/195] Set powershell to run in pwsh --- .github/workflows/tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 1ee77ab9a..4cbb3be00 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -24,10 +24,12 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install poetry on POSIX if: runner.os == 'Linux' || runner.os == 'macOS' + shell: bash run: | curl -sSL https://install.python-poetry.org | python3 - - name: Install Poetry on Windows if: runner.os == 'Windows' + shell: pwsh run: | curl -sSL https://install.python-poetry.org | python3 - - name: Run pytest From ea01e158244045f9ef66bc2fb55cfefcccaac5d3 Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 15:57:30 +0100 Subject: [PATCH 034/195] Convert flows to reusable flows --- .github/workflows/code_quality.yml | 23 ++---- .github/workflows/tests.yml | 11 +-- deploy/format.sh | 108 ----------------------------- deploy/lint.sh | 98 -------------------------- format.yml | 25 +++++++ lint.yml | 25 +++++++ 6 files 
changed, 56 insertions(+), 234 deletions(-) delete mode 100755 deploy/format.sh delete mode 100755 deploy/lint.sh create mode 100644 format.yml create mode 100644 lint.yml diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 04e319264..1734e65ac 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -39,17 +39,10 @@ jobs: name: Check format of python needs: get_changed_files runs-on: ubuntu-latest - env: - GH_TOKEN: ${{ github.token }} steps: - - uses: actions/checkout@v2 + - uses: ./.github/workflows/format.yml with: - ref: ${{ github.head_ref || github.ref }} - - - name: Format code - # Lints and formats are called only against the changed files, because other - # files must have come from master - run: bash deploy/format.sh ${{ needs.get_changed_files.outputs.changed_files }} + files: ${{ needs.get_changed_files.outputs.changed_files }} lint: @@ -57,17 +50,9 @@ jobs: needs: get_changed_files runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: ./.github/workflows/lint.yml with: - ref: ${{ github.head_ref || github.ref }} - - name: Get changed files - id: changed_files - run: | - changed_files=$(gh pr diff ${{ github.event.number }} --name-only | xargs) - - name: Run linter - # Lints and formats are called only against the changed files, because other - # files must have come from master - run: bash deploy/lint.sh ${{ needs.get_changed_files.outputs.changed_files }} + files: ${{ needs.get_changed_files.outputs.changed_files }} run_tests: name: Run tests diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 4cbb3be00..10ba400da 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -22,16 +22,9 @@ jobs: uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - - name: Install poetry on POSIX - if: runner.os == 'Linux' || runner.os == 'macOS' - shell: bash + - name: Install poetry run: | - curl -sSL https://install.python-poetry.org | python3 - - - name: Install Poetry on Windows - if: runner.os == 'Windows' - shell: pwsh - run: | - curl -sSL https://install.python-poetry.org | python3 - + pip install poetry - name: Run pytest uses: pavelzw/pytest-action@v2 with: diff --git a/deploy/format.sh b/deploy/format.sh deleted file mode 100755 index 328ed678e..000000000 --- a/deploy/format.sh +++ /dev/null @@ -1,108 +0,0 @@ -#! /usr/bin/env bash -# Check formatting -# -# Exit: -# 0 - Success -# 1 - Called with incorrect number of arguments -# 2 - Python3 or dependency not found -# 3 - Black formatting failed - -THIS_FILE_DIRECTORY=$(dirname `realpath "$0"`) -FILES_CHANGED="$@" - -# Input checks -if ! $CI; then - echo "This script is intended for CI/CD only" - exit 1 -fi -if [ "$#" -lt 1 ]; then - echo "Usage: $0 ......" - echo - echo "Called with $@" - exit 1 -fi - -# Introduction -echo -echo "** Checking formatting **" -echo -echo "These files were changed in this diff:" -echo $FILES_CHANGED | tr " " "\n" -echo -echo "** Checking formatting of files **" -echo - -# Check dependencies -"$THIS_FILE_DIRECTORY"/check_poetry.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 - -which poetry - -black_failed_files="" -number_of_python_files=0 -skipped_files=0 -skipped_init_files=0 -nonexistent_files=0 - -for file in $FILES_CHANGED; do - if [ ! 
-f "$file" ]; then - skipped_files=$((skipped_files+1)) - nonexistent_files=$((nonexistent_files+1)) - continue - fi - - if [[ $file == *"__init__.py"* ]]; then - skipped_files=$((skipped_files+1)) - continue - fi - - if [[ $file != *.py ]]; then - skipped_files=$((skipped_files+1)) - continue - fi - - echo "> Checking black formatting of file: $file" - number_of_python_files=$((number_of_python_files+1)) - - poetry run black --check --diff --no-color $file - if $!; then - echo ">...Black formatting failed for file: $file" - black_failed_files="$black_failed_files $file" - fi - echo -done - -echo "Done." -echo "Checked $number_of_python_files python files" -echo "Skipped $skipped_files files" -echo "Skipped $nonexistent_files files that do not exist" - -if [[ $number_of_python_files -eq 0 ]]; then - echo "No checkable python files found in input." - exit 0 -fi - -if [[ $black_failed_files -eq "" ]]; then - if [[ $number_of_python_files -eq 0 ]]; then - echo "No checkable python files found in input." - else - echo "Black formatting passed" - fi - exit 0 -else - echo "** Black formatting failed **" - echo "These files failed black formatting:" - echo - echo $black_failed_files | tr " " "\n" - echo - exit 3 -fi echo "Black formatting passed" - fi - exit 0 -else - echo "** Black formatting failed **" - echo "These files failed black formatting:" - echo - echo $black_failed_files | tr " " "\n" - echo - exit 3 -fi \ No newline at end of file diff --git a/deploy/lint.sh b/deploy/lint.sh deleted file mode 100755 index 8aeb4240d..000000000 --- a/deploy/lint.sh +++ /dev/null @@ -1,98 +0,0 @@ -#! /usr/bin/env bash -# Check formatting -# -# Exit: -# 0 - Success -# 1 - Called with incorrect number of arguments -# 2 - Python3 or dependency not found -# 3 - Black formatting failed - -THIS_FILE_DIRECTORY=$(dirname `realpath "$0"`) -FILES_CHANGED="$@" - -# Input checks -if ! $CI; then - echo "This script is intended for CI/CD only" - exit 1 -fi -if [ "$#" -lt 1 ]; then - echo "Usage: $0 ......" - echo - echo "Called with $@" - exit 1 -fi - -# Introduction -echo -echo "** Checking formatting **" -echo -echo "These files were changed in this diff:" -echo $FILES_CHANGED | tr " " "\n" -echo -echo "** Checking formatting of files **" -echo - -# Check dependencies -"$THIS_FILE_DIRECTORY"/check_poetry.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 2 - -which poetry - -black_failed_files="" -number_of_python_files=0 -skipped_files=0 -skipped_init_files=0 -nonexistent_files=0 - -for file in $FILES_CHANGED; do - if [ ! -f "$file" ]; then - skipped_files=$((skipped_files+1)) - nonexistent_files=$((nonexistent_files+1)) - continue - fi - - if [[ $file == *"__init__.py"* ]]; then - skipped_files=$((skipped_files+1)) - continue - fi - - if [[ $file != *.py ]]; then - skipped_files=$((skipped_files+1)) - continue - fi - - echo "> Checking black formatting of file: $file" - number_of_python_files=$((number_of_python_files+1)) - - poetry run black --check --diff --no-color $file - if $!; then - echo ">...Black formatting failed for file: $file" - black_failed_files="$black_failed_files $file" - fi - echo -done - -echo "Done." -echo "Checked $number_of_python_files python files" -echo "Skipped $skipped_files files" -echo "Skipped $nonexistent_files files that do not exist" - -if [[ $number_of_python_files -eq 0 ]]; then - echo "No checkable python files found in input." 
- exit 0 -fi - -if [[ $black_failed_files -eq "" ]]; then - if [[ $number_of_python_files -eq 0 ]]; then - echo "No checkable python files found in input." - else - echo "Black formatting passed" - fi - exit 0 -else - echo "** Black formatting failed **" - echo "These files failed black formatting:" - echo - echo $black_failed_files | tr " " "\n" - echo - exit 3 -fi \ No newline at end of file diff --git a/format.yml b/format.yml new file mode 100644 index 000000000..ae2dd03c1 --- /dev/null +++ b/format.yml @@ -0,0 +1,25 @@ +name: format +run-name: Format + +on: + workflow_call: + inputs: + files: + description: "Files to check formatting of" + required: true + +jobs: + lint: + - name: Check out source repository + uses: actions/checkout@v3 + + - name: Set up Python environment + uses: actions/setup-python@v4 + with: + python-version: "3.11" + + - name: Black check + uses: psf/black@stable + with: + options: "--check --no-color" + src: ${{ github.event.inputs.files }}} diff --git a/lint.yml b/lint.yml new file mode 100644 index 000000000..19670934a --- /dev/null +++ b/lint.yml @@ -0,0 +1,25 @@ +name: lint +run-name: Lint + +on: + workflow_call: + inputs: + files: + description: "Files to lint" + required: true + +jobs: + lint: + - name: Check out source repository + uses: actions/checkout@v3 + + - name: Set up Python environment + uses: actions/setup-python@v4 + with: + python-version: "3.11" + + - name: flake8 Lint + uses: py-actions/flake8@v2 + args: ${{ github.event.inputs.files }}} + with: + plugins: "flake8-pyproject" From 998429acfbccc72626e7335ccb6500ac4ac4090a Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 16:03:11 +0100 Subject: [PATCH 035/195] Refactored yaml to call jobs from reusable workflows directly --- .github/workflows/code_quality.yml | 16 ++++++---------- format.yml | 1 + lint.yml | 1 + 3 files changed, 8 insertions(+), 10 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 1734e65ac..aacbd8883 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -38,21 +38,17 @@ jobs: format: name: Check format of python needs: get_changed_files - runs-on: ubuntu-latest - steps: - - uses: ./.github/workflows/format.yml - with: - files: ${{ needs.get_changed_files.outputs.changed_files }} + uses: ./.github/workflows/format.yml + with: + files: ${{ needs.get_changed_files.outputs.changed_files }} lint: name: Lint python needs: get_changed_files - runs-on: ubuntu-latest - steps: - - uses: ./.github/workflows/lint.yml - with: - files: ${{ needs.get_changed_files.outputs.changed_files }} + uses: ./.github/workflows/lint.yml + with: + files: ${{ needs.get_changed_files.outputs.changed_files }} run_tests: name: Run tests diff --git a/format.yml b/format.yml index ae2dd03c1..42dbdea42 100644 --- a/format.yml +++ b/format.yml @@ -9,6 +9,7 @@ on: required: true jobs: + runs-on: ubuntu-latest lint: - name: Check out source repository uses: actions/checkout@v3 diff --git a/lint.yml b/lint.yml index 19670934a..88f158dfb 100644 --- a/lint.yml +++ b/lint.yml @@ -9,6 +9,7 @@ on: required: true jobs: + runs-on: ubuntu-latest lint: - name: Check out source repository uses: actions/checkout@v3 From 14df7cd0a49dd86f18c11b9d8eb0d97324bd572b Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 16:09:06 +0100 Subject: [PATCH 036/195] Fixing links --- format.yml => .github/workflows/format.yml | 4 +++- lint.yml => .github/workflows/lint.yml | 6 ++++-- 2 files changed, 7 insertions(+), 3 deletions(-) 
rename format.yml => .github/workflows/format.yml (89%) rename lint.yml => .github/workflows/lint.yml (80%) diff --git a/format.yml b/.github/workflows/format.yml similarity index 89% rename from format.yml rename to .github/workflows/format.yml index 42dbdea42..7bba1eb91 100644 --- a/format.yml +++ b/.github/workflows/format.yml @@ -7,10 +7,12 @@ on: files: description: "Files to check formatting of" required: true + type: string jobs: - runs-on: ubuntu-latest lint: + runs-on: ubuntu-latest + steps: - name: Check out source repository uses: actions/checkout@v3 diff --git a/lint.yml b/.github/workflows/lint.yml similarity index 80% rename from lint.yml rename to .github/workflows/lint.yml index 88f158dfb..204ed63fa 100644 --- a/lint.yml +++ b/.github/workflows/lint.yml @@ -5,12 +5,14 @@ on: workflow_call: inputs: files: + type: string description: "Files to lint" required: true jobs: - runs-on: ubuntu-latest lint: + runs-on: ubuntu-latest + steps: - name: Check out source repository uses: actions/checkout@v3 @@ -21,6 +23,6 @@ jobs: - name: flake8 Lint uses: py-actions/flake8@v2 - args: ${{ github.event.inputs.files }}} with: plugins: "flake8-pyproject" + args: ${{ github.event.inputs.files }} \ No newline at end of file From 853bb15f836a0c11e60b18ce6abd539adcd2c060 Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 31 Aug 2023 17:01:03 +0100 Subject: [PATCH 037/195] Two simple replacements for 3rd party tools --- .github/workflows/format.yml | 19 ++++++++++++++----- .github/workflows/lint.yml | 19 ++++++++++++++----- 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index 7bba1eb91..1e8454853 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -10,7 +10,7 @@ on: type: string jobs: - lint: + format: runs-on: ubuntu-latest steps: - name: Check out source repository @@ -22,7 +22,16 @@ jobs: python-version: "3.11" - name: Black check - uses: psf/black@stable - with: - options: "--check --no-color" - src: ${{ github.event.inputs.files }}} + run: | + pip install black + success = 1 + for file in ${{ github.event.inputs.files }} + do + black --check $file + if [ $? -ne 0 ]; then + success = 0 + fi + done + if [ $success -eq 0 ]; then + exit 1 + fi diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 204ed63fa..4410f9c3b 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,5 +1,5 @@ name: lint -run-name: Lint +run-name: Check linting on: workflow_call: @@ -22,7 +22,16 @@ jobs: python-version: "3.11" - name: flake8 Lint - uses: py-actions/flake8@v2 - with: - plugins: "flake8-pyproject" - args: ${{ github.event.inputs.files }} \ No newline at end of file + run: | + pip install flake8 flake8-pyproject + success = 1 + for file in ${{ github.event.inputs.files }} + do + flake8 $file + if [ $? 
-ne 0 ]; then + success = 0 + fi + done + if [ $success -eq 0 ]; then + exit 1 + fi From 5eba13d7eaffdab5d4450192f22454318d048dc4 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 09:49:55 +0100 Subject: [PATCH 038/195] Use bash as shell --- .github/workflows/format.yml | 1 + .github/workflows/lint.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index 1e8454853..43c8be6f7 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -22,6 +22,7 @@ jobs: python-version: "3.11" - name: Black check + shell: bash run: | pip install black success = 1 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 4410f9c3b..101321184 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -22,6 +22,7 @@ jobs: python-version: "3.11" - name: flake8 Lint + shell: bash run: | pip install flake8 flake8-pyproject success = 1 From 190f51541c26c1bc326e2ded7b653304e7e1b201 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 10:08:55 +0100 Subject: [PATCH 039/195] Fix variable assignment --- .github/workflows/format.yml | 2 +- .github/workflows/lint.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index 43c8be6f7..76c95a43e 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -25,7 +25,7 @@ jobs: shell: bash run: | pip install black - success = 1 + success=1 for file in ${{ github.event.inputs.files }} do black --check $file diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 101321184..4926ca463 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -25,7 +25,7 @@ jobs: shell: bash run: | pip install flake8 flake8-pyproject - success = 1 + success=1 for file in ${{ github.event.inputs.files }} do flake8 $file From da805ee44f6388ba24ca5b012abea2a68c6a20a7 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 10:12:48 +0100 Subject: [PATCH 040/195] Format and lint simplification --- .github/workflows/format.yml | 6 ++---- .github/workflows/lint.yml | 6 ++---- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index 76c95a43e..fb2d111ea 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -28,10 +28,8 @@ jobs: success=1 for file in ${{ github.event.inputs.files }} do - black --check $file - if [ $? -ne 0 ]; then - success = 0 - fi + echo "Checking $file" + black --check $file || success=0 done if [ $success -eq 0 ]; then exit 1 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 4926ca463..cc1d404b0 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -28,10 +28,8 @@ jobs: success=1 for file in ${{ github.event.inputs.files }} do - flake8 $file - if [ $? 
-ne 0 ]; then - success = 0 - fi + echo "Linting $file" + flake8 $file || success=0 done if [ $success -eq 0 ]; then exit 1 From 5b856d9ba5b44914c4ebb9bf2b4113d2ad806ff5 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 10:15:22 +0100 Subject: [PATCH 041/195] Debugging output --- .github/workflows/format.yml | 1 + .github/workflows/lint.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index fb2d111ea..965b73909 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -26,6 +26,7 @@ jobs: run: | pip install black success=1 + echo "Checking files ${{ github.event.inputs.files }}" for file in ${{ github.event.inputs.files }} do echo "Checking $file" diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index cc1d404b0..92ab87c61 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -26,6 +26,7 @@ jobs: run: | pip install flake8 flake8-pyproject success=1 + echo "Linting files ${{ github.event.inputs.files }}" for file in ${{ github.event.inputs.files }} do echo "Linting $file" From b1cc7fbead7984d4c086d4e31658326edd671863 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 10:21:57 +0100 Subject: [PATCH 042/195] Confirm passing of variables --- .github/workflows/code_quality.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index aacbd8883..0ae740db1 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -31,9 +31,15 @@ jobs: run: | changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` echo "changed_files=$changed_files" >> $GITHUB_OUTPUT + + echo_changed_files: + name: Echo changed files + needs: get_changed_files + runs-on: ubuntu-latest + steps: - name: Echo changed files run: | - echo ${{ steps.changed_files.outputs.changed_files }} | tr " " "\n" + echo "${{ needs.get_changed_files.outputs.changed_files }}" | tr ' ' '\n' format: name: Check format of python From 02d8c7c47fc8658aef0d3a3ad2ec538af6645594 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 10:28:34 +0100 Subject: [PATCH 043/195] Found the alternative context --- .github/workflows/code_quality.yml | 2 +- .github/workflows/format.yml | 4 ++-- .github/workflows/lint.yml | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 0ae740db1..d7b55f164 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -29,7 +29,7 @@ jobs: - name: Get changed files id: changed_files run: | - changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` + changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs | grep -E '\.py$' | tr ' ' '\n' | sort -u | tr '\n' ' '` echo "changed_files=$changed_files" >> $GITHUB_OUTPUT echo_changed_files: diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index 965b73909..8b36f5c40 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -26,8 +26,8 @@ jobs: run: | pip install black success=1 - echo "Checking files ${{ github.event.inputs.files }}" - for file in ${{ github.event.inputs.files }} + echo "Checking files ${{ inputs.files }}" + for file in ${{ inputs.files }} do echo "Checking $file" black --check $file || success=0 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 
92ab87c61..ad592c26b 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -26,8 +26,8 @@ jobs: run: | pip install flake8 flake8-pyproject success=1 - echo "Linting files ${{ github.event.inputs.files }}" - for file in ${{ github.event.inputs.files }} + echo "Linting files ${{ inputs.files }}" + for file in ${{ inputs.files }} do echo "Linting $file" flake8 $file || success=0 From 73c6e48f7bc16a2c5faa647a441ae2254363686b Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 10:31:05 +0100 Subject: [PATCH 044/195] Fixed grep problem --- .github/workflows/code_quality.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index d7b55f164..0ae740db1 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -29,7 +29,7 @@ jobs: - name: Get changed files id: changed_files run: | - changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs | grep -E '\.py$' | tr ' ' '\n' | sort -u | tr '\n' ' '` + changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` echo "changed_files=$changed_files" >> $GITHUB_OUTPUT echo_changed_files: From 67c0cf07c0a9de7b5727911d7bcc0b44596825de Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 10:37:21 +0100 Subject: [PATCH 045/195] Doing some debugging to reduce to python files only --- .github/workflows/code_quality.yml | 37 +++++++++++++++++------------- .github/workflows/format.yml | 8 ++++--- .github/workflows/lint.yml | 8 ++++--- 3 files changed, 31 insertions(+), 22 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 0ae740db1..d1bfbaa78 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -20,6 +20,7 @@ jobs: runs-on: ubuntu-latest outputs: changed_files: ${{ steps.changed_files.outputs.changed_files }} + changed_python_files: ${{ steps.changed_files.outputs.changed_python_files }} steps: - uses: actions/checkout@v2 with: @@ -31,6 +32,10 @@ jobs: run: | changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` echo "changed_files=$changed_files" >> $GITHUB_OUTPUT + - name: Get changed python files + id: changed_python_files + run: | + echo "${{ steps.changed_files.outputs.changed_files }}" | grep -E '\.py$' | xargs echo_changed_files: name: Echo changed files @@ -39,25 +44,25 @@ jobs: steps: - name: Echo changed files run: | - echo "${{ needs.get_changed_files.outputs.changed_files }}" | tr ' ' '\n' + echo "${{ needs.get_changed_files.outputs.changed_python_files }}" | tr ' ' '\n' - format: - name: Check format of python - needs: get_changed_files - uses: ./.github/workflows/format.yml - with: - files: ${{ needs.get_changed_files.outputs.changed_files }} + # format: + # name: Check format of python + # needs: get_changed_files + # uses: ./.github/workflows/format.yml + # with: + # files: ${{ needs.get_changed_files.outputs.changed_files }} - lint: - name: Lint python - needs: get_changed_files - uses: ./.github/workflows/lint.yml - with: - files: ${{ needs.get_changed_files.outputs.changed_files }} + # lint: + # name: Lint python + # needs: get_changed_files + # uses: ./.github/workflows/lint.yml + # with: + # files: ${{ needs.get_changed_files.outputs.changed_files }} - run_tests: - name: Run tests - uses: ./.github/workflows/tests.yml + # run_tests: + # name: Run tests + # uses: ./.github/workflows/tests.yml \ No newline at end of file diff --git 
a/.github/workflows/format.yml b/.github/workflows/format.yml index 8b36f5c40..701d4c945 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -25,13 +25,15 @@ jobs: shell: bash run: | pip install black - success=1 + failed_formatting=0 echo "Checking files ${{ inputs.files }}" for file in ${{ inputs.files }} do echo "Checking $file" - black --check $file || success=0 + black --check $file || failed_formatting=$((failed_formatting + 1)) done - if [ $success -eq 0 ]; then + if [ $failed_formatting -ne 0 ]; then + echo "Formatting failed for $failed_formatting files" exit 1 fi + exit 0 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index ad592c26b..1ec621ef3 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -25,13 +25,15 @@ jobs: shell: bash run: | pip install flake8 flake8-pyproject - success=1 + failed_linting=0 echo "Linting files ${{ inputs.files }}" for file in ${{ inputs.files }} do echo "Linting $file" - flake8 $file || success=0 + flake8 $file || failed_linting=$((failed_linting + 1)) done - if [ $success -eq 0 ]; then + if [ $failed_linting -ne 0 ]; then + echo "Linting failed on $failed_linting files" exit 1 fi + exit 0 From c58a78579006323021b1285ae36f3e002e11da02 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 11:11:26 +0100 Subject: [PATCH 046/195] Creation of new variables --- .github/workflows/code_quality.yml | 22 ++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index d1bfbaa78..ac2e5cacf 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -21,6 +21,8 @@ jobs: outputs: changed_files: ${{ steps.changed_files.outputs.changed_files }} changed_python_files: ${{ steps.changed_files.outputs.changed_python_files }} + changed_yaml_files: ${{ steps.changed_files.outputs.changed_yaml_files }} + changed_json_files: ${{ steps.changed_files.outputs.changed_json_files }} steps: - uses: actions/checkout@v2 with: @@ -35,7 +37,15 @@ jobs: - name: Get changed python files id: changed_python_files run: | - echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | xargs + echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | tr '\n' ' ' >> $GITHUB_OUTPUT + - name: Get changed yaml files + id: changed_yaml_files + run: | + echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | tr '\n' ' ' >> $GITHUB_OUTPUT + - name: Get changed json files + id: changed_json_files + run: | + echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | tr '\n' ' ' >> $GITHUB_OUTPUT echo_changed_files: name: Echo changed files @@ -44,7 +54,15 @@ jobs: steps: - name: Echo changed files run: | - echo "${{ needs.get_changed_files.outputs.changed_python_files }}" | tr ' ' '\n' + echo + echo "Summary of changed files:" + echo " Total changed files: ${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | wc -l + echo + echo " Changed python files: ${{ steps.changed_files.outputs.changed_python_files }}" | tr ' ' '\n' + echo + echo " Changed yaml files: ${{ steps.changed_files.outputs.changed_yaml_files }}" | tr ' ' '\n' + echo + echo " Changed json files: ${{ steps.changed_files.outputs.changed_json_files }}" | tr ' ' '\n' # format: # name: Check format of python From 05c33534b74116c230870e160b0a643030030b01 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 
11:16:52 +0100 Subject: [PATCH 047/195] trim strings --- .github/workflows/code_quality.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index ac2e5cacf..9dc96de98 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -37,15 +37,15 @@ jobs: - name: Get changed python files id: changed_python_files run: | - echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | tr '\n' ' ' >> $GITHUB_OUTPUT + echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | tr '\n' ' ' | xargs >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files run: | - echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | tr '\n' ' ' >> $GITHUB_OUTPUT + echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | tr '\n' ' ' | xargs >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files run: | - echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | tr '\n' ' ' >> $GITHUB_OUTPUT + echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | tr '\n' ' ' | xargs >> $GITHUB_OUTPUT echo_changed_files: name: Echo changed files From a9927e77d05340193bc7f924f443c9ab46aefdf4 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 11:20:55 +0100 Subject: [PATCH 048/195] Trying an echo for output --- .github/workflows/code_quality.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 9dc96de98..24e7c009a 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -37,15 +37,15 @@ jobs: - name: Get changed python files id: changed_python_files run: | - echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | tr '\n' ' ' | xargs >> $GITHUB_OUTPUT + echo $(echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | tr '\n' ' ' | xargs) >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files run: | - echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | tr '\n' ' ' | xargs >> $GITHUB_OUTPUT + echo $(echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | tr '\n' ' ' | xargs) >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files run: | - echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | tr '\n' ' ' | xargs >> $GITHUB_OUTPUT + echo $(echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | tr '\n' ' ' | xargs) >> $GITHUB_OUTPUT echo_changed_files: name: Echo changed files From 969af9aa7a829d21dad9b931d702eed58044da82 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 11:29:03 +0100 Subject: [PATCH 049/195] Assign variables properly --- .github/workflows/code_quality.yml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index 24e7c009a..930daad5a 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -37,19 +37,22 @@ jobs: - name: Get changed python files id: changed_python_files run: | - echo $(echo "${{ 
steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | tr '\n' ' ' | xargs) >> $GITHUB_OUTPUT + changed_python_files='echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | tr '\n' ' ' | xargs' + echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files run: | - echo $(echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | tr '\n' ' ' | xargs) >> $GITHUB_OUTPUT + changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | tr '\n' ' ' | xargs` + echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files run: | - echo $(echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | tr '\n' ' ' | xargs) >> $GITHUB_OUTPUT + changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | tr '\n' ' ' | xargs` + echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT echo_changed_files: - name: Echo changed files needs: get_changed_files + name: Echo changed files runs-on: ubuntu-latest steps: - name: Echo changed files From 46969f1e458894b92d3e7799558e4f2c7ea2e741 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 11:39:10 +0100 Subject: [PATCH 050/195] File rename and new file filtering --- .github/workflows/{code_quality.yml => pull_request.yml} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename .github/workflows/{code_quality.yml => pull_request.yml} (86%) diff --git a/.github/workflows/code_quality.yml b/.github/workflows/pull_request.yml similarity index 86% rename from .github/workflows/code_quality.yml rename to .github/workflows/pull_request.yml index 930daad5a..c34ab8fad 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/pull_request.yml @@ -59,13 +59,13 @@ jobs: run: | echo echo "Summary of changed files:" - echo " Total changed files: ${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | wc -l + echo " Total changed files: ${{ needs.get_changed_files.outputs.changed_files }}" | tr ' ' '\n' | wc -l echo - echo " Changed python files: ${{ steps.changed_files.outputs.changed_python_files }}" | tr ' ' '\n' + echo " Changed python files: ${{ needs.get_changed_files.outputs.changed_python_files }}" | tr ' ' '\n' echo - echo " Changed yaml files: ${{ steps.changed_files.outputs.changed_yaml_files }}" | tr ' ' '\n' + echo " Changed yaml files: ${{ needs.get_changed_files.outputs.changed_yaml_files }}" | tr ' ' '\n' echo - echo " Changed json files: ${{ steps.changed_files.outputs.changed_json_files }}" | tr ' ' '\n' + echo " Changed json files: ${{ needs.get_changed_files.outputs.changed_json_files }}" | tr ' ' '\n' # format: # name: Check format of python From eadfc925e0f6b87fefba63cecda720a9991215a8 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 11:41:49 +0100 Subject: [PATCH 051/195] Correct quote --- .github/workflows/pull_request.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index c34ab8fad..007d36a43 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -37,7 +37,7 @@ jobs: - name: Get changed python files id: changed_python_files run: | - changed_python_files='echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' 
'\n' | grep -E '\.py$' | tr '\n' ' ' | xargs' + changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | tr '\n' ' ' | xargs` echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files From f21bf06ce055614ed2708d49ec8f5337a10b2fb6 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 11:48:28 +0100 Subject: [PATCH 052/195] Change to piping --- .github/workflows/pull_request.yml | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 007d36a43..0c40c7529 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -37,17 +37,17 @@ jobs: - name: Get changed python files id: changed_python_files run: | - changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | tr '\n' ' ' | xargs` + changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | xargs` echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files run: | - changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | tr '\n' ' ' | xargs` + changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | xargs` echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files run: | - changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | tr '\n' ' ' | xargs` + changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | xargs` echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT echo_changed_files: @@ -59,13 +59,17 @@ jobs: run: | echo echo "Summary of changed files:" - echo " Total changed files: ${{ needs.get_changed_files.outputs.changed_files }}" | tr ' ' '\n' | wc -l + echo " Total changed files:" + echo "${{ needs.get_changed_files.outputs.changed_files }}" | tr ' ' '\n' | wc -l echo - echo " Changed python files: ${{ needs.get_changed_files.outputs.changed_python_files }}" | tr ' ' '\n' + echo " Changed python files:" + echo "${{ needs.get_changed_files.outputs.changed_python_files }}" | tr ' ' '\n' echo - echo " Changed yaml files: ${{ needs.get_changed_files.outputs.changed_yaml_files }}" | tr ' ' '\n' + echo " Changed yaml files:" + echo "${{ needs.get_changed_files.outputs.changed_yaml_files }}" | tr ' ' '\n' echo - echo " Changed json files: ${{ needs.get_changed_files.outputs.changed_json_files }}" | tr ' ' '\n' + echo " Changed json files:" + echo "${{ needs.get_changed_files.outputs.changed_json_files }}" | tr ' ' '\n' # format: # name: Check format of python From 07577fcf80cd8f95afe3f230226659b47ced928f Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 11:51:12 +0100 Subject: [PATCH 053/195] Debugging statements --- .github/workflows/pull_request.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 0c40c7529..fc3869b59 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -31,23 +31,31 @@ jobs: run: gh --version - name: Get changed files id: changed_files + shell: bash run: | 
changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` + echo $changed_files echo "changed_files=$changed_files" >> $GITHUB_OUTPUT - name: Get changed python files id: changed_python_files + shell: bash run: | changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | xargs` + echo $changed_python_files echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files + shell: bash run: | changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | xargs` + echo $changed_yaml_files echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files + shell: bash run: | changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | xargs` + echo $changed_json_files echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT echo_changed_files: From 6e8bae5ed2c0f8e839c4c93eaa9abd0b14a7033a Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 12:07:25 +0100 Subject: [PATCH 054/195] Conditional setting of output --- .github/workflows/pull_request.yml | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index fc3869b59..6208a2a74 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -42,13 +42,22 @@ jobs: run: | changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | xargs` echo $changed_python_files - echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT + if [ -z "$changed_python_files" ]; then + echo "No python files changed" + else + echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT + fi - name: Get changed yaml files id: changed_yaml_files shell: bash run: | changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | xargs` echo $changed_yaml_files + if [ -z "$changed_yaml_files" ]; then + echo "No yaml files changed" + else + echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT + fi echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files @@ -56,7 +65,11 @@ jobs: run: | changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | xargs` echo $changed_json_files - echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT + if [ -z "$changed_json_files" ]; then + echo "No json files changed" + else + echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT + fi echo_changed_files: needs: get_changed_files From 9a719e91647826c5f88383b70f9bde5f59b2ddc9 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 12:13:23 +0100 Subject: [PATCH 055/195] Testing a hypothesis --- .github/workflows/pull_request.yml | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 6208a2a74..b2387e302 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -40,24 +40,15 @@ jobs: id: changed_python_files shell: bash run: | - changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | xargs` + 
changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.somethingelse$' | xargs` echo $changed_python_files - if [ -z "$changed_python_files" ]; then - echo "No python files changed" - else - echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - fi + echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files shell: bash run: | changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | xargs` echo $changed_yaml_files - if [ -z "$changed_yaml_files" ]; then - echo "No yaml files changed" - else - echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - fi echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files @@ -65,11 +56,7 @@ jobs: run: | changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | xargs` echo $changed_json_files - if [ -z "$changed_json_files" ]; then - echo "No json files changed" - else - echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT - fi + echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT echo_changed_files: needs: get_changed_files From c399ac04dcf410d79c52b1d84777745c85eccb5d Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 13:19:34 +0100 Subject: [PATCH 056/195] Testing which step fails --- .github/workflows/pull_request.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index b2387e302..3cda8cee3 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -40,6 +40,9 @@ jobs: id: changed_python_files shell: bash run: | + changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}"` + changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n'` + changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.somethingelse$'` changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.somethingelse$' | xargs` echo $changed_python_files echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT From a0c18ecc02fef8b3fff758a16c2d8d15477ee953 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 13:22:41 +0100 Subject: [PATCH 057/195] Localising error --- .github/workflows/pull_request.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 3cda8cee3..3f641f68d 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -40,11 +40,8 @@ jobs: id: changed_python_files shell: bash run: | - changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}"` - changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n'` - changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.somethingelse$'` - changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.somethingelse$' | xargs` - echo $changed_python_files + changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.somethingelse$' | xargs` &&\ + echo $changed_python_files &&\ echo 
"changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files From e86822703cd17de3bd546fca818bb3dfc09ac146 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 13:35:23 +0100 Subject: [PATCH 058/195] pipefail issue --- .github/workflows/pull_request.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 3f641f68d..2f40e78f4 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -38,21 +38,21 @@ jobs: echo "changed_files=$changed_files" >> $GITHUB_OUTPUT - name: Get changed python files id: changed_python_files - shell: bash + shell: bash -ileo pipefail {0} run: | - changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.somethingelse$' | xargs` &&\ + changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | xargs` &&\ echo $changed_python_files &&\ echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files - shell: bash + shell: bash -ileo pipefail {0} run: | changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | xargs` echo $changed_yaml_files echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files - shell: bash + shell: bash -ileo pipefail {0} run: | changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | xargs` echo $changed_json_files From 3821874f56a3a3a4073524436c893f4263fc5fd1 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 13:51:42 +0100 Subject: [PATCH 059/195] Suppress grep errors if they occur --- .github/workflows/pull_request.yml | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 2f40e78f4..995d610ec 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -38,23 +38,22 @@ jobs: echo "changed_files=$changed_files" >> $GITHUB_OUTPUT - name: Get changed python files id: changed_python_files - shell: bash -ileo pipefail {0} + shell: bash run: | - changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' | xargs` &&\ - echo $changed_python_files &&\ + changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | (grep -E '\.py$' || "") | xargs` &&\ echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files - shell: bash -ileo pipefail {0} + shell: bash run: | - changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' | xargs` + changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | (grep -E '\.yaml$|\.yml$' || "") | xargs` echo $changed_yaml_files echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files - shell: bash -ileo pipefail {0} + shell: bash run: | - changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' | xargs` + changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | (grep -E '\.json$' || "") | 
xargs` echo $changed_json_files echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT From ef53b5aa0335283a9f1bcab1331981d52b220199 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 13:59:59 +0100 Subject: [PATCH 060/195] Filtering should be fault tolerant now --- .github/workflows/pull_request.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 995d610ec..65e7fd224 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -40,20 +40,21 @@ jobs: id: changed_python_files shell: bash run: | - changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | (grep -E '\.py$' || "") | xargs` &&\ + changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' || "" | xargs` + echo $changed_python_files echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files shell: bash run: | - changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | (grep -E '\.yaml$|\.yml$' || "") | xargs` + changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' || "" | xargs` echo $changed_yaml_files echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files shell: bash run: | - changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | (grep -E '\.json$' || "") | xargs` + changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' || "" | xargs` echo $changed_json_files echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT From 8c9cc387716e8ea1359c12261f6c7338d8620d31 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 14:09:01 +0100 Subject: [PATCH 061/195] GitHub doesn't handle multiline strings well --- .github/workflows/pull_request.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 65e7fd224..cea6c2e78 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -40,21 +40,21 @@ jobs: id: changed_python_files shell: bash run: | - changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' || "" | xargs` + changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' || "" | tr '\n' ' '` echo $changed_python_files echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files shell: bash run: | - changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' || "" | xargs` + changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' || "" | tr '\n' ' '` echo $changed_yaml_files echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files shell: bash run: | - changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' || "" | xargs` + changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' || "" | tr '\n' ' '` echo 
$changed_json_files echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT From ae1e8e3bf0d1ed6b5e068fedc66397151f21616e Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 14:50:54 +0100 Subject: [PATCH 062/195] File filter in python --- .github/workflows/pull_request.yml | 6 +++--- deploy/_filter_files.py | 20 ++++++++++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) create mode 100644 deploy/_filter_files.py diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index cea6c2e78..aec231b41 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -40,21 +40,21 @@ jobs: id: changed_python_files shell: bash run: | - changed_python_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.py$' || "" | tr '\n' ' '` + changed_python_files=`${{ github.workspace}}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` echo $changed_python_files echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files shell: bash run: | - changed_yaml_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.yaml$|\.yml$' || "" | tr '\n' ' '` + changed_yaml_files=`${{ github.workspace}}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` echo $changed_yaml_files echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files shell: bash run: | - changed_json_files=`echo "${{ steps.changed_files.outputs.changed_files }}" | tr ' ' '\n' | grep -E '\.json$' || "" | tr '\n' ' '` + changed_json_files=`${{ github.workspace}}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` echo $changed_json_files echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT diff --git a/deploy/_filter_files.py b/deploy/_filter_files.py new file mode 100644 index 000000000..5104ab3eb --- /dev/null +++ b/deploy/_filter_files.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python3 + +import sys +from typing import List + + +def main(argv: List[str]) -> None: + file_extension: str = argv[0] + files_in: List[str] = argv[1:] + + if file_extension.startswith("."): + file_extension = file_extension[1:] + + files_out = [file for file in files_in if file.endswith(f".{file_extension}")] + + sys.stdout.write(" ".join(files_out)) + + +if __name__ == "__main__": + main(sys.argv[1:]) From 2dc25d6acdf073344a68ed8115d4a4287d410ab5 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 15:21:53 +0100 Subject: [PATCH 063/195] Make executable --- deploy/_filter_files.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 deploy/_filter_files.py diff --git a/deploy/_filter_files.py b/deploy/_filter_files.py old mode 100644 new mode 100755 From e969ec2752dd040e29d762778616336edc90f8c7 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 1 Sep 2023 15:30:05 +0100 Subject: [PATCH 064/195] Output checking --- .github/workflows/pull_request.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index aec231b41..550bcbe0f 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -40,21 +40,21 @@ jobs: id: changed_python_files shell: bash run: | - changed_python_files=`${{ github.workspace}}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` + 
changed_python_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` echo $changed_python_files echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files shell: bash run: | - changed_yaml_files=`${{ github.workspace}}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` + changed_yaml_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` echo $changed_yaml_files echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files shell: bash run: | - changed_json_files=`${{ github.workspace}}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` + changed_json_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` echo $changed_json_files echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT @@ -71,13 +71,13 @@ jobs: echo "${{ needs.get_changed_files.outputs.changed_files }}" | tr ' ' '\n' | wc -l echo echo " Changed python files:" - echo "${{ needs.get_changed_files.outputs.changed_python_files }}" | tr ' ' '\n' + echo "${{ needs.get_changed_files.outputs.changed_python_files }}" | tr ' ' '\n' | wc -l echo echo " Changed yaml files:" - echo "${{ needs.get_changed_files.outputs.changed_yaml_files }}" | tr ' ' '\n' + echo "${{ needs.get_changed_files.outputs.changed_yaml_files }}" | tr ' ' '\n' | wc -l echo echo " Changed json files:" - echo "${{ needs.get_changed_files.outputs.changed_json_files }}" | tr ' ' '\n' + echo "${{ needs.get_changed_files.outputs.changed_json_files }}" | tr ' ' '\n' | wc -l # format: # name: Check format of python From 60802f432acbe1ad3ebf990ef0b0f2dafd752674 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 09:38:02 +0100 Subject: [PATCH 065/195] Expose whole strings to see what is mutating them --- .github/workflows/pull_request.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 550bcbe0f..b2b679ef2 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -71,13 +71,13 @@ jobs: echo "${{ needs.get_changed_files.outputs.changed_files }}" | tr ' ' '\n' | wc -l echo echo " Changed python files:" - echo "${{ needs.get_changed_files.outputs.changed_python_files }}" | tr ' ' '\n' | wc -l + echo "${{ needs.get_changed_files.outputs.changed_python_files }}" echo echo " Changed yaml files:" - echo "${{ needs.get_changed_files.outputs.changed_yaml_files }}" | tr ' ' '\n' | wc -l + echo "${{ needs.get_changed_files.outputs.changed_yaml_files }}" echo echo " Changed json files:" - echo "${{ needs.get_changed_files.outputs.changed_json_files }}" | tr ' ' '\n' | wc -l + echo "${{ needs.get_changed_files.outputs.changed_json_files }}" # format: # name: Check format of python From 5d4e6404a0fb9753ca1c1c1dd5a8af14936f8487 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 10:30:41 +0100 Subject: [PATCH 066/195] Added new opportunities for debugging --- .github/workflows/pull_request.yml | 10 ++++------ deploy/_filter_files.py | 2 +- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index b2b679ef2..93b11b729 100644 --- a/.github/workflows/pull_request.yml +++ 
b/.github/workflows/pull_request.yml @@ -27,35 +27,33 @@ jobs: - uses: actions/checkout@v2 with: ref: ${{ github.head_ref || github.ref }} - - name: GH CLI Version - run: gh --version - name: Get changed files id: changed_files shell: bash run: | changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` - echo $changed_files + echo "Changed files: $changed_files" echo "changed_files=$changed_files" >> $GITHUB_OUTPUT - name: Get changed python files id: changed_python_files shell: bash run: | changed_python_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` - echo $changed_python_files + echo "Changed python files: $changed_python_files" echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files shell: bash run: | changed_yaml_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` - echo $changed_yaml_files + echo "Changed yaml files: $changed_yaml_files" echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files shell: bash run: | changed_json_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` - echo $changed_json_files + echo "Changed json files: $changed_json_files" echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT echo_changed_files: diff --git a/deploy/_filter_files.py b/deploy/_filter_files.py index 5104ab3eb..23856c96d 100755 --- a/deploy/_filter_files.py +++ b/deploy/_filter_files.py @@ -13,7 +13,7 @@ def main(argv: List[str]) -> None: files_out = [file for file in files_in if file.endswith(f".{file_extension}")] - sys.stdout.write(" ".join(files_out)) + print(" ".join(files_out)) if __name__ == "__main__": From dba23378bbe7ec3ad136f229f26af7ed6d1ccffa Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 10:36:43 +0100 Subject: [PATCH 067/195] Checking entire context --- .github/workflows/pull_request.yml | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 93b11b729..5519b7aac 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -63,19 +63,8 @@ jobs: steps: - name: Echo changed files run: | - echo - echo "Summary of changed files:" - echo " Total changed files:" - echo "${{ needs.get_changed_files.outputs.changed_files }}" | tr ' ' '\n' | wc -l - echo - echo " Changed python files:" - echo "${{ needs.get_changed_files.outputs.changed_python_files }}" - echo - echo " Changed yaml files:" - echo "${{ needs.get_changed_files.outputs.changed_yaml_files }}" - echo - echo " Changed json files:" - echo "${{ needs.get_changed_files.outputs.changed_json_files }}" + echo ${{ toJson(needs.get_changed_files.outputs) }} + # format: # name: Check format of python From a81bd717f6f6c20129710155e0c4ae0eb1fcadf3 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 10:45:27 +0100 Subject: [PATCH 068/195] Testing an idea --- .github/workflows/pull_request.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 5519b7aac..45f7c93de 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -38,23 +38,23 @@ jobs: id: changed_python_files shell: bash run: | - 
changed_python_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` - echo "Changed python files: $changed_python_files" - echo "changed_python_files=$changed_python_files" >> $GITHUB_OUTPUT + cpf=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` + echo "Changed python files: $cpf" + echo "changed_python_files=$cpf" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files shell: bash run: | - changed_yaml_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` - echo "Changed yaml files: $changed_yaml_files" - echo "changed_yaml_files=$changed_yaml_files" >> $GITHUB_OUTPUT + cyf=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` + echo "Changed yaml files: $cyf" + echo "changed_yaml_files=$cyf" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files shell: bash run: | - changed_json_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` - echo "Changed json files: $changed_json_files" - echo "changed_json_files=$changed_json_files" >> $GITHUB_OUTPUT + cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` + echo "Changed json files: $cjf" + echo "changed_json_files=$cjf" >> $GITHUB_OUTPUT echo_changed_files: needs: get_changed_files From ef61d1845a997744c36a5825b9987f36f7b63082 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 10:52:42 +0100 Subject: [PATCH 069/195] Trying a test value --- .github/workflows/pull_request.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 45f7c93de..e761b7e96 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -23,6 +23,7 @@ jobs: changed_python_files: ${{ steps.changed_files.outputs.changed_python_files }} changed_yaml_files: ${{ steps.changed_files.outputs.changed_yaml_files }} changed_json_files: ${{ steps.changed_files.outputs.changed_json_files }} + test_value: ${{ steps.changed_files.outputs.test_value }} steps: - uses: actions/checkout@v2 with: @@ -55,6 +56,11 @@ jobs: cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` echo "Changed json files: $cjf" echo "changed_json_files=$cjf" >> $GITHUB_OUTPUT + - name: Get test value + id: test_value + shell: bash + run: | + echo "test_value=foo" >> $GITHUB_OUTPUT echo_changed_files: needs: get_changed_files From aa6c8d75411592613a26411130684fc4ee9f15f4 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 11:20:19 +0100 Subject: [PATCH 070/195] Debug step --- .github/workflows/pull_request.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index e761b7e96..723641a90 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -68,8 +68,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Echo changed files + env: + OUTPUTS: ${{ toJson(needs.get_changed_files.outputs) }} run: | - echo ${{ toJson(needs.get_changed_files.outputs) }} + echo "All outputs: $OUTPUTS" # format: From 53332708c5fe03217a6dcbb20923454cbf166674 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 11:41:32 +0100 Subject: 
[PATCH 071/195] Testing something --- .github/workflows/pull_request.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 723641a90..6d755b840 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -34,33 +34,33 @@ jobs: run: | changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` echo "Changed files: $changed_files" - echo "changed_files=$changed_files" >> $GITHUB_OUTPUT + echo "changed_files=$changed_files" >> "$GITHUB_OUTPUT" - name: Get changed python files id: changed_python_files shell: bash run: | cpf=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` echo "Changed python files: $cpf" - echo "changed_python_files=$cpf" >> $GITHUB_OUTPUT + echo "changed_python_files=$cpf" >> "$GITHUB_OUTPUT" - name: Get changed yaml files id: changed_yaml_files shell: bash run: | cyf=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` echo "Changed yaml files: $cyf" - echo "changed_yaml_files=$cyf" >> $GITHUB_OUTPUT + echo "changed_yaml_files=$cyf" >> "$GITHUB_OUTPUT" - name: Get changed json files id: changed_json_files shell: bash run: | cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` echo "Changed json files: $cjf" - echo "changed_json_files=$cjf" >> $GITHUB_OUTPUT + echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT" - name: Get test value id: test_value shell: bash run: | - echo "test_value=foo" >> $GITHUB_OUTPUT + echo "test_value=foo" >> "$GITHUB_OUTPUT" From 9859edaf763fb792242fa8fd277cdd30b53d3571 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 11:58:42 +0100 Subject: [PATCH 072/195] Debugging step --- .editorconfig | 36 ++++++++++++++++++++++++++++ .github/workflows/debug_contexts.yml | 33 +++++++++++++++++++++++++ .github/workflows/pull_request.yml | 35 ++++++++++++++------------- 3 files changed, 88 insertions(+), 16 deletions(-) create mode 100644 .editorconfig create mode 100644 .github/workflows/debug_contexts.yml diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 000000000..b42311381 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,36 @@ +# http://editorconfig.org +root = true + +[*] +indent_style = space +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +# Use 4 spaces for the Python files +[*.py] +indent_size = 4 +max_line_length = 160 + +# The JSON files contain newlines inconsistently +[*.json] +insert_final_newline = ignore + +# Minified JavaScript files shouldn't be changed +[**.min.js] +indent_style = ignore +insert_final_newline = ignore + +# Makefiles always use tabs for indentation +[Makefile] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + +[*.{yml,yaml}] +indent_size = 2 +insert_final_newline = true + diff --git a/.github/workflows/debug_contexts.yml b/.github/workflows/debug_contexts.yml new file mode 100644 index 000000000..bed1589f9 --- /dev/null +++ b/.github/workflows/debug_contexts.yml @@ -0,0 +1,33 @@ +# A useful debugging workflow to display the GitHub context for a run + +on: + workflow_call: + +jobs: + Debugging: + runs-on: ubuntu-latest + steps: + - name: Dump GitHub context + env: + GITHUB_CONTEXT: ${{ toJson(github) }} + run: echo "$GITHUB_CONTEXT" + - name: Dump job 
context + env: + JOB_CONTEXT: ${{ toJson(job) }} + run: echo "$JOB_CONTEXT" + - name: Dump steps context + env: + STEPS_CONTEXT: ${{ toJson(steps) }} + run: echo "$STEPS_CONTEXT" + - name: Dump runner context + env: + RUNNER_CONTEXT: ${{ toJson(runner) }} + run: echo "$RUNNER_CONTEXT" + - name: Dump strategy context + env: + STRATEGY_CONTEXT: ${{ toJson(strategy) }} + run: echo "$STRATEGY_CONTEXT" + - name: Dump matrix context + env: + MATRIX_CONTEXT: ${{ toJson(matrix) }} + run: echo "$MATRIX_CONTEXT" diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 6d755b840..d28505b86 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -4,13 +4,13 @@ run-name: Pull Request on: pull_request: types: [opened, synchronize, reopened, closed] - - + + # Prevent running concurrently concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true - + env: GH_TOKEN: ${{ github.token }} @@ -62,25 +62,29 @@ jobs: run: | echo "test_value=foo" >> "$GITHUB_OUTPUT" - echo_changed_files: - needs: get_changed_files - name: Echo changed files - runs-on: ubuntu-latest - steps: - - name: Echo changed files - env: - OUTPUTS: ${{ toJson(needs.get_changed_files.outputs) }} - run: | - echo "All outputs: $OUTPUTS" +# echo_changed_files: +# needs: get_changed_files +# name: Echo changed files +# runs-on: ubuntu-latest +# steps: +# - name: Echo changed files +# env: +# OUTPUTS: ${{ toJson(needs.get_changed_files.outputs) }} +# run: | +# echo "All outputs: $OUTPUTS" + + debug: + name: Debug + uses: ./.github/workflows/debug_contexts.yml + - # format: # name: Check format of python # needs: get_changed_files # uses: ./.github/workflows/format.yml # with: # files: ${{ needs.get_changed_files.outputs.changed_files }} - + # lint: # name: Lint python @@ -93,4 +97,3 @@ jobs: # name: Run tests # uses: ./.github/workflows/tests.yml - \ No newline at end of file From beae74fc45850836da8b49b644ee921f155ef7dd Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 12:06:57 +0100 Subject: [PATCH 073/195] Dump needs context --- .github/workflows/pull_request.yml | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index d28505b86..501d6fc80 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -62,20 +62,16 @@ jobs: run: | echo "test_value=foo" >> "$GITHUB_OUTPUT" -# echo_changed_files: -# needs: get_changed_files -# name: Echo changed files -# runs-on: ubuntu-latest -# steps: -# - name: Echo changed files -# env: -# OUTPUTS: ${{ toJson(needs.get_changed_files.outputs) }} -# run: | -# echo "All outputs: $OUTPUTS" - - debug: - name: Debug - uses: ./.github/workflows/debug_contexts.yml + echo_changed_files: + needs: get_changed_files + name: Echo changed files + runs-on: ubuntu-latest + steps: + - name: Echo changed files + env: + OUTPUTS: ${{ toJson(needs) }} + run: | + echo "All outputs: $OUTPUTS" # format: From b58f9786b5fe9f42e75ce0f5896205c59b0332bc Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 12:15:03 +0100 Subject: [PATCH 074/195] Testing with xargs --- .github/workflows/pull_request.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 501d6fc80..a8680d0b2 100644 --- a/.github/workflows/pull_request.yml +++ 
b/.github/workflows/pull_request.yml @@ -39,21 +39,21 @@ jobs: id: changed_python_files shell: bash run: | - cpf=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` + cpf=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }} | xargs` echo "Changed python files: $cpf" echo "changed_python_files=$cpf" >> "$GITHUB_OUTPUT" - name: Get changed yaml files id: changed_yaml_files shell: bash run: | - cyf=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` + cyf=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }} | xargs` echo "Changed yaml files: $cyf" echo "changed_yaml_files=$cyf" >> "$GITHUB_OUTPUT" - name: Get changed json files id: changed_json_files shell: bash run: | - cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` + cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }} | xargs` echo "Changed json files: $cjf" echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT" - name: Get test value From 47720e30b27bdcd1d9da9c2994123c78be4caa5d Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 12:17:01 +0100 Subject: [PATCH 075/195] Trying removing the output section --- .github/workflows/pull_request.yml | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index a8680d0b2..4d20da652 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -18,12 +18,6 @@ jobs: get_changed_files: name: Get changed files runs-on: ubuntu-latest - outputs: - changed_files: ${{ steps.changed_files.outputs.changed_files }} - changed_python_files: ${{ steps.changed_files.outputs.changed_python_files }} - changed_yaml_files: ${{ steps.changed_files.outputs.changed_yaml_files }} - changed_json_files: ${{ steps.changed_files.outputs.changed_json_files }} - test_value: ${{ steps.changed_files.outputs.test_value }} steps: - uses: actions/checkout@v2 with: @@ -39,21 +33,21 @@ jobs: id: changed_python_files shell: bash run: | - cpf=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }} | xargs` + cpf=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` echo "Changed python files: $cpf" echo "changed_python_files=$cpf" >> "$GITHUB_OUTPUT" - name: Get changed yaml files id: changed_yaml_files shell: bash run: | - cyf=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }} | xargs` + cyf=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` echo "Changed yaml files: $cyf" echo "changed_yaml_files=$cyf" >> "$GITHUB_OUTPUT" - name: Get changed json files id: changed_json_files shell: bash run: | - cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }} | xargs` + cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` echo "Changed json files: $cjf" echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT" - name: Get test value From 4f934b984a7479f3721857cdf3b28d40c684a925 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 
Sep 2023 12:20:12 +0100 Subject: [PATCH 076/195] Testing variable name length --- .github/workflows/pull_request.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 4d20da652..3b4c7e8ce 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -18,6 +18,12 @@ jobs: get_changed_files: name: Get changed files runs-on: ubuntu-latest + outputs: + changed_files: ${{ steps.changed_files.outputs.changed_files }} + python_files: ${{ steps.changed_files.outputs.python_files }} + changed_yaml_files: ${{ steps.changed_files.outputs.changed_yaml_files }} + changed_json_files: ${{ steps.changed_files.outputs.changed_json_files }} + test_value: ${{ steps.changed_files.outputs.test_value }} steps: - uses: actions/checkout@v2 with: @@ -35,7 +41,7 @@ jobs: run: | cpf=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` echo "Changed python files: $cpf" - echo "changed_python_files=$cpf" >> "$GITHUB_OUTPUT" + echo "python_files=$cpf" >> "$GITHUB_OUTPUT" - name: Get changed yaml files id: changed_yaml_files shell: bash From ed1e4818130bbf9ca1f0b11359d35d909f7bf6fa Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 12:22:20 +0100 Subject: [PATCH 077/195] View job --- .github/workflows/pull_request.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 3b4c7e8ce..73b90df98 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -69,7 +69,7 @@ jobs: steps: - name: Echo changed files env: - OUTPUTS: ${{ toJson(needs) }} + OUTPUTS: ${{ toJson(job) }} run: | echo "All outputs: $OUTPUTS" From e4e34ee402f0d0c5c8b2eb4271090fdb3ecff629 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 12:26:39 +0100 Subject: [PATCH 078/195] Confirm set succeeds --- .github/workflows/pull_request.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 73b90df98..b068f339d 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -34,33 +34,33 @@ jobs: run: | changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` echo "Changed files: $changed_files" - echo "changed_files=$changed_files" >> "$GITHUB_OUTPUT" + echo "changed_files=$changed_files" >> "$GITHUB_OUTPUT"&&echo"Set." - name: Get changed python files id: changed_python_files shell: bash run: | cpf=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` echo "Changed python files: $cpf" - echo "python_files=$cpf" >> "$GITHUB_OUTPUT" + echo "python_files=$cpf" >> "$GITHUB_OUTPUT"&&echo"Set." - name: Get changed yaml files id: changed_yaml_files shell: bash run: | cyf=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` echo "Changed yaml files: $cyf" - echo "changed_yaml_files=$cyf" >> "$GITHUB_OUTPUT" + echo "changed_yaml_files=$cyf" >> "$GITHUB_OUTPUT"&&echo"Set." 
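+          # (the appended && echo is meant to print "Set." only when the write to $GITHUB_OUTPUT itself succeeded)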
- name: Get changed json files id: changed_json_files shell: bash run: | cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` echo "Changed json files: $cjf" - echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT" + echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT"&&echo"Set." - name: Get test value id: test_value shell: bash run: | - echo "test_value=foo" >> "$GITHUB_OUTPUT" + echo "test_value=foo" >> "$GITHUB_OUTPUT"&&echo"Set." echo_changed_files: needs: get_changed_files @@ -69,7 +69,7 @@ jobs: steps: - name: Echo changed files env: - OUTPUTS: ${{ toJson(job) }} + OUTPUTS: ${{ toJson(needs) }} run: | echo "All outputs: $OUTPUTS" From e55a6060d6805cb3756e0d3f4eabe2b3a97e5bdb Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 12:27:54 +0100 Subject: [PATCH 079/195] Syntax error corrected --- .github/workflows/pull_request.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index b068f339d..fa90526c3 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -34,33 +34,33 @@ jobs: run: | changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` echo "Changed files: $changed_files" - echo "changed_files=$changed_files" >> "$GITHUB_OUTPUT"&&echo"Set." + echo "changed_files=$changed_files" >> "$GITHUB_OUTPUT"&&echo "Set." - name: Get changed python files id: changed_python_files shell: bash run: | cpf=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` echo "Changed python files: $cpf" - echo "python_files=$cpf" >> "$GITHUB_OUTPUT"&&echo"Set." + echo "python_files=$cpf" >> "$GITHUB_OUTPUT"&&echo "Set." - name: Get changed yaml files id: changed_yaml_files shell: bash run: | cyf=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` echo "Changed yaml files: $cyf" - echo "changed_yaml_files=$cyf" >> "$GITHUB_OUTPUT"&&echo"Set." + echo "changed_yaml_files=$cyf" >> "$GITHUB_OUTPUT"&&echo "Set." - name: Get changed json files id: changed_json_files shell: bash run: | cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` echo "Changed json files: $cjf" - echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT"&&echo"Set." + echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT"&&echo "Set." - name: Get test value id: test_value shell: bash run: | - echo "test_value=foo" >> "$GITHUB_OUTPUT"&&echo"Set." + echo "test_value=foo" >> "$GITHUB_OUTPUT"&&echo "Set." 
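+          # test_value is a static canary: if even this fixed string never surfaces in the job's outputs, the fault lies in the output wiring rather than in the helper scripts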
echo_changed_files: needs: get_changed_files From 52b8b6ffa4e44c8bcdd2a5741bf2ebd0b8d9f598 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 12:34:20 +0100 Subject: [PATCH 080/195] Correct commit --- .github/workflows/pull_request.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index fa90526c3..ab868831f 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -53,7 +53,7 @@ jobs: id: changed_json_files shell: bash run: | - cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` + cjf=`python3 ${{ github.workspace }}/deploy/_filter_fi6py json ${{ steps.changed_files.outputs.changed_files }}` echo "Changed json files: $cjf" echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT"&&echo "Set." - name: Get test value From 9b5a62e3408368698963fa63c6b3e37543f339b9 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 12:35:22 +0100 Subject: [PATCH 081/195] JSON error --- .github/workflows/pull_request.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index ab868831f..4d8353380 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -53,7 +53,7 @@ jobs: id: changed_json_files shell: bash run: | - cjf=`python3 ${{ github.workspace }}/deploy/_filter_fi6py json ${{ steps.changed_files.outputs.changed_files }}` + cjf=`python3 ${{ github.workspace }}/deploy/_filter_fi.py json ${{ steps.changed_files.outputs.changed_files }}` echo "Changed json files: $cjf" echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT"&&echo "Set." - name: Get test value From 3645e65ca5ab786554679f57afb872214ca8a2d7 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 12:36:18 +0100 Subject: [PATCH 082/195] filename --- .github/workflows/pull_request.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 4d8353380..fa90526c3 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -53,7 +53,7 @@ jobs: id: changed_json_files shell: bash run: | - cjf=`python3 ${{ github.workspace }}/deploy/_filter_fi.py json ${{ steps.changed_files.outputs.changed_files }}` + cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` echo "Changed json files: $cjf" echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT"&&echo "Set." 
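# (deploy/_filter_files.py echoes back only the paths from the list that match the extension given as its first argument - py, yml or json above)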
- name: Get test value From 49b9943a819d47f0d4c8c2f8cbce9339d30a7489 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 14:21:30 +0100 Subject: [PATCH 083/195] Trying one last thing, and then taking a whole different approach --- .github/workflows/pull_request.yml | 62 +++++++++++++++--------------- 1 file changed, 31 insertions(+), 31 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index fa90526c3..c4c48ecf1 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -25,37 +25,37 @@ jobs: changed_json_files: ${{ steps.changed_files.outputs.changed_json_files }} test_value: ${{ steps.changed_files.outputs.test_value }} steps: - - uses: actions/checkout@v2 - with: - ref: ${{ github.head_ref || github.ref }} - - name: Get changed files - id: changed_files - shell: bash - run: | - changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` - echo "Changed files: $changed_files" - echo "changed_files=$changed_files" >> "$GITHUB_OUTPUT"&&echo "Set." - - name: Get changed python files - id: changed_python_files - shell: bash - run: | - cpf=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` - echo "Changed python files: $cpf" - echo "python_files=$cpf" >> "$GITHUB_OUTPUT"&&echo "Set." - - name: Get changed yaml files - id: changed_yaml_files - shell: bash - run: | - cyf=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` - echo "Changed yaml files: $cyf" - echo "changed_yaml_files=$cyf" >> "$GITHUB_OUTPUT"&&echo "Set." - - name: Get changed json files - id: changed_json_files - shell: bash - run: | - cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` - echo "Changed json files: $cjf" - echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT"&&echo "Set." + # - uses: actions/checkout@v2 + # with: + # ref: ${{ github.head_ref || github.ref }} + # - name: Get changed files + # id: changed_files + # shell: bash + # run: | + # changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` + # echo "Changed files: $changed_files" + # echo "changed_files=$changed_files" >> "$GITHUB_OUTPUT"&&echo "Set." + # - name: Get changed python files + # id: changed_python_files + # shell: bash + # run: | + # cpf=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` + # echo "Changed python files: $cpf" + # echo "python_files=$cpf" >> "$GITHUB_OUTPUT"&&echo "Set." + # - name: Get changed yaml files + # id: changed_yaml_files + # shell: bash + # run: | + # cyf=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` + # echo "Changed yaml files: $cyf" + # echo "changed_yaml_files=$cyf" >> "$GITHUB_OUTPUT"&&echo "Set." + # - name: Get changed json files + # id: changed_json_files + # shell: bash + # run: | + # cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` + # echo "Changed json files: $cjf" + # echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT"&&echo "Set." 
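+      # every dynamic step above is now commented out; only the static test_value step below stays active, to isolate whether any output reaches the job at all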
- name: Get test value id: test_value shell: bash From beda9c5966ee860889cd6532d8b3a5062b5ba14d Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 14:29:17 +0100 Subject: [PATCH 084/195] Correcting the issue --- .github/workflows/pull_request.yml | 72 +++++++++++++++--------------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index c4c48ecf1..37789a2b4 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -20,47 +20,47 @@ jobs: runs-on: ubuntu-latest outputs: changed_files: ${{ steps.changed_files.outputs.changed_files }} - python_files: ${{ steps.changed_files.outputs.python_files }} - changed_yaml_files: ${{ steps.changed_files.outputs.changed_yaml_files }} - changed_json_files: ${{ steps.changed_files.outputs.changed_json_files }} - test_value: ${{ steps.changed_files.outputs.test_value }} + python_files: ${{ steps.changed_python_files.outputs.changed_files }} + changed_yaml_files: ${{ steps.changed_yaml_files.outputs.changed_files }} + changed_json_files: ${{ steps.changed_json_files.outputs.changed_files }} + test_value: ${{ steps.test_value.outputs.test_value }} steps: - # - uses: actions/checkout@v2 - # with: - # ref: ${{ github.head_ref || github.ref }} - # - name: Get changed files - # id: changed_files - # shell: bash - # run: | - # changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` - # echo "Changed files: $changed_files" - # echo "changed_files=$changed_files" >> "$GITHUB_OUTPUT"&&echo "Set." - # - name: Get changed python files - # id: changed_python_files - # shell: bash - # run: | - # cpf=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` - # echo "Changed python files: $cpf" - # echo "python_files=$cpf" >> "$GITHUB_OUTPUT"&&echo "Set." - # - name: Get changed yaml files - # id: changed_yaml_files - # shell: bash - # run: | - # cyf=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` - # echo "Changed yaml files: $cyf" - # echo "changed_yaml_files=$cyf" >> "$GITHUB_OUTPUT"&&echo "Set." - # - name: Get changed json files - # id: changed_json_files - # shell: bash - # run: | - # cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` - # echo "Changed json files: $cjf" - # echo "changed_json_files=$cjf" >> "$GITHUB_OUTPUT"&&echo "Set." 
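+      # a job-level output only resolves when both the step id and the key written to $GITHUB_OUTPUT match the expressions in the outputs block above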
+ - uses: actions/checkout@v2 + with: + ref: ${{ github.head_ref || github.ref }} + - name: Get changed files + id: changed_files + shell: bash + run: | + changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` + echo "Changed files: $changed_files" + echo "changed_files=$changed_files" >> $GITHUB_OUTPUT + - name: Get changed python files + id: changed_python_files + shell: bash + run: | + changed_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` + echo "Changed python files: $changed_files" + echo "python_files=$changed_files" >> $GITHUB_OUTPUT + - name: Get changed yaml files + id: changed_yaml_files + shell: bash + run: | + changed_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` + echo "Changed yaml files: $changed_files" + echo "changed_yaml_files=$changed_files" >> $GITHUB_OUTPUT + - name: Get changed json files + id: changed_json_files + shell: bash + run: | + cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` + echo "Changed json files: $cjf" + echo "changed_json_files=$cjf" >> $GITHUB_OUTPUT - name: Get test value id: test_value shell: bash run: | - echo "test_value=foo" >> "$GITHUB_OUTPUT"&&echo "Set." + echo "test_value=foo" >> $GITHUB_OUTPUT echo_changed_files: needs: get_changed_files From 0780e45f7aa36125ca5520a6c56f7859e23a172f Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 14:32:28 +0100 Subject: [PATCH 085/195] Getting somewhere --- .github/workflows/pull_request.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 37789a2b4..2c956ed36 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -20,9 +20,9 @@ jobs: runs-on: ubuntu-latest outputs: changed_files: ${{ steps.changed_files.outputs.changed_files }} - python_files: ${{ steps.changed_python_files.outputs.changed_files }} - changed_yaml_files: ${{ steps.changed_yaml_files.outputs.changed_files }} - changed_json_files: ${{ steps.changed_json_files.outputs.changed_files }} + python_files: ${{ steps.changed_python_files.outputs.python_changed_files }} + yaml_changed_files: ${{ steps.changed_yaml_files.outputs.yaml_changed_files }} + json_changed_files: ${{ steps.changed_json_files.outputs.json_changed_files }} test_value: ${{ steps.test_value.outputs.test_value }} steps: - uses: actions/checkout@v2 @@ -41,21 +41,21 @@ jobs: run: | changed_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` echo "Changed python files: $changed_files" - echo "python_files=$changed_files" >> $GITHUB_OUTPUT + echo "python_changed_files=$changed_files" >> $GITHUB_OUTPUT - name: Get changed yaml files id: changed_yaml_files shell: bash run: | changed_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` echo "Changed yaml files: $changed_files" - echo "changed_yaml_files=$changed_files" >> $GITHUB_OUTPUT + echo "yaml_changed_files=$changed_files" >> $GITHUB_OUTPUT - name: Get changed json files id: changed_json_files shell: bash run: | cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` echo "Changed json files: $cjf" - echo "changed_json_files=$cjf" >> $GITHUB_OUTPUT + echo 
"json_changed_files=$cjf" >> $GITHUB_OUTPUT - name: Get test value id: test_value shell: bash From e1e36b701eb724c8404c667bd6cf4c603a3b38b2 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 14:49:21 +0100 Subject: [PATCH 086/195] Restructure of directories --- .../workflows/{ => jobs}/debug_contexts.yml | 0 .../workflows/{ => jobs}/documentation.yml | 0 .github/workflows/{ => jobs}/format.yml | 0 .github/workflows/jobs/get_changed_files.yml | 69 ++++++++++++++ .github/workflows/{ => jobs}/lint.yml | 0 .github/workflows/{ => jobs}/tests.yml | 0 .github/workflows/{ => old}/OLD_document.yml | 0 .../{ => old}/OLD_pythonpublish_sdist.yml | 0 .github/workflows/{ => old}/OLD_tests.yml | 0 .../{ => old}/OLD_update-linear-labels.yml | 0 .github/workflows/pull_request.yml | 89 ++++--------------- deploy/run_e2e_tests.sh | 4 - deploy/run_tests.sh | 50 ----------- 13 files changed, 85 insertions(+), 127 deletions(-) rename .github/workflows/{ => jobs}/debug_contexts.yml (100%) rename .github/workflows/{ => jobs}/documentation.yml (100%) rename .github/workflows/{ => jobs}/format.yml (100%) create mode 100644 .github/workflows/jobs/get_changed_files.yml rename .github/workflows/{ => jobs}/lint.yml (100%) rename .github/workflows/{ => jobs}/tests.yml (100%) rename .github/workflows/{ => old}/OLD_document.yml (100%) rename .github/workflows/{ => old}/OLD_pythonpublish_sdist.yml (100%) rename .github/workflows/{ => old}/OLD_tests.yml (100%) rename .github/workflows/{ => old}/OLD_update-linear-labels.yml (100%) delete mode 100755 deploy/run_e2e_tests.sh delete mode 100755 deploy/run_tests.sh diff --git a/.github/workflows/debug_contexts.yml b/.github/workflows/jobs/debug_contexts.yml similarity index 100% rename from .github/workflows/debug_contexts.yml rename to .github/workflows/jobs/debug_contexts.yml diff --git a/.github/workflows/documentation.yml b/.github/workflows/jobs/documentation.yml similarity index 100% rename from .github/workflows/documentation.yml rename to .github/workflows/jobs/documentation.yml diff --git a/.github/workflows/format.yml b/.github/workflows/jobs/format.yml similarity index 100% rename from .github/workflows/format.yml rename to .github/workflows/jobs/format.yml diff --git a/.github/workflows/jobs/get_changed_files.yml b/.github/workflows/jobs/get_changed_files.yml new file mode 100644 index 000000000..a3bb92b1c --- /dev/null +++ b/.github/workflows/jobs/get_changed_files.yml @@ -0,0 +1,69 @@ +name: get_changed_files +run-name: Get changed get_changed_files + +on: + workflow_call: + outputs: + changed_files: + description: "Changed files" + value: ${{ github.jobs.get_changed_files.outputs.changed_files }} + python_files: + description: "Changed python files" + value: ${{ github.jobs.get_changed_files.outputs.python_files }} + yaml_changed_files: + description: "Changed yaml files" + value: ${{ github.jobs.get_changed_files.outputs.yaml_changed_files }} + json_changed_files: + description: "Changed json files" + value: ${{ github.jobs.get_changed_files.outputs.json_changed_files }} + +env: + GH_TOKEN: ${{ github.token }} + +jobs: + get_changed_files: + name: Get changed files + runs-on: ubuntu-latest + outputs: + changed_files: ${{ steps.changed_files.outputs.changed_files }} + python_files: ${{ steps.changed_python_files.outputs.python_changed_files }} + yaml_changed_files: ${{ steps.changed_yaml_files.outputs.yaml_changed_files }} + json_changed_files: ${{ steps.changed_json_files.outputs.json_changed_files }} + test_value: ${{ steps.test_value.outputs.test_value 
}} + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ github.head_ref || github.ref }} + - name: Get changed files + id: changed_files + shell: bash + run: | + changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` + echo "Changed files: $changed_files" + echo "changed_files=$changed_files" >> $GITHUB_OUTPUT + - name: Get changed python files + id: changed_python_files + shell: bash + run: | + changed_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` + echo "Changed python files: $changed_files" + echo "python_changed_files=$changed_files" >> $GITHUB_OUTPUT + - name: Get changed yaml files + id: changed_yaml_files + shell: bash + run: | + changed_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` + echo "Changed yaml files: $changed_files" + echo "yaml_changed_files=$changed_files" >> $GITHUB_OUTPUT + - name: Get changed json files + id: changed_json_files + shell: bash + run: | + cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` + echo "Changed json files: $cjf" + echo "json_changed_files=$cjf" >> $GITHUB_OUTPUT + - name: Get test value + id: test_value + shell: bash + run: | + echo "test_value=foo" >> $GITHUB_OUTPUT diff --git a/.github/workflows/lint.yml b/.github/workflows/jobs/lint.yml similarity index 100% rename from .github/workflows/lint.yml rename to .github/workflows/jobs/lint.yml diff --git a/.github/workflows/tests.yml b/.github/workflows/jobs/tests.yml similarity index 100% rename from .github/workflows/tests.yml rename to .github/workflows/jobs/tests.yml diff --git a/.github/workflows/OLD_document.yml b/.github/workflows/old/OLD_document.yml similarity index 100% rename from .github/workflows/OLD_document.yml rename to .github/workflows/old/OLD_document.yml diff --git a/.github/workflows/OLD_pythonpublish_sdist.yml b/.github/workflows/old/OLD_pythonpublish_sdist.yml similarity index 100% rename from .github/workflows/OLD_pythonpublish_sdist.yml rename to .github/workflows/old/OLD_pythonpublish_sdist.yml diff --git a/.github/workflows/OLD_tests.yml b/.github/workflows/old/OLD_tests.yml similarity index 100% rename from .github/workflows/OLD_tests.yml rename to .github/workflows/old/OLD_tests.yml diff --git a/.github/workflows/OLD_update-linear-labels.yml b/.github/workflows/old/OLD_update-linear-labels.yml similarity index 100% rename from .github/workflows/OLD_update-linear-labels.yml rename to .github/workflows/old/OLD_update-linear-labels.yml diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 2c956ed36..7977007c0 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -11,85 +11,28 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true -env: - GH_TOKEN: ${{ github.token }} - jobs: + get_changed_files: name: Get changed files - runs-on: ubuntu-latest - outputs: - changed_files: ${{ steps.changed_files.outputs.changed_files }} - python_files: ${{ steps.changed_python_files.outputs.python_changed_files }} - yaml_changed_files: ${{ steps.changed_yaml_files.outputs.yaml_changed_files }} - json_changed_files: ${{ steps.changed_json_files.outputs.json_changed_files }} - test_value: ${{ steps.test_value.outputs.test_value }} - steps: - - uses: actions/checkout@v2 - with: - ref: ${{ github.head_ref || github.ref }} - - 
name: Get changed files - id: changed_files - shell: bash - run: | - changed_files=`gh pr diff ${{ github.event.number }} --name-only | xargs` - echo "Changed files: $changed_files" - echo "changed_files=$changed_files" >> $GITHUB_OUTPUT - - name: Get changed python files - id: changed_python_files - shell: bash - run: | - changed_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py py ${{ steps.changed_files.outputs.changed_files }}` - echo "Changed python files: $changed_files" - echo "python_changed_files=$changed_files" >> $GITHUB_OUTPUT - - name: Get changed yaml files - id: changed_yaml_files - shell: bash - run: | - changed_files=`python3 ${{ github.workspace }}/deploy/_filter_files.py yml ${{ steps.changed_files.outputs.changed_files }}` - echo "Changed yaml files: $changed_files" - echo "yaml_changed_files=$changed_files" >> $GITHUB_OUTPUT - - name: Get changed json files - id: changed_json_files - shell: bash - run: | - cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` - echo "Changed json files: $cjf" - echo "json_changed_files=$cjf" >> $GITHUB_OUTPUT - - name: Get test value - id: test_value - shell: bash - run: | - echo "test_value=foo" >> $GITHUB_OUTPUT + uses: ./.github/workflows/jobs/get_changed_files.yml - echo_changed_files: + format: + name: Check format of python needs: get_changed_files - name: Echo changed files - runs-on: ubuntu-latest - steps: - - name: Echo changed files - env: - OUTPUTS: ${{ toJson(needs) }} - run: | - echo "All outputs: $OUTPUTS" - + uses: ./.github/workflows/jobs/format.yml + with: + files: ${{ needs.get_changed_files.outputs.changed_files }} - # format: - # name: Check format of python - # needs: get_changed_files - # uses: ./.github/workflows/format.yml - # with: - # files: ${{ needs.get_changed_files.outputs.changed_files }} + lint: + name: Lint python + needs: get_changed_files + uses: ./.github/workflows/jobs/lint.yml + with: + files: ${{ needs.get_changed_files.outputs.changed_files }} - # lint: - # name: Lint python - # needs: get_changed_files - # uses: ./.github/workflows/lint.yml - # with: - # files: ${{ needs.get_changed_files.outputs.changed_files }} - - # run_tests: - # name: Run tests - # uses: ./.github/workflows/tests.yml + run_tests: + name: Run tests + uses: ./.github/workflows/jobs/tests.yml diff --git a/deploy/run_e2e_tests.sh b/deploy/run_e2e_tests.sh deleted file mode 100755 index 3301716c7..000000000 --- a/deploy/run_e2e_tests.sh +++ /dev/null @@ -1,4 +0,0 @@ -#! /usr/bin/env bash - -echo "Not yet implemented" -exit 1 diff --git a/deploy/run_tests.sh b/deploy/run_tests.sh deleted file mode 100755 index 199f208af..000000000 --- a/deploy/run_tests.sh +++ /dev/null @@ -1,50 +0,0 @@ -#! /usr/bin/env bash -# Run unit tests -# -# Exit: -# 0 - Success -# 1 - Called with incorrect number of arguments -# 2 - Test directory does not exist -# 3 - Python3 or dependency not found -# 4 - Unit tests failed - -THIS_FILE_DIRECTORY=$(dirname `realpath "$0"`) -TEST_DIRECTORY=`realpath "$THIS_FILE_DIRECTORY"/../tests` - -if [ "$#" -gt 3 ]; then - echo "Usage: $0 [ ]" - echo - echo "Called with $@" - exit 1 -fi - -if [ "$#" -eq 3 ]; then - echo "Called with directory and versions, so assuming CI/CD" - USING_CICD=1 - TEST_DIRECTORY=$1 - OS=$3 - PYTHON_VERSION=$2 -else - echo "Called without directory and versions, so calculating test directory" -fi - -if [ ! 
-d "$TEST_DIRECTORY" ]; then - echo "Test directory does not exist" - exit 2 -fi - -echo "Running unit tests in directory: $TEST_DIRECTORY" - -"$THIS_FILE_DIRECTORY"/check_poetry.sh || "$THIS_FILE_DIRECTORY"/install_deps.sh || exit 3 - -# Unit test config is in pyproject.toml and pytest.ini - don't set any here as it will only complicate CI/CD -if [ "$USING_CICD" = 1 ]; then - poetry run pytest $TEST_DIRECTORY -vvv --junit-xml=$THIS_FILE_DIRECTORY/$PYTHON_VERSION-$OS-test_results.xml || exit 3 - exit 0 -fi - -poetry run pytest $TEST_DIRECTORY || exit 4 - -echo "Unit tests passed" - -exit 0 From 2c15fa2b267b5800f9ca2386bb281c384aca4dae Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 14:57:30 +0100 Subject: [PATCH 087/195] Restructure again because github doesn't support nested directories --- .../{pull_request.yml => EVENT_pull_request.yml} | 8 ++++---- .github/workflows/{release.yml => EVENT_release.yml} | 0 .../{jobs/debug_contexts.yml => JOB_debug_contexts.yml} | 0 .../{jobs/documentation.yml => JOB_documentation.yml} | 0 .github/workflows/{jobs/format.yml => JOB_format.yml} | 0 .../get_changed_files.yml => JOB_get_changed_files.yml} | 0 .github/workflows/{jobs/lint.yml => JOB_lint.yml} | 0 .github/workflows/{jobs/tests.yml => JOB_tests.yml} | 0 8 files changed, 4 insertions(+), 4 deletions(-) rename .github/workflows/{pull_request.yml => EVENT_pull_request.yml} (77%) rename .github/workflows/{release.yml => EVENT_release.yml} (100%) rename .github/workflows/{jobs/debug_contexts.yml => JOB_debug_contexts.yml} (100%) rename .github/workflows/{jobs/documentation.yml => JOB_documentation.yml} (100%) rename .github/workflows/{jobs/format.yml => JOB_format.yml} (100%) rename .github/workflows/{jobs/get_changed_files.yml => JOB_get_changed_files.yml} (100%) rename .github/workflows/{jobs/lint.yml => JOB_lint.yml} (100%) rename .github/workflows/{jobs/tests.yml => JOB_tests.yml} (100%) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/EVENT_pull_request.yml similarity index 77% rename from .github/workflows/pull_request.yml rename to .github/workflows/EVENT_pull_request.yml index 7977007c0..0d41e70eb 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/EVENT_pull_request.yml @@ -15,12 +15,12 @@ jobs: get_changed_files: name: Get changed files - uses: ./.github/workflows/jobs/get_changed_files.yml + uses: ./.github/workflows/JOB_get_changed_files.yml format: name: Check format of python needs: get_changed_files - uses: ./.github/workflows/jobs/format.yml + uses: ./.github/workflows/JOB_format.yml with: files: ${{ needs.get_changed_files.outputs.changed_files }} @@ -28,11 +28,11 @@ jobs: lint: name: Lint python needs: get_changed_files - uses: ./.github/workflows/jobs/lint.yml + uses: ./.github/workflows/JOB_lint.yml with: files: ${{ needs.get_changed_files.outputs.changed_files }} run_tests: name: Run tests - uses: ./.github/workflows/jobs/tests.yml + uses: ./.github/workflows/JOB_tests.yml diff --git a/.github/workflows/release.yml b/.github/workflows/EVENT_release.yml similarity index 100% rename from .github/workflows/release.yml rename to .github/workflows/EVENT_release.yml diff --git a/.github/workflows/jobs/debug_contexts.yml b/.github/workflows/JOB_debug_contexts.yml similarity index 100% rename from .github/workflows/jobs/debug_contexts.yml rename to .github/workflows/JOB_debug_contexts.yml diff --git a/.github/workflows/jobs/documentation.yml b/.github/workflows/JOB_documentation.yml similarity index 100% rename from 
.github/workflows/jobs/documentation.yml rename to .github/workflows/JOB_documentation.yml diff --git a/.github/workflows/jobs/format.yml b/.github/workflows/JOB_format.yml similarity index 100% rename from .github/workflows/jobs/format.yml rename to .github/workflows/JOB_format.yml diff --git a/.github/workflows/jobs/get_changed_files.yml b/.github/workflows/JOB_get_changed_files.yml similarity index 100% rename from .github/workflows/jobs/get_changed_files.yml rename to .github/workflows/JOB_get_changed_files.yml diff --git a/.github/workflows/jobs/lint.yml b/.github/workflows/JOB_lint.yml similarity index 100% rename from .github/workflows/jobs/lint.yml rename to .github/workflows/JOB_lint.yml diff --git a/.github/workflows/jobs/tests.yml b/.github/workflows/JOB_tests.yml similarity index 100% rename from .github/workflows/jobs/tests.yml rename to .github/workflows/JOB_tests.yml From 1f660f2d1319be0f3e0c1d9b4c462db2a3461f28 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 15:52:52 +0100 Subject: [PATCH 088/195] Changes to make tests work --- .github/workflows/JOB_tests.yml | 32 ++++++++++++++++++++++++-------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/.github/workflows/JOB_tests.yml b/.github/workflows/JOB_tests.yml index 10ba400da..da615bb2d 100644 --- a/.github/workflows/JOB_tests.yml +++ b/.github/workflows/JOB_tests.yml @@ -5,9 +5,11 @@ run-name: Tests on: workflow_call: -jobs: +jobs: test: name: "Run Testing OS: ${{ matrix.os }} Python: ${{ matrix.python-version }}" + env: + MARKDOWN_OUTPUT: "# Test Results\n" strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] @@ -18,6 +20,7 @@ jobs: - uses: actions/checkout@v2 with: ref: ${{ github.head_ref || github.ref }} + run: - name: Install Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: @@ -26,11 +29,24 @@ jobs: run: | pip install poetry - name: Run pytest - uses: pavelzw/pytest-action@v2 + run: | + poetry install + poetry run pytest + if [ $? 
-eq 0 ]; then + export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}## Test Results\n\n" + export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}**Test Results:** :white_check_mark:\n\n" + else + export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}## Test Results\n\n" + export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}**Test Results:** :x:\n\n" + fi - name: Output Summary uses: actions/github-script@v4 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | github.issues.createComment({ issue_number: context.issue.number, owner: context.repo.owner, repo: context.repo.repo, body: process.env.MARKDOWN_OUTPUT }) From d09ab37713ba7b8b19d766620cd3b0efc3b16ef4 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 15:57:27 +0100 Subject: [PATCH 089/195] Test runner --- .github/workflows/JOB_tests.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/JOB_tests.yml b/.github/workflows/JOB_tests.yml index da615bb2d..f4a6a7a2c 100644 --- a/.github/workflows/JOB_tests.yml +++ b/.github/workflows/JOB_tests.yml @@ -20,7 +20,9 @@ jobs: - uses: actions/checkout@v2 with: ref: ${{ github.head_ref || github.ref }} - run: + - name: Set title + run: | + export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}## Tests on ${{ matrix.python-version }}, OS: ${{ matrix.os }}\n\n" - name: Install Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: From 538a397ea4df896799d4c5cf5497d938b3260912 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 16:17:27 +0100 Subject: [PATCH 090/195] Bash changes --- .github/workflows/JOB_tests.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/JOB_tests.yml b/.github/workflows/JOB_tests.yml index f4a6a7a2c..15a97b60a 100644 --- a/.github/workflows/JOB_tests.yml +++ b/.github/workflows/JOB_tests.yml @@ -27,10 +27,12 @@ jobs: uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - - name: Install poetry + - name: Install dependencies + shell: bash # stops windows hosts from using powershell run: | - pip install poetry + ${{ github.workspace }}/deploy/install_deps.sh - name: Run pytest + shell: bash # stops windows hosts from using powershell run: | poetry install poetry run pytest From 1a14c39a7709621ad4c920a02bc00513ee11debd Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 16:39:14 +0100 Subject: [PATCH 091/195] Lint job change --- .github/workflows/JOB_lint.yml | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/.github/workflows/JOB_lint.yml b/.github/workflows/JOB_lint.yml index 1ec621ef3..8bd85d059 100644 --- a/.github/workflows/JOB_lint.yml +++ b/.github/workflows/JOB_lint.yml @@ -26,14 +26,26 @@ jobs: run: | pip install flake8 flake8-pyproject failed_linting=0 + failed_files="" echo "Linting files ${{ inputs.files }}" + MARKDOWN_OUTPUT="" for file in ${{ inputs.files }} do echo "Linting $file" - flake8 $file || failed_linting=$((failed_linting + 1)) + flake8 $file || { failed_linting=$((failed_linting + 1)); failed_files="$failed_files $file"; } done - if [ $success -ne 0 ]; then + if [ $failed_linting -ne 0 ]; then echo "Linting failed on $failed_linting files" + echo "Failed files: $failed_files" + + echo "# Linting failed 😢" >> $GITHUB_STEP_SUMMARY + echo "## Failed files" >> $GITHUB_STEP_SUMMARY + for file in $failed_files ; do + echo "- $file" >> $GITHUB_STEP_SUMMARY + done
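+            # fail the job here so the pull request check surfaces the failures recorded in the step summary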
exit 1 + else + echo "# Linting passed 🎉" >> $GITHUB_STEP_SUMMARY + echo "Tested files: ${{ inputs.files }}" >> $GITHUB_STEP_SUMMARY fi exit 1 From bdab367be27ef523b2f181172b59749f33009230 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 16:56:53 +0100 Subject: [PATCH 092/195] Tests --- .github/workflows/JOB_tests.yml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/.github/workflows/JOB_tests.yml b/.github/workflows/JOB_tests.yml index 15a97b60a..4d5275d71 100644 --- a/.github/workflows/JOB_tests.yml +++ b/.github/workflows/JOB_tests.yml @@ -30,12 +30,16 @@ jobs: - name: Install dependencies shell: bash # stops windows hosts from using powershell run: | - ${{ github.workspace }}/deploy/install_deps.sh + pip install poetry pytest + poetry install --no-interaction --no-root -vvv --all-extras + poetry install --no-interaction --no-root --all-extras -vvv + pip install wheel + pip install --upgrade setuptools + pip install --editable ".[test,ml,medical,dev]" - name: Run pytest shell: bash # stops windows hosts from using powershell run: | - poetry install - poetry run pytest + python -m pytest if [ $? -eq 0 ]; then export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}## Test Results\n\n" export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}**Test Results:** :white_check_mark:\n\n" @@ -44,6 +48,7 @@ jobs: export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}**Test Results:** :x:\n\n" fi - name: Output Summary + if: always() uses: actions/github-script@v4 with: github-token: ${{ secrets.GITHUB_TOKEN }} From 2bb285df45b9d9ca023af1570f83a6b307f9722d Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 4 Sep 2023 17:00:40 +0100 Subject: [PATCH 093/195] Missed a shell command --- .github/workflows/JOB_tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/JOB_tests.yml b/.github/workflows/JOB_tests.yml index 4d5275d71..9ff24f37b 100644 --- a/.github/workflows/JOB_tests.yml +++ b/.github/workflows/JOB_tests.yml @@ -21,6 +21,7 @@ jobs: with: ref: ${{ github.head_ref || github.ref }} - name: Set title + shell: bash # stops windows hosts from using powershell run: | export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}## Tests on ${{ matrix.python-version }}, OS: ${{ matrix.os }}\n\n" - name: Install Python ${{ matrix.python-version }} From eaa887fbbb559ab3b7b3de12759a1743e3530888 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 10:09:11 +0100 Subject: [PATCH 094/195] Extract install deps into reusable flow for later, fix tests job --- .editorconfig | 11 +++---- .github/MISC_install_deps.yml | 28 +++++++++++++++++ .github/workflows/EVENT_pull_request.yml | 1 - .github/workflows/JOB_tests.yml | 40 +++--------------------- 4 files changed, 38 insertions(+), 42 deletions(-) create mode 100644 .github/MISC_install_deps.yml diff --git a/.editorconfig b/.editorconfig index b42311381..0c1cf340c 100644 --- a/.editorconfig +++ b/.editorconfig @@ -18,11 +18,6 @@ max_line_length = 160 [*.json] insert_final_newline = ignore -# Minified JavaScript files shouldn't be changed -[**.min.js] -indent_style = ignore -insert_final_newline = ignore - # Makefiles always use tabs for indentation [Makefile] indent_style = tab @@ -30,7 +25,11 @@ indent_style = tab [*.md] trim_trailing_whitespace = false -[*.yml, *.yaml] +[*.yml] +indent_size = 2 +insert_final_newline = true + +[*.yaml] indent_size = 2 insert_final_newline = true diff --git a/.github/MISC_install_deps.yml b/.github/MISC_install_deps.yml new file mode 100644 index 000000000..c95fc8e06 --- /dev/null +++ b/.github/MISC_install_deps.yml @@ -0,0 
+1,28 @@ +name: install_dependencies +run-name: Install dependencies + +on: + workflow_dispatch: + +jobs: + install_dependencies: + name: Install dependencies + steps: + - name: Confirm environment + shell: bash # stops windows hosts from using powershell + run: | + python --version || echo "[FATAL]: Installing dependencies failed because python is not installed" && exit 1 + if [ -z "${{ runner.os }}" ]; then + echo "[FATAL]: Install dependencies task can't be called from a flow that hasn't first called actions/checkout" + exit 2 + fi + + - name: Install dependencies + shell: bash # stops windows hosts from using powershell + run: | + pip install poetry pytest + poetry install --no-interaction --no-root -vvv --all-extras + poetry install --no-interaction --no-root --all-extras -vvv + pip install wheel + pip install --upgrade setuptools + pip install --editable ".[test,ml,medical,dev, ocv]" diff --git a/.github/workflows/EVENT_pull_request.yml b/.github/workflows/EVENT_pull_request.yml index 0d41e70eb..98a66585a 100644 --- a/.github/workflows/EVENT_pull_request.yml +++ b/.github/workflows/EVENT_pull_request.yml @@ -24,7 +24,6 @@ jobs: with: files: ${{ needs.get_changed_files.outputs.changed_files }} - lint: name: Lint python needs: get_changed_files diff --git a/.github/workflows/JOB_tests.yml b/.github/workflows/JOB_tests.yml index 9ff24f37b..5db1b63fa 100644 --- a/.github/workflows/JOB_tests.yml +++ b/.github/workflows/JOB_tests.yml @@ -8,55 +8,25 @@ on: jobs: test: name: "Run Testing OS: ${{ matrix.os }} Python: ${{ matrix.python-version }}" - env: - MARKDOWN_OUTPUT: "# Test Results\n" strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] - # python-version: [3.8, 3.9, "3.10", "3.11"] - python-version: [3.8] + python-version: ["3.8", "3.9", "3.10", "3.11"] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v2 with: ref: ${{ github.head_ref || github.ref }} - - name: Set title - shell: bash # stops windows hosts from using powershell - run: | - export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}## Tests on ${{ matrix.python-version }}, OS: ${{ matrix.os }}\n\n" + - name: Install Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} + - name: Install dependencies - shell: bash # stops windows hosts from using powershell - run: | - pip install poetry pytest - poetry install --no-interaction --no-root -vvv --all-extras - poetry install --no-interaction --no-root --all-extras -vvv - pip install wheel - pip install --upgrade setuptools - pip install --editable ".[test,ml,medical,dev]" + uses: ./.github/actions/MISC_install_deps.yml + - name: Run pytest shell: bash # stops windows hosts from using powershell run: | python -m pytest - if [ $? 
-eq 0 ]; then - export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}## Test Results\n\n" - export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}**Test Results:** :white_check_mark:\n\n" - else - export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}## Test Results\n\n" - export MARKDOWN_OUTPUT="${MARKDOWN_OUTPUT}**Test Results:** :x:\n\n" - fi - - name: Output Summary - if: always() - uses: actions/github-script@v4 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - github.issues.createComment({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - body: process.env.MARKDOWN_OUTPUT - }) From c2075b3608a66e38861055e3be7ca55197f205a8 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 12:04:23 +0100 Subject: [PATCH 095/195] Make black formatter pass --- deploy/confirm_main_branch_deployability.py | 4 +--- deploy/increase_version.py | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/deploy/confirm_main_branch_deployability.py b/deploy/confirm_main_branch_deployability.py index fdc968e9a..c647f5243 100755 --- a/deploy/confirm_main_branch_deployability.py +++ b/deploy/confirm_main_branch_deployability.py @@ -125,9 +125,7 @@ def _get_changes_since_last_release(last_release_timestamp: datetime) -> List[st ) gitref_to_compare = "{}@{{{} seconds ago}}".format(DEFAULT_BRANCH, seconds_since_last_release) - print( - f"It's been {seconds_since_last_release} seconds since the last release, about {int(seconds_since_last_release / SECONDS_IN_A_DAY)} days ago" - ) + print(f"It's been {seconds_since_last_release} seconds since the last release, about {int(seconds_since_last_release / SECONDS_IN_A_DAY)} days ago") printl(f"Getting changes since {gitref_to_compare}") output, error = _run_command("git", "diff", DEFAULT_BRANCH, gitref_to_compare, "--name-only") diff --git a/deploy/increase_version.py b/deploy/increase_version.py index 0951a5db6..7f81e711f 100755 --- a/deploy/increase_version.py +++ b/deploy/increase_version.py @@ -220,9 +220,7 @@ def main() -> None: if args.patch: new_version.increment_patch() - if new_version.was_changed() and ( - force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?") - ): + if new_version.was_changed() and (force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?")): _update_version(new_version, force_actions) _update_pyproject_version(new_version, force_actions) print(f"Version updated successfully to {str(new_version)}") From e2d08fa0dd125ffbe9eeba44ccc05cba6f6d03f7 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 14:05:10 +0100 Subject: [PATCH 096/195] Final fixes to the increase version system --- deploy/increase_version.py | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/deploy/increase_version.py b/deploy/increase_version.py index 7f81e711f..966e888fb 100755 --- a/deploy/increase_version.py +++ b/deploy/increase_version.py @@ -7,7 +7,7 @@ from typing import Tuple from requests import get -from toml import loads +from toml import dumps, loads DARWIN_PYPI_INFO_PAGE = environ.get("PYPY_INFO_PAGE", "https://pypi.org/pypi/darwin-py/json") @@ -169,12 +169,30 @@ def _sanity_check(version: Version, pyproject_version: Version, pypi_version: Ve print("Versions are in sync, sanity check passed") +VERSION_TEMPLATE = """ +__version__ = "{}" +""" + + def _update_version(new_version: Version, force: bool) -> None: - raise NotImplementedError + version_file = Path(__file__).parent / "darwin" / "version" / 
"__init__.py" + assert version_file.exists(), "Version file not found" + + with open(version_file, "w") as f: + f.write(VERSION_TEMPLATE.format(str(new_version))) def _update_pyproject_version(new_version: Version, force: bool) -> None: - raise NotImplementedError + pyproject_file = Path(__file__).parent.parent / "pyproject.toml" + assert pyproject_file.exists(), "pyproject.toml not found" + + with open(pyproject_file, "r") as f: + toml_content = loads(f.read()) + + toml_content["tool"]["poetry"]["version"] = str(new_version) + + with open(pyproject_file, "w") as f: + f.write(dumps(toml_content)) def main() -> None: @@ -220,7 +238,9 @@ def main() -> None: if args.patch: new_version.increment_patch() - if new_version.was_changed() and (force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?")): + if new_version.was_changed() and ( + force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?") + ): _update_version(new_version, force_actions) _update_pyproject_version(new_version, force_actions) print(f"Version updated successfully to {str(new_version)}") From 9fdef65a19de2022c3d4dd222ad57a8dcaeddf2c Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 14:36:12 +0100 Subject: [PATCH 097/195] Add in pyproject validator --- deploy/increase_version.py | 28 +++++++++++++++++++++++----- poetry.lock | 36 +++++++++++++++++++++++++++++++++++- 2 files changed, 58 insertions(+), 6 deletions(-) diff --git a/deploy/increase_version.py b/deploy/increase_version.py index 966e888fb..f29663d1c 100755 --- a/deploy/increase_version.py +++ b/deploy/increase_version.py @@ -4,10 +4,13 @@ from dataclasses import dataclass from os import environ from pathlib import Path +from pprint import pprint from typing import Tuple from requests import get from toml import dumps, loads +from validate_pyproject import api as pyproject_api +from validate_pyproject import errors as pyproject_errors DARWIN_PYPI_INFO_PAGE = environ.get("PYPY_INFO_PAGE", "https://pypi.org/pypi/darwin-py/json") @@ -169,13 +172,13 @@ def _sanity_check(version: Version, pyproject_version: Version, pypi_version: Ve print("Versions are in sync, sanity check passed") -VERSION_TEMPLATE = """ -__version__ = "{}" -""" +VERSION_TEMPLATE = '__version__ = "{}"\n' def _update_version(new_version: Version, force: bool) -> None: - version_file = Path(__file__).parent / "darwin" / "version" / "__init__.py" + version_file = (Path(__file__).parent / "..").resolve() / "darwin" / "version" / "__init__.py" + + print(f"Updating version in {version_file.absolute()}") assert version_file.exists(), "Version file not found" with open(version_file, "w") as f: @@ -183,7 +186,10 @@ def _update_version(new_version: Version, force: bool) -> None: def _update_pyproject_version(new_version: Version, force: bool) -> None: - pyproject_file = Path(__file__).parent.parent / "pyproject.toml" + pyproject_file = (Path(__file__).parent / "..").resolve() / "pyproject.toml" + original_content = pyproject_file.read_text() + + print(f"Updating version in {pyproject_file.absolute()}") assert pyproject_file.exists(), "pyproject.toml not found" with open(pyproject_file, "r") as f: @@ -194,6 +200,18 @@ def _update_pyproject_version(new_version: Version, force: bool) -> None: with open(pyproject_file, "w") as f: f.write(dumps(toml_content)) + # Sanity check + try: + validator = pyproject_api.Validator() + validator(toml_content) + except pyproject_errors.ValidationError as e: + print("Error validating pyproject.toml, reverting 
changes") + pprint(e) + with open(pyproject_file, "w") as f: + f.write(original_content) + print("Reverted. Please fix pyproject.toml and try again.") + exit(1) + def main() -> None: parser = argparse.ArgumentParser(description="Increase version number") diff --git a/poetry.lock b/poetry.lock index 52548d732..35aaf9ad4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -298,6 +298,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "fastjsonschema" +version = "2.18.0" +description = "Fastest Python implementation of JSON schema" +optional = false +python-versions = "*" +files = [ + {file = "fastjsonschema-2.18.0-py3-none-any.whl", hash = "sha256:128039912a11a807068a7c87d0da36660afbfd7202780db26c4aa7153cfdc799"}, + {file = "fastjsonschema-2.18.0.tar.gz", hash = "sha256:e820349dd16f806e4bd1467a138dced9def4bc7d6213a34295272a6cac95b5bd"}, +] + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + [[package]] name = "flake8" version = "6.1.0" @@ -1631,6 +1645,26 @@ secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17. socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "validate-pyproject" +version = "0.14" +description = "Validation library and CLI tool for checking on 'pyproject.toml' files using JSON Schema" +optional = false +python-versions = ">=3.6" +files = [ + {file = "validate-pyproject-0.14.tar.gz", hash = "sha256:3457578f5b8589f11ff0d1fbafba273ed6b8d2ec9d1c62a7716bd7ac3f65a076"}, + {file = "validate_pyproject-0.14-py3-none-any.whl", hash = "sha256:39a9451cfdda6f00745fa1bfc0839a0ab078e038bbb3963b038f754e641e3165"}, +] + +[package.dependencies] +fastjsonschema = ">=2.16.2,<=3" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +all = ["packaging (>=20.4)", "tomli (>=1.2.1)", "trove-classifiers (>=2021.10.20)"] +testing = ["pytest", "pytest-cov", "pytest-randomly", "pytest-xdist", "repo-review", "setuptools", "tomli (>=1.2.1)"] +typecheck = ["importlib-resources", "mypy"] + [[package]] name = "wheel" version = "0.41.1" @@ -1670,4 +1704,4 @@ test = ["flake8-pyproject", "pytest", "responses"] [metadata] lock-version = "2.0" python-versions = ">=3.7.0,<3.11" -content-hash = "bf2caf6db46010fe08b8aac8c522e07985b2a0758c89e830a0f92bdc455c86e3" +content-hash = "f5cced251e698cf4aa836d5d37be54f433eba8d8c60ad07f6b8ec3ab8e923ad0" From 454ee0c41f95988d88b24b24d801aa4ba2b0c302 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 15:02:40 +0100 Subject: [PATCH 098/195] increase_version now supports cicd mode --- deploy/increase_version.py | 58 ++++++++++++++++++++++++++++---------- 1 file changed, 43 insertions(+), 15 deletions(-) diff --git a/deploy/increase_version.py b/deploy/increase_version.py index f29663d1c..cf3b2eecc 100755 --- a/deploy/increase_version.py +++ b/deploy/increase_version.py @@ -133,12 +133,16 @@ def _get_pyproject_version() -> Version: return Version(int(major), int(minor), int(patch)) -def _get_pypi_version(force: bool) -> Version: +def _get_pypi_version(force: bool, cicd: bool) -> Version: response = get(DARWIN_PYPI_INFO_PAGE) if not response.ok: print("PYPI connection not available, sanity checking for PyPi unavailable") if not force: + if cicd: + print("Failed on PYPI check") + exit(1) + if not confirm("Continue without PyPi sanity check?"): exit(1) @@ -175,7 +179,7 @@ def _sanity_check(version: Version, pyproject_version: Version, pypi_version: Ve 
VERSION_TEMPLATE = '__version__ = "{}"\n' -def _update_version(new_version: Version, force: bool) -> None: +def _update_version(new_version: Version) -> None: version_file = (Path(__file__).parent / "..").resolve() / "darwin" / "version" / "__init__.py" print(f"Updating version in {version_file.absolute()}") @@ -185,7 +189,7 @@ def _update_version(new_version: Version, force: bool) -> None: f.write(VERSION_TEMPLATE.format(str(new_version))) -def _update_pyproject_version(new_version: Version, force: bool) -> None: +def _update_pyproject_version(new_version: Version) -> None: pyproject_file = (Path(__file__).parent / "..").resolve() / "pyproject.toml" original_content = pyproject_file.read_text() @@ -215,20 +219,33 @@ def _update_pyproject_version(new_version: Version, force: bool) -> None: def main() -> None: parser = argparse.ArgumentParser(description="Increase version number") + parser.add_argument("-f", "--force", action="store_true", help="force actions, do not ask for confirmation") + parser.add_argument( + "-c", + "--cicd", + action="store_true", + help="run in CI/CD mode (no confirmation, assume failure unless --force specified)", + ) + parser.add_argument("-v", "--version", action="store_true", help="show version number and exit", default=True) parser.add_argument("-M", "--major", action="store_true", help="increase major version") parser.add_argument("-m", "--minor", action="store_true", help="increase minor version") parser.add_argument("-p", "--patch", action="store_true", help="increase patch version") - parser.add_argument("-f", "--force", action="store_true", help="force actions, do not ask for confirmation") + parser.add_argument("-N", "--new-version", type=str, help="set new version number (overrides -M, -m, -p)") args = parser.parse_args() force_actions = False + cicd_mode = False if args.force: print("Force mode enabled, no confirmation will be asked") force_actions = True + if args.cicd: + print("CI/CD mode enabled, no confirmation will be asked") + cicd_mode = True + if args.major and args.minor and args.patch: print("Cannot increase major, minor and patch at the same time. Specify only one of these.") exit(2) @@ -236,7 +253,7 @@ def main() -> None: # Constants so that these are not mutated by mistake LOCAL_VERSION = _get_version() PYPROJECT_VERSION = _get_pyproject_version() - PYPI_VERSION = _get_pypi_version(force_actions) + PYPI_VERSION = _get_pypi_version(force_actions, cicd_mode) if args.version: print(f"Current version in darwin.version module: {str(LOCAL_VERSION)}") @@ -247,20 +264,31 @@ def main() -> None: new_version = LOCAL_VERSION.copy() - if args.major: - new_version.increment_major() + if (args.major or args.minor or args.patch) and args.new_version: + print("Cannot increase version and set new version at the same time. 
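
# Condensed sketch of the CLI surface added in this patch, using the flag names
# from the diff. The parser.error() tightening (rejecting any two bump flags,
# not only all three at once) is an assumption of the sketch, not part of the
# patch.
import argparse

def parse_args(argv=None) -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Increase version number")
    parser.add_argument("-f", "--force", action="store_true")
    parser.add_argument("-c", "--cicd", action="store_true")
    parser.add_argument("-M", "--major", action="store_true")
    parser.add_argument("-m", "--minor", action="store_true")
    parser.add_argument("-p", "--patch", action="store_true")
    parser.add_argument("-N", "--new-version", type=str)
    args = parser.parse_args(argv)
    if sum([args.major, args.minor, args.patch]) > 1:
        parser.error("specify at most one of -M, -m, -p")
    return args
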
Specify only one of these.") + exit(2) + + if args.new_version: + print(f"Setting new version to {args.new_version}") + new_version = Version(*[int(x) for x in args.new_version.split(".")]) + new_version._changed = True + else: + if args.major: + new_version.increment_major() - if args.minor: - new_version.increment_minor() + if args.minor: + new_version.increment_minor() - if args.patch: - new_version.increment_patch() + if args.patch: + new_version.increment_patch() - if new_version.was_changed() and ( - force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?") + if ( + new_version.was_changed() + and not cicd_mode + and (force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?")) ): - _update_version(new_version, force_actions) - _update_pyproject_version(new_version, force_actions) + _update_version(new_version) + _update_pyproject_version(new_version) print(f"Version updated successfully to {str(new_version)}") else: print("Version not updated") From e289e6b34afa5cc910acacf7f884d4ee2e2ae3f3 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 15:09:48 +0100 Subject: [PATCH 099/195] Release tag --- .github/workflows/EVENT_release.yml | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/.github/workflows/EVENT_release.yml b/.github/workflows/EVENT_release.yml index 21b8b97eb..e9a40c758 100644 --- a/.github/workflows/EVENT_release.yml +++ b/.github/workflows/EVENT_release.yml @@ -35,15 +35,27 @@ jobs: const { data: { state } } = await github.repos.getCombinedStatusForRef({ owner: context.repo.owner, repo: context.repo.repo, - ref: 'master' + ref: '${{ github.head_ref || github.ref }}' }); if (state !== 'success') { - core.setFailed('master branch is not passing, cannot create a release'); + core.setFailed('Tagged branch is not passing, cannot create a release'); } deploy: needs: [checkout, check_master_is_passing] runs-on: ubuntu-latest steps: - - name: Deploy - run: python3 release.py ${{ github.ref }} + - name: Create release + uses: actions/github-script@v3 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const { data: { id: release_id } } = await github.repos.createRelease({ + owner: context.repo.owner, + repo: context.repo.repo, + tag_name: context.ref, + name: context.ref, + draft: false, + prerelease: context.ref.startsWith('test-') + }); + core.setOutput('release_id', release_id); From 52414eceacd5f6425c084eb6b31aee42745dc0ec Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 15:18:00 +0100 Subject: [PATCH 100/195] Correct install deps reusable flow --- .github/MISC_install_deps.yml | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/.github/MISC_install_deps.yml b/.github/MISC_install_deps.yml index c95fc8e06..199e2452c 100644 --- a/.github/MISC_install_deps.yml +++ b/.github/MISC_install_deps.yml @@ -7,15 +7,12 @@ on: jobs: install_dependencies: name: Install dependencies + runs-on: ubuntu-latest steps: - - name: Confirm environment - shell: bash # stops windows hosts from using powershell - run: | - python --version || echo "[FATAL]: Installing dependencies failed because python is not installed" && exit 1 - if [ -z "${{ runner.os }}" ]; then - echo "[FATAL]: Install dependencies task can't be called from a flow that hasn't first called actions/checkout" - exit 2 - fi + - name: Checkout + uses: actions/checkout@v2 + with: + ref: ${{ github.head_ref || github.ref }} - name: Install dependencies shell: bash # stops 
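
# Defensive variant of the -N/--new-version parsing added in PATCH 098 above
# (hypothetical helper, not in the diff): the raw int() split there would crash
# on tags like "v1.2.3" or "1.2".
import re
from typing import Tuple

def parse_semver(text: str) -> Tuple[int, int, int]:
    match = re.fullmatch(r"v?(\d+)\.(\d+)\.(\d+)", text.strip())
    if match is None:
        raise ValueError(f"not a MAJOR.MINOR.PATCH version: {text!r}")
    major, minor, patch = (int(part) for part in match.groups())
    return major, minor, patch
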
windows hosts from using powershell From 92633fd637cf8897811b5ee5924bb1edf3a400c9 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 15:24:05 +0100 Subject: [PATCH 101/195] Change flows to pass variables correctly --- .github/workflows/EVENT_pull_request.yml | 4 ++-- .github/workflows/JOB_format.yml | 2 +- .github/workflows/JOB_get_changed_files.yml | 2 +- .github/workflows/JOB_lint.yml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/EVENT_pull_request.yml b/.github/workflows/EVENT_pull_request.yml index 98a66585a..377648058 100644 --- a/.github/workflows/EVENT_pull_request.yml +++ b/.github/workflows/EVENT_pull_request.yml @@ -22,14 +22,14 @@ jobs: needs: get_changed_files uses: ./.github/workflows/JOB_format.yml with: - files: ${{ needs.get_changed_files.outputs.changed_files }} + files: ${{ needs.get_changed_files.outputs.python_changed_files }} lint: name: Lint python needs: get_changed_files uses: ./.github/workflows/JOB_lint.yml with: - files: ${{ needs.get_changed_files.outputs.changed_files }} + files: ${{ needs.get_changed_files.outputs.python_changed_files }} run_tests: name: Run tests diff --git a/.github/workflows/JOB_format.yml b/.github/workflows/JOB_format.yml index 701d4c945..8571e7818 100644 --- a/.github/workflows/JOB_format.yml +++ b/.github/workflows/JOB_format.yml @@ -26,7 +26,7 @@ jobs: run: | pip install black failed_formatting=1 - echo "Checking files ${{ inputs.files }}" + echo "Checking files *${{ inputs.files }}*" for file in ${{ inputs.files }} do echo "Checking $file" diff --git a/.github/workflows/JOB_get_changed_files.yml b/.github/workflows/JOB_get_changed_files.yml index a3bb92b1c..eaa44fc4d 100644 --- a/.github/workflows/JOB_get_changed_files.yml +++ b/.github/workflows/JOB_get_changed_files.yml @@ -26,7 +26,7 @@ jobs: runs-on: ubuntu-latest outputs: changed_files: ${{ steps.changed_files.outputs.changed_files }} - python_files: ${{ steps.changed_python_files.outputs.python_changed_files }} + python_changed_files: ${{ steps.changed_python_files.outputs.python_changed_files }} yaml_changed_files: ${{ steps.changed_yaml_files.outputs.yaml_changed_files }} json_changed_files: ${{ steps.changed_json_files.outputs.json_changed_files }} test_value: ${{ steps.test_value.outputs.test_value }} diff --git a/.github/workflows/JOB_lint.yml b/.github/workflows/JOB_lint.yml index 8bd85d059..af9eff3f3 100644 --- a/.github/workflows/JOB_lint.yml +++ b/.github/workflows/JOB_lint.yml @@ -27,7 +27,7 @@ jobs: pip install flake8 flake8-pyproject failed_linting=0 failed_files="" - echo "Linting files ${{ inputs.files }}" + echo "Linting files [${{ inputs.files }}]" MARKDOWN_OUTPUT="" for file in ${{ inputs.files }} do From 20ed111e7346def08222927e26820480ca54a19e Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 15:29:46 +0100 Subject: [PATCH 102/195] Return reusable to within tests --- .github/MISC_install_deps.yml | 25 ------------------------- .github/workflows/JOB_tests.yml | 9 ++++++++- 2 files changed, 8 insertions(+), 26 deletions(-) delete mode 100644 .github/MISC_install_deps.yml diff --git a/.github/MISC_install_deps.yml b/.github/MISC_install_deps.yml deleted file mode 100644 index 199e2452c..000000000 --- a/.github/MISC_install_deps.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: install_dependencies -run-name: Install dependencies - -on: - workflow_dispatch: - -jobs: - install_dependencies: - name: Install dependencies - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v2 - with: - ref: ${{ 
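
# The outputs renamed in PATCH 101 above travel between jobs via the standard
# $GITHUB_OUTPUT file; a step publishes one like this (the sample value is
# illustrative, the mechanism is stock GitHub Actions):
import os

def set_output(name: str, value: str) -> None:
    with open(os.environ["GITHUB_OUTPUT"], "a") as handle:
        handle.write(f"{name}={value}\n")

set_output("python_changed_files", "darwin/cli.py deploy/increase_version.py")
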
github.head_ref || github.ref }} - - - name: Install dependencies - shell: bash # stops windows hosts from using powershell - run: | - pip install poetry pytest - poetry install --no-interaction --no-root -vvv --all-extras - poetry install --no-interaction --no-root --all-extras -vvv - pip install wheel - pip install --upgrade setuptools - pip install --editable ".[test,ml,medical,dev, ocv]" diff --git a/.github/workflows/JOB_tests.yml b/.github/workflows/JOB_tests.yml index 5db1b63fa..16775b3b8 100644 --- a/.github/workflows/JOB_tests.yml +++ b/.github/workflows/JOB_tests.yml @@ -24,7 +24,14 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies - uses: ./.github/actions/MISC_install_deps.yml + shell: bash # stops windows hosts from using powershell + run: | + pip install poetry pytest + poetry install --no-interaction --no-root -vvv --all-extras + poetry install --no-interaction --no-root --all-extras -vvv + pip install wheel + pip install --upgrade setuptools + pip install --editable ".[test,ml,medical,dev, ocv]" - name: Run pytest shell: bash # stops windows hosts from using powershell From 14e14792df6b49a058189f8b21a381ffaba21350 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 15:41:06 +0100 Subject: [PATCH 103/195] Lock file was blocking tests --- poetry.lock | 120 +++++++++++++++++++--------------------------------- 1 file changed, 43 insertions(+), 77 deletions(-) diff --git a/poetry.lock b/poetry.lock index 35aaf9ad4..815036adf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -198,46 +198,46 @@ files = [ [[package]] name = "connected-components-3d" -version = "3.12.2" +version = "3.12.3" description = "Connected components on 2D and 3D images. Supports multiple labels." optional = true python-versions = ">=3.7,<4.0" files = [ - {file = "connected-components-3d-3.12.2.tar.gz", hash = "sha256:867e9389b918db4b209ae754235d5f13bc276f131e36d9483159561479cb0582"}, - {file = "connected_components_3d-3.12.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fc209503aa99a1fa03d02cb651246fa6f8d8a8675eff12046ff305d2e152fd41"}, - {file = "connected_components_3d-3.12.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc1d47df4b583631c23d5e900481f6c29e91481c5976fe8ab62b7f322b23bdea"}, - {file = "connected_components_3d-3.12.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20b1c4bc3134c208dfc3e98adadf1e516784b63072533145956575d990f18ac9"}, - {file = "connected_components_3d-3.12.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4dc9c1fc09cb213cf4f7bde6ba0dde7f47002ac78f2a590c1162aeaabf04ecd"}, - {file = "connected_components_3d-3.12.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58a6bb8bc8c44ebe2e847236fa8106859e4de99dc5d79e44cc3a740563399916"}, - {file = "connected_components_3d-3.12.2-cp310-cp310-win32.whl", hash = "sha256:bca4637c6a2a3bc3af146cfea0635fc742dcd65cfb4fc2100fbdce99c6ccadae"}, - {file = "connected_components_3d-3.12.2-cp310-cp310-win_amd64.whl", hash = "sha256:b4e89b15fd9bc80bd42b1a8d07f6f88d88915208c9b694453d0fa936e79a2acf"}, - {file = "connected_components_3d-3.12.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:56a7798c5dd14f49b67ecc1680cacf63a04cf196e6f48b0e8a6a9b20bb62c05b"}, - {file = "connected_components_3d-3.12.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66ecc34575320fb2eb037a0e830b99c59ce5ddaecf8f14d7e237b1fbfc926f3d"}, - {file = "connected_components_3d-3.12.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:f2f3ba7f41e240baa1eb33030d45c2a060ca94b3eb90e506bd6dc9e30a04e5ed"}, - {file = "connected_components_3d-3.12.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd2a148a1d0fe902a5f4d437c61a2fb9eed023f6c6240c0242c6a72ecb2842ce"}, - {file = "connected_components_3d-3.12.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd6cc42fb491a1d25c873a0f2a30bca5a07e771fc0ce92119a9a865c79c48456"}, - {file = "connected_components_3d-3.12.2-cp311-cp311-win32.whl", hash = "sha256:d92b865b53524e87d118770927c0de19d1cfb02c9f59e0077362ad1a1a69434c"}, - {file = "connected_components_3d-3.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:a6744e3442e913cb5da067b50bfc8005b8f5ff9b543b65acbecd6024ac790664"}, - {file = "connected_components_3d-3.12.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:72a3657c3cea2fc17cac663205850857f69be8d54387353392c570296b927a0c"}, - {file = "connected_components_3d-3.12.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade1ed09b044cef22defd6efb565930e6a8cc2732eaf7177d0a577ec928c50a0"}, - {file = "connected_components_3d-3.12.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d49f539d8a880dc9c168b3dfe69e6cc3e4750fe31facd4f5f0e4188105d1107e"}, - {file = "connected_components_3d-3.12.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4646776a33e724a0784cdeb3d51ddc3261ee238ef3f8e91f0277138516dfbd12"}, - {file = "connected_components_3d-3.12.2-cp37-cp37m-win32.whl", hash = "sha256:cc48fd8b717e3807158a659bed2b567c115d9033e9be968d8961d215e9725a34"}, - {file = "connected_components_3d-3.12.2-cp37-cp37m-win_amd64.whl", hash = "sha256:ed6c1122dfdf5f6e494eedc18143c1e3a96453bc2b41791301370ac232c6d10b"}, - {file = "connected_components_3d-3.12.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c85802e9ec4eb0abda5c4362abeaf24db2d420bbddac14913024062d13cf515"}, - {file = "connected_components_3d-3.12.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:609f627d49f306a5fb66cb6eb386a87e1dce6bac91289dc4fa581744f6fd4fc7"}, - {file = "connected_components_3d-3.12.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16ef4954e325b552bd1a3b3c14a8a9a97970390fbdf66df431b1b9bb841da774"}, - {file = "connected_components_3d-3.12.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb2f9aafa5b3d3932be894b4eda9959aec32b5bb8f359cca4f2ba5db83ea38f3"}, - {file = "connected_components_3d-3.12.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac93ce49b532a223f3fc32213720978f715c434f598548d381591782af10a823"}, - {file = "connected_components_3d-3.12.2-cp38-cp38-win32.whl", hash = "sha256:3438d8eecf13ffbcdab2d90f4633d2c9edd9f6cecdf25c5f733a7e33ddb6c95c"}, - {file = "connected_components_3d-3.12.2-cp38-cp38-win_amd64.whl", hash = "sha256:14f574ba93c74c8c9e9e5d6a318a48aeaf1a8298d7f68960685c59f09a42452e"}, - {file = "connected_components_3d-3.12.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c4ea235f9422ae41a89b9c37f1f5b277afd56afa95f1ec617a7b56b9e530955f"}, - {file = "connected_components_3d-3.12.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1af878e907a3fa9792e9d3a9e14b4b3207f71a3a5243e38fff7df6c4d425ccfb"}, - {file = "connected_components_3d-3.12.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ac306aae6eab080d14861b18d9748b6924bb8d04024b6e819ddf3de733075d8"}, - {file = "connected_components_3d-3.12.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:25d1d11bf70b76bc61588a915faf9b8780d97e0c50f368392f859c69ec93693c"}, - {file = "connected_components_3d-3.12.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:daf46a0017f3b1ee49bed20d1e6ad6a8cbeb5b9e715d397a79ad04b54ae77619"}, - {file = "connected_components_3d-3.12.2-cp39-cp39-win32.whl", hash = "sha256:b066920dae690846791135ce4f6af117a5dc8f449df4e49d32f7564e2e5e4cdd"}, - {file = "connected_components_3d-3.12.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9660819e778cb674afd0bef9f1b80fd8afc27fe42697d07a1741cd1310f4586"}, + {file = "connected-components-3d-3.12.3.tar.gz", hash = "sha256:0040a009d86918c757d607986edea760feb782093ddfc1c20761c8a88cd087ec"}, + {file = "connected_components_3d-3.12.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ef86445ab99428102272028cc291af6eb332031287bf275600419f0acd397d15"}, + {file = "connected_components_3d-3.12.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a9a2aa53976b36a68ee701f021adadfd645527f10372515970c0373f4c254667"}, + {file = "connected_components_3d-3.12.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df624b758ae6d22a0a7c2687f4d009bc7b722d58526872c8062a868472d05d70"}, + {file = "connected_components_3d-3.12.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7997a95b655cd5dfbf94a00863e62c6b48a3ce54d96de492434c1decbabd8056"}, + {file = "connected_components_3d-3.12.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc3fe77d67d82ebf42ef76e334517ac2165869a3393e5e5d68d475d36d057192"}, + {file = "connected_components_3d-3.12.3-cp310-cp310-win32.whl", hash = "sha256:1dcd2c47dcc308825a091a61ca2da54cbc8dd994a322ade7cbc622ff8c3c1da7"}, + {file = "connected_components_3d-3.12.3-cp310-cp310-win_amd64.whl", hash = "sha256:7d124d12cc12766baaf08c3828282106a2407cb064950fd9bdd227f018471420"}, + {file = "connected_components_3d-3.12.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7b7c076ef6ea778f1581b68f6b994bf926543249057f795159ccec8661570bed"}, + {file = "connected_components_3d-3.12.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dc3a434bbf1b8033ee6e57ea4f499131d43ec5ff14d9c64b3c6cb11dd23b62a4"}, + {file = "connected_components_3d-3.12.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c02b4d758093a676519182f38c1f50d6e3c2f251a181ec89c5919736b710789"}, + {file = "connected_components_3d-3.12.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:073a715e0e0f877762e44fe83d98e651d2b4d476bd7862fd02fd80a0150f4d0f"}, + {file = "connected_components_3d-3.12.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57996a349a0747e7262e8f740da13522c81bd7d09f9a9a1ecab359ba0ff1d90"}, + {file = "connected_components_3d-3.12.3-cp311-cp311-win32.whl", hash = "sha256:010872cf018d2331b9c69148e3389118200ee1a24e719a17747acd3bad99b465"}, + {file = "connected_components_3d-3.12.3-cp311-cp311-win_amd64.whl", hash = "sha256:d8a0062b5f4e4c205db46698f262d9dd4946e3c4660d3709b9606c6f4bce1586"}, + {file = "connected_components_3d-3.12.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b3ffc46c0c10252f235130cf44fb0f643611104111871ebbaf21db3992f23041"}, + {file = "connected_components_3d-3.12.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbdfceb3ad240cd025d494f504b73dd4da9ebe19c053ff2036f7a5530a772c1f"}, + {file = "connected_components_3d-3.12.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:524fa48f8affbfe5b29f9a48976c7d948a552bfc865cd1aa6dd032d22d158c3e"}, + {file = "connected_components_3d-3.12.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a115275f89233c726d6b0d1e543dd438094b5826c4b8e674de84370ae0320ca"}, + {file = "connected_components_3d-3.12.3-cp37-cp37m-win32.whl", hash = "sha256:b70ac3a2334c39bb608c0eb1721dbe2634e345942fc0bc4e60855672d4350c21"}, + {file = "connected_components_3d-3.12.3-cp37-cp37m-win_amd64.whl", hash = "sha256:31018fb4c4e6c9ba072c3a9e95638bdf6fa7c3fcd892a719ed9162686e975404"}, + {file = "connected_components_3d-3.12.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca3674d9d7a6bc9e2029a5038a0bff085ec05cff21f0766d0e6fa54a0e3372f5"}, + {file = "connected_components_3d-3.12.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:cc8c37a8aef38115a612978fdd37b9c98564ed9c05d09f3031c117408e06deba"}, + {file = "connected_components_3d-3.12.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eeb301764b3da77cec3e8a869c1dd937395cda4fb7e63f05b9a4ac231eb8dd5"}, + {file = "connected_components_3d-3.12.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aca594facf672997ac66aad4e86465dbc1b248a989474c67a549a70312f924c2"}, + {file = "connected_components_3d-3.12.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccdec1820268a60dba4a63ec298767b9462c8beac18a315f91802643e50cc3c1"}, + {file = "connected_components_3d-3.12.3-cp38-cp38-win32.whl", hash = "sha256:fbacf89984e374d8ae554c8e8fc4e4fc19b81e9396ce515aa893db6832114ebe"}, + {file = "connected_components_3d-3.12.3-cp38-cp38-win_amd64.whl", hash = "sha256:bb1de1dd58ac1303dcfee6370138de8704381c2f85c55fe545760416355fcb20"}, + {file = "connected_components_3d-3.12.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ee61e8e4cd61f182b583726e6965d1e9f81746ac2623199ee4c044fd941b2c0"}, + {file = "connected_components_3d-3.12.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e17dc81b234e72668de3cf448766bb03aa57cceabb9469756a13feef7c8d60d6"}, + {file = "connected_components_3d-3.12.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf2fe091b9fd2a64884662afa118dad96dcab04f45a35ab48a1a756fcf1d168a"}, + {file = "connected_components_3d-3.12.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:385a266b94d6ba8ad11deabaa7ea6db094746e0ae6509759cc0d632c15bc7e3b"}, + {file = "connected_components_3d-3.12.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95cb6e9c5fe25052f9029aa6f6a99896671123343f3a7039fd7b4cc0e940bfed"}, + {file = "connected_components_3d-3.12.3-cp39-cp39-win32.whl", hash = "sha256:49954222a8936085284048eb0976e3916ae655505831fba2f80a5aa17c4cabaa"}, + {file = "connected_components_3d-3.12.3-cp39-cp39-win_amd64.whl", hash = "sha256:d8734fbbbd7da6ea960a0effdeed9263dc1dfbbfdb6134928d0ac8630d5b3a3b"}, ] [package.dependencies] @@ -298,20 +298,6 @@ files = [ [package.extras] test = ["pytest (>=6)"] -[[package]] -name = "fastjsonschema" -version = "2.18.0" -description = "Fastest Python implementation of JSON schema" -optional = false -python-versions = "*" -files = [ - {file = "fastjsonschema-2.18.0-py3-none-any.whl", hash = "sha256:128039912a11a807068a7c87d0da36660afbfd7202780db26c4aa7153cfdc799"}, - {file = "fastjsonschema-2.18.0.tar.gz", hash = "sha256:e820349dd16f806e4bd1467a138dced9def4bc7d6213a34295272a6cac95b5bd"}, -] - -[package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", 
"pytest-cache", "validictory"] - [[package]] name = "flake8" version = "6.1.0" @@ -1104,13 +1090,13 @@ files = [ [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.1" description = "pytest: simple powerful testing with Python" optional = true python-versions = ">=3.7" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.1-py3-none-any.whl", hash = "sha256:460c9a59b14e27c602eb5ece2e47bec99dc5fc5f6513cf924a7d03a578991b1f"}, + {file = "pytest-7.4.1.tar.gz", hash = "sha256:2f2301e797521b23e4d2585a0a3d7b5e50fdddaaf7e7d6773ea26ddb17c213ab"}, ] [package.dependencies] @@ -1645,35 +1631,15 @@ secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17. socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "validate-pyproject" -version = "0.14" -description = "Validation library and CLI tool for checking on 'pyproject.toml' files using JSON Schema" -optional = false -python-versions = ">=3.6" -files = [ - {file = "validate-pyproject-0.14.tar.gz", hash = "sha256:3457578f5b8589f11ff0d1fbafba273ed6b8d2ec9d1c62a7716bd7ac3f65a076"}, - {file = "validate_pyproject-0.14-py3-none-any.whl", hash = "sha256:39a9451cfdda6f00745fa1bfc0839a0ab078e038bbb3963b038f754e641e3165"}, -] - -[package.dependencies] -fastjsonschema = ">=2.16.2,<=3" -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - -[package.extras] -all = ["packaging (>=20.4)", "tomli (>=1.2.1)", "trove-classifiers (>=2021.10.20)"] -testing = ["pytest", "pytest-cov", "pytest-randomly", "pytest-xdist", "repo-review", "setuptools", "tomli (>=1.2.1)"] -typecheck = ["importlib-resources", "mypy"] - [[package]] name = "wheel" -version = "0.41.1" +version = "0.41.2" description = "A built-package format for Python" optional = true python-versions = ">=3.7" files = [ - {file = "wheel-0.41.1-py3-none-any.whl", hash = "sha256:473219bd4cbedc62cea0cb309089b593e47c15c4a2531015f94e4e3b9a0f6981"}, - {file = "wheel-0.41.1.tar.gz", hash = "sha256:12b911f083e876e10c595779709f8a88a59f45aacc646492a67fe9ef796c1b47"}, + {file = "wheel-0.41.2-py3-none-any.whl", hash = "sha256:75909db2664838d015e3d9139004ee16711748a52c8f336b52882266540215d8"}, + {file = "wheel-0.41.2.tar.gz", hash = "sha256:0c5ac5ff2afb79ac23ab82bab027a0be7b5dbcf2e54dc50efe4bf507de1f7985"}, ] [package.extras] @@ -1704,4 +1670,4 @@ test = ["flake8-pyproject", "pytest", "responses"] [metadata] lock-version = "2.0" python-versions = ">=3.7.0,<3.11" -content-hash = "f5cced251e698cf4aa836d5d37be54f433eba8d8c60ad07f6b8ec3ab8e923ad0" +content-hash = "bf2caf6db46010fe08b8aac8c522e07985b2a0758c89e830a0f92bdc455c86e3" From be60008a1a9a7d1e08e58587fbb8cdae6ba2f9d9 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 15:44:23 +0100 Subject: [PATCH 104/195] Test fixes --- .github/workflows/JOB_get_changed_files.yml | 6 ------ .github/workflows/JOB_tests.yml | 2 +- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/.github/workflows/JOB_get_changed_files.yml b/.github/workflows/JOB_get_changed_files.yml index eaa44fc4d..d47133d62 100644 --- a/.github/workflows/JOB_get_changed_files.yml +++ b/.github/workflows/JOB_get_changed_files.yml @@ -29,7 +29,6 @@ jobs: python_changed_files: ${{ steps.changed_python_files.outputs.python_changed_files }} yaml_changed_files: ${{ 
steps.changed_yaml_files.outputs.yaml_changed_files }} json_changed_files: ${{ steps.changed_json_files.outputs.json_changed_files }} - test_value: ${{ steps.test_value.outputs.test_value }} steps: - uses: actions/checkout@v2 with: @@ -62,8 +61,3 @@ jobs: cjf=`python3 ${{ github.workspace }}/deploy/_filter_files.py json ${{ steps.changed_files.outputs.changed_files }}` echo "Changed json files: $cjf" echo "json_changed_files=$cjf" >> $GITHUB_OUTPUT - - name: Get test value - id: test_value - shell: bash - run: | - echo "test_value=foo" >> $GITHUB_OUTPUT diff --git a/.github/workflows/JOB_tests.yml b/.github/workflows/JOB_tests.yml index 16775b3b8..b3efc82e2 100644 --- a/.github/workflows/JOB_tests.yml +++ b/.github/workflows/JOB_tests.yml @@ -11,7 +11,7 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10"] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v2 From e584cee76bfc22d77d2dfb79246fa52e72cf046c Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 15:55:34 +0100 Subject: [PATCH 105/195] Addition of a debug statement to sort context --- .github/workflows/EVENT_pull_request.yml | 18 ++++++++++++++++++ .github/workflows/JOB_get_changed_files.yml | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/.github/workflows/EVENT_pull_request.yml b/.github/workflows/EVENT_pull_request.yml index 377648058..94a32e17a 100644 --- a/.github/workflows/EVENT_pull_request.yml +++ b/.github/workflows/EVENT_pull_request.yml @@ -17,6 +17,24 @@ jobs: name: Get changed files uses: ./.github/workflows/JOB_get_changed_files.yml + + debug: + needs: get_changed_files + runs-on: ubuntu-latest + steps: + - name: Debug + run: | + echo "Changed files:" + echo "${{ needs.get_changed_files.outputs.changed_files }}" + echo "Python changed files:" + echo "${{ needs.get_changed_files.outputs.python_changed_files }}" + echo "Yaml changed files:" + echo "${{ needs.get_changed_files.outputs.yaml_changed_files }}" + echo "Json changed files:" + echo "${{ needs.get_changed_files.outputs.json_changed_files }}" + + + format: name: Check format of python needs: get_changed_files diff --git a/.github/workflows/JOB_get_changed_files.yml b/.github/workflows/JOB_get_changed_files.yml index d47133d62..34511496b 100644 --- a/.github/workflows/JOB_get_changed_files.yml +++ b/.github/workflows/JOB_get_changed_files.yml @@ -7,7 +7,7 @@ on: changed_files: description: "Changed files" value: ${{ github.jobs.get_changed_files.outputs.changed_files }} - python_files: + python_changed_files: description: "Changed python files" value: ${{ github.jobs.get_changed_files.outputs.python_files }} yaml_changed_files: From 9b46d6c641b0e13c82b68683c35dc4d3ca2b1cbd Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 16:01:05 +0100 Subject: [PATCH 106/195] Jobs context --- .github/workflows/JOB_get_changed_files.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/JOB_get_changed_files.yml b/.github/workflows/JOB_get_changed_files.yml index 34511496b..939d215d8 100644 --- a/.github/workflows/JOB_get_changed_files.yml +++ b/.github/workflows/JOB_get_changed_files.yml @@ -6,16 +6,16 @@ on: outputs: changed_files: description: "Changed files" - value: ${{ github.jobs.get_changed_files.outputs.changed_files }} + value: ${{ jobs.get_changed_files.outputs.changed_files }} python_changed_files: description: "Changed python files" - value: ${{ 
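
# deploy/_filter_files.py is invoked above but never shown in this series; a
# minimal implementation consistent with its call shape (extension first, then
# file paths, matches printed space-separated) might look like this — an
# assumption, not the actual script:
import sys

def main() -> None:
    extension, files = sys.argv[1], sys.argv[2:]
    print(" ".join(f for f in files if f.endswith(f".{extension}")))

if __name__ == "__main__":
    main()
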
github.jobs.get_changed_files.outputs.python_files }} + value: ${{ jobs.get_changed_files.outputs.python_changed_files }} yaml_changed_files: description: "Changed yaml files" - value: ${{ github.jobs.get_changed_files.outputs.yaml_changed_files }} + value: ${{ jobs.get_changed_files.outputs.yaml_changed_files }} json_changed_files: description: "Changed json files" - value: ${{ github.jobs.get_changed_files.outputs.json_changed_files }} + value: ${{ jobs.get_changed_files.outputs.json_changed_files }} env: GH_TOKEN: ${{ github.token }} From 2a868c56a8017535711481e152f67cbb5843192b Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 16:10:48 +0100 Subject: [PATCH 107/195] Restructure linting and formatting --- .github/workflows/JOB_format.yml | 13 ++++++++++--- .github/workflows/JOB_lint.yml | 10 +++++----- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/.github/workflows/JOB_format.yml b/.github/workflows/JOB_format.yml index 8571e7818..671f0e7c3 100644 --- a/.github/workflows/JOB_format.yml +++ b/.github/workflows/JOB_format.yml @@ -26,14 +26,21 @@ jobs: run: | pip install black failed_formatting=1 - echo "Checking files *${{ inputs.files }}*" + failed_files="" + echo "Checking files [${{ inputs.files }}]" for file in ${{ inputs.files }} do echo "Checking $file" - black --check $file || failed_formatting=$((failed_formatting + 1)) + black --check $file || failed_formatting=$((failed_formatting + 1) && failed_files="$failed_files $file") done if [ $failed_formatting -ne 0 ]; then - echo "Formatting failed for $failed_formatting files" + echo "Formatting failed for $failed_formatting files 😢" + echo "Failed files + for file in $failed_files ; do + echo "- $file" + done exit 1 + else + echo "Formatting passed for all files 🎉" fi exit 0 diff --git a/.github/workflows/JOB_lint.yml b/.github/workflows/JOB_lint.yml index af9eff3f3..105657259 100644 --- a/.github/workflows/JOB_lint.yml +++ b/.github/workflows/JOB_lint.yml @@ -38,14 +38,14 @@ jobs: echo "Linting failed on $failed_linting files" echo "Failed files: $failed_files" - echo "# Linting failed 😢" >> $GITHUB_STEP_SUMMARY - echo "## Failed files" >> $GITHUB_STEP_SUMMARY + echo "Linting failed 😢" + echo "Failed files" for file in $failed_files ; do - echo "- $file" >> $GITHUB_STEP_SUMMARY + echo "- $file" done exit 1 else - echo "# Linting passed 🎉" >> $GITHUB_STEP_SUMMARY - echo "Tested files: ${{ inputs.files }}" >> $GITHUB_STEP_SUMMARY + echo "Linting passed 🎉" + echo "Tested files: ${{ inputs.files }}" fi exit 1 From 9c1944f15df3006eac7a23ea60b073252c0de3e6 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 5 Sep 2023 16:18:42 +0100 Subject: [PATCH 108/195] Get black and flake passing --- .github/workflows/JOB_lint.yml | 2 +- deploy/increase_version.py | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/JOB_lint.yml b/.github/workflows/JOB_lint.yml index 105657259..1f9d5b902 100644 --- a/.github/workflows/JOB_lint.yml +++ b/.github/workflows/JOB_lint.yml @@ -48,4 +48,4 @@ jobs: echo "Linting passed 🎉" echo "Tested files: ${{ inputs.files }}" fi - exit 1 + exit 0 diff --git a/deploy/increase_version.py b/deploy/increase_version.py index cf3b2eecc..60cc72a49 100755 --- a/deploy/increase_version.py +++ b/deploy/increase_version.py @@ -282,11 +282,7 @@ def main() -> None: if args.patch: new_version.increment_patch() - if ( - new_version.was_changed() - and not cicd_mode - and (force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?")) - ): + if 
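
# The per-file check-and-collect loops from PATCH 107 above, rendered in Python
# for comparison; it assumes black is on PATH, mirroring the workflow's
# "pip install black" step, and is a sketch rather than the project's tooling.
import subprocess
import sys

def check_format(files) -> int:
    failed = [f for f in files if subprocess.run(["black", "--check", f]).returncode != 0]
    if failed:
        print(f"Formatting failed for {len(failed)} files")
        for name in failed:
            print(f"- {name}")
        return 1
    print("Formatting passed for all files")
    return 0

if __name__ == "__main__":
    sys.exit(check_format(sys.argv[1:]))
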
new_version.was_changed() and not cicd_mode and (force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?")): _update_version(new_version) _update_pyproject_version(new_version) print(f"Version updated successfully to {str(new_version)}") From 47febf32f1cdf330c0debb843ac8cc057b568f36 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 6 Sep 2023 14:58:31 +0100 Subject: [PATCH 109/195] Extracted formatting --- .github/workflows/JOB_format.yml | 22 +----------- .github/workflows/JOB_lint.yml | 27 +------------- deploy/format.sh | 61 ++++++++++++++++++++++++++++++++ 3 files changed, 63 insertions(+), 47 deletions(-) create mode 100644 deploy/format.sh diff --git a/.github/workflows/JOB_format.yml b/.github/workflows/JOB_format.yml index 671f0e7c3..1da2fec71 100644 --- a/.github/workflows/JOB_format.yml +++ b/.github/workflows/JOB_format.yml @@ -23,24 +23,4 @@ jobs: - name: Black check shell: bash - run: | - pip install black - failed_formatting=1 - failed_files="" - echo "Checking files [${{ inputs.files }}]" - for file in ${{ inputs.files }} - do - echo "Checking $file" - black --check $file || failed_formatting=$((failed_formatting + 1) && failed_files="$failed_files $file") - done - if [ $failed_formatting -ne 0 ]; then - echo "Formatting failed for $failed_formatting files 😢" - echo "Failed files - for file in $failed_files ; do - echo "- $file" - done - exit 1 - else - echo "Formatting passed for all files 🎉" - fi - exit 0 + run: ${{ github.workspace }}/scripts/format_lint.sh format ${{ inputs.files }} diff --git a/.github/workflows/JOB_lint.yml b/.github/workflows/JOB_lint.yml index 1f9d5b902..ca30fc8da 100644 --- a/.github/workflows/JOB_lint.yml +++ b/.github/workflows/JOB_lint.yml @@ -23,29 +23,4 @@ jobs: - name: flake8 Lint shell: bash - run: | - pip install flake8 flake8-pyproject - failed_linting=0 - failed_files="" - echo "Linting files [${{ inputs.files }}]" - MARKDOWN_OUTPUT="" - for file in ${{ inputs.files }} - do - echo "Linting $file" - flake8 $file || failed_linting=$((failed_linting + 1) && failed_files="$failed_files $file") - done - if [ $failed_linting -ne 0 ]; then - echo "Linting failed on $failed_linting files" - echo "Failed files: $failed_files" - - echo "Linting failed 😢" - echo "Failed files" - for file in $failed_files ; do - echo "- $file" - done - exit 1 - else - echo "Linting passed 🎉" - echo "Tested files: ${{ inputs.files }}" - fi - exit 0 + run: ${{ github.workspace }}/deploy/format.sh lint ${{ inputs.files }} diff --git a/deploy/format.sh b/deploy/format.sh new file mode 100644 index 000000000..45c795cb4 --- /dev/null +++ b/deploy/format.sh @@ -0,0 +1,61 @@ +#!/usr/env/bash + +ACTION = $1 +FILES = shift 1 && $@ + +# If action is not format or lint exit +if [ $ACTION != "format" ] && [ $ACTION != "lint" ]; then + echo "Action must be format or lint" + exit 1 +fi + +# If no files are passed in, exit +if [ -z $FILES ]; then + echo "No files passed in" + exit 1 +fi + +# Install dependencies +if [ $ACTION == "format" ]; then + pip install black +else + pip install flake8 flake8-pyproject +fi + +failed_formatting=1 +failed_files="" +echo "** Checking files [$FILES] **" + +for file in $FILES +do + echo "_________________________________________________________" + echo "Checking $file" + + if [ $ACTION == "lint" ]; then + flake8 --config pyproject.toml $file || failed_formatting=$(failed_formatting + 1)\ + && failed_files=$("$failed_files $file")\ + && echo "Failed linting for $file" + continue + fi + + if [ $ACTION == "format" ]; 
then + black --check $file || failed_formatting=$(failed_formatting + 1)\ + && failed_files=$("$failed_files $file")\ + && echo "Failed formatting for $file" + continue + fi + + echo "_________________________________________________________" +done + +if [ $failed_formatting -ne 0 ]; then + echo "Formatting failed for $failed_formatting files 😢" + echo "Failed files + for file in $failed_files ; do + echo "- $file" + done + exit 1 +else + echo "Formatting passed for all files 🎉" +fi +exit 0 From 41bc6d958bc2071603b012a968a23435f974f397 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 6 Sep 2023 15:04:12 +0100 Subject: [PATCH 110/195] Permissions --- .github/workflows/JOB_format.yml | 2 +- .github/workflows/JOB_lint.yml | 2 +- deploy/format.sh | 0 3 files changed, 2 insertions(+), 2 deletions(-) mode change 100644 => 100755 deploy/format.sh diff --git a/.github/workflows/JOB_format.yml b/.github/workflows/JOB_format.yml index 1da2fec71..c4bd42199 100644 --- a/.github/workflows/JOB_format.yml +++ b/.github/workflows/JOB_format.yml @@ -23,4 +23,4 @@ jobs: - name: Black check shell: bash - run: ${{ github.workspace }}/scripts/format_lint.sh format ${{ inputs.files }} + run: bash ${{ github.workspace }}/deploy/format_lint.sh format ${{ inputs.files }} diff --git a/.github/workflows/JOB_lint.yml b/.github/workflows/JOB_lint.yml index ca30fc8da..331dd6f53 100644 --- a/.github/workflows/JOB_lint.yml +++ b/.github/workflows/JOB_lint.yml @@ -23,4 +23,4 @@ jobs: - name: flake8 Lint shell: bash - run: ${{ github.workspace }}/deploy/format.sh lint ${{ inputs.files }} + run: bash ${{ github.workspace }}/deploy/format.sh lint ${{ inputs.files }} diff --git a/deploy/format.sh b/deploy/format.sh old mode 100644 new mode 100755 From b58d844ef312200047f109b506d303960befcea2 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 6 Sep 2023 15:26:29 +0100 Subject: [PATCH 111/195] Linting and format jobs --- .github/workflows/JOB_lint.yml | 2 +- deploy/format.sh | 61 --------------------------- deploy/format_lint.sh | 75 ++++++++++++++++++++++++++++++++++ 3 files changed, 76 insertions(+), 62 deletions(-) delete mode 100755 deploy/format.sh create mode 100755 deploy/format_lint.sh diff --git a/.github/workflows/JOB_lint.yml b/.github/workflows/JOB_lint.yml index 331dd6f53..aad223870 100644 --- a/.github/workflows/JOB_lint.yml +++ b/.github/workflows/JOB_lint.yml @@ -23,4 +23,4 @@ jobs: - name: flake8 Lint shell: bash - run: bash ${{ github.workspace }}/deploy/format.sh lint ${{ inputs.files }} + run: bash ${{ github.workspace }}/deploy/format_lint.sh lint ${{ inputs.files }} diff --git a/deploy/format.sh b/deploy/format.sh deleted file mode 100755 index 45c795cb4..000000000 --- a/deploy/format.sh +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/env/bash - -ACTION = $1 -FILES = shift 1 && $@ - -# If action is not format or lint exit -if [ $ACTION != "format" ] && [ $ACTION != "lint" ]; then - echo "Action must be format or lint" - exit 1 -fi - -# If no files are passed in, exit -if [ -z $FILES ]; then - echo "No files passed in" - exit 1 -fi - -# Install dependencies -if [ $ACTION == "format" ]; then - pip install black -else - pip install flake8 flake8-pyproject -fi - -failed_formatting=1 -failed_files="" -echo "** Checking files [$FILES] **" - -for file in $FILES -do - echo "_________________________________________________________" - echo "Checking $file" - - if [ $ACTION == "lint" ]; then - flake8 --config pyproject.toml $file || failed_formatting=$(failed_formatting + 1)\ - && failed_files=$("$failed_files $file")\ - && echo 
"Failed linting for $file" - continue - fi - - if [ $ACTION == "format" ]; then - black --check $file || failed_formatting=$(failed_formatting + 1)\ - && failed_files=$("$failed_files $file")\ - && echo "Failed formatting for $file" - continue - fi - - echo "_________________________________________________________" -done - -if [ $failed_formatting -ne 0 ]; then - echo "Formatting failed for $failed_formatting files 😢" - echo "Failed files - for file in $failed_files ; do - echo "- $file" - done - exit 1 -else - echo "Formatting passed for all files 🎉" -fi -exit 0 diff --git a/deploy/format_lint.sh b/deploy/format_lint.sh new file mode 100755 index 000000000..2801560fe --- /dev/null +++ b/deploy/format_lint.sh @@ -0,0 +1,75 @@ +#!/usr/bin/env bash + +ACTION=$1 +shift 1 +FILES=$@ + +echo "Action: $ACTION" +echo "Files: $FILES" + +# If action is not format or lint exit +if [[ -z $ACTION || ($ACTION != "format" && $ACTION != "lint") ]] ; then + echo "Action must be format or lint" + exit 1 +fi + +# If no files are passed in, exit +if [ -z $FILES ]; then + echo "No files passed in" + exit 1 +fi + +# Install dependencies +if [ $ACTION == "format" ]; then + pip install black &> pip_log.txt +else + pip install flake8 flake8-pyproject &> pip_log.txt +fi + +# Check if pip install failed +if [ $? -ne 0 ]; then + echo "Pip install failed" + cat pip_log.txt + rm pip_log.txt + exit 1 +fi + +failed_formatting=1 +failed_files="" +echo "** Checking files [$FILES] **" + +for file in $FILES +do + echo "_________________________________________________________" + echo "Checking $file" + + if [ -f $file ]; then + if [ $ACTION == "lint" ]; then + flake8 --config pyproject.toml $file || failed_formatting=$(failed_formatting + 1)\ + && failed_files="$failed_files $file"\ + && echo "Failed linting for $file" + fi + + if [ $ACTION == "format" ]; then + black --check $file || failed_formatting=$(failed_formatting + 1)\ + && failed_files="$failed_files $file"\ + && echo "Failed formatting for $file" + fi + else + echo "File $file does not exist" + fi + + echo "_________________________________________________________" +done + +if [ $failed_formatting -ne 0 ]; then + echo "Formatting failed for $failed_formatting files 😢" + echo "Failed files" + for file in $failed_files ; do + echo "- $file" + done + exit 1 +else + echo "Formatting passed for all files 🎉" +fi +exit 0 From eac86d297f1b012ef48645f3964fe022808ff269 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 6 Sep 2023 19:20:33 +0100 Subject: [PATCH 112/195] Use ruff instead of black --- .github/workflows/EVENT_pull_request.yml | 18 ----- .github/workflows/JOB_lint.yml | 2 +- deploy/format_lint.sh | 46 +++++++---- deploy/increase_version.py | 24 ++++-- poetry.lock | 98 +++++++----------------- pyproject.toml | 23 +++--- 6 files changed, 88 insertions(+), 123 deletions(-) diff --git a/.github/workflows/EVENT_pull_request.yml b/.github/workflows/EVENT_pull_request.yml index 94a32e17a..377648058 100644 --- a/.github/workflows/EVENT_pull_request.yml +++ b/.github/workflows/EVENT_pull_request.yml @@ -17,24 +17,6 @@ jobs: name: Get changed files uses: ./.github/workflows/JOB_get_changed_files.yml - - debug: - needs: get_changed_files - runs-on: ubuntu-latest - steps: - - name: Debug - run: | - echo "Changed files:" - echo "${{ needs.get_changed_files.outputs.changed_files }}" - echo "Python changed files:" - echo "${{ needs.get_changed_files.outputs.python_changed_files }}" - echo "Yaml changed files:" - echo "${{ needs.get_changed_files.outputs.yaml_changed_files }}" - 
echo "Json changed files:" - echo "${{ needs.get_changed_files.outputs.json_changed_files }}" - - - format: name: Check format of python needs: get_changed_files diff --git a/.github/workflows/JOB_lint.yml b/.github/workflows/JOB_lint.yml index aad223870..31ada14ef 100644 --- a/.github/workflows/JOB_lint.yml +++ b/.github/workflows/JOB_lint.yml @@ -19,7 +19,7 @@ jobs: - name: Set up Python environment uses: actions/setup-python@v4 with: - python-version: "3.11" + python-version: "3.10" - name: flake8 Lint shell: bash diff --git a/deploy/format_lint.sh b/deploy/format_lint.sh index 2801560fe..f679d7cd1 100755 --- a/deploy/format_lint.sh +++ b/deploy/format_lint.sh @@ -8,22 +8,22 @@ echo "Action: $ACTION" echo "Files: $FILES" # If action is not format or lint exit -if [[ -z $ACTION || ($ACTION != "format" && $ACTION != "lint") ]] ; then +if [[ -z $ACTION || ("$ACTION" != "format" && "$ACTION" != "lint") ]] ; then echo "Action must be format or lint" exit 1 fi # If no files are passed in, exit -if [ -z $FILES ]; then +if [[ -z $FILES ]]; then echo "No files passed in" exit 1 fi # Install dependencies -if [ $ACTION == "format" ]; then +if [ "$ACTION" == "format" ]; then pip install black &> pip_log.txt else - pip install flake8 flake8-pyproject &> pip_log.txt + pip install ruff &> pip_log.txt fi # Check if pip install failed @@ -34,36 +34,48 @@ if [ $? -ne 0 ]; then exit 1 fi -failed_formatting=1 failed_files="" echo "** Checking files [$FILES] **" -for file in $FILES -do +for file in $FILES ; do echo "_________________________________________________________" echo "Checking $file" if [ -f $file ]; then - if [ $ACTION == "lint" ]; then - flake8 --config pyproject.toml $file || failed_formatting=$(failed_formatting + 1)\ - && failed_files="$failed_files $file"\ - && echo "Failed linting for $file" + if [ "$ACTION" == "lint" ]; then + ruff check $FILES; rc=$? + echo "$rc" + if [ $rc -ne 0 ]; then + failed_files="$failed_files $file" + echo "❌" + else + echo "✅" + fi fi - if [ $ACTION == "format" ]; then - black --check $file || failed_formatting=$(failed_formatting + 1)\ - && failed_files="$failed_files $file"\ - && echo "Failed formatting for $file" + if [ "$ACTION" == "format" ]; then + black --check $file + if [ $? 
-ne 0 ]; then + failed_files="$failed_files $file" + echo "❌" + else + echo "✅" + fi fi else echo "File $file does not exist" fi + echo "DEBUG" + echo "failed_files: $failed_files" + echo "_________________________________________________________" done -if [ $failed_formatting -ne 0 ]; then - echo "Formatting failed for $failed_formatting files 😢" +echo "$failed_files" + +if [[ "$failed_files" -ne "" ]]; then + echo "Checks failed for $failed_formatting files 😢" echo "Failed files" for file in $failed_files ; do echo "- $file" diff --git a/deploy/increase_version.py b/deploy/increase_version.py index 60cc72a49..5f7e5768d 100755 --- a/deploy/increase_version.py +++ b/deploy/increase_version.py @@ -217,7 +217,7 @@ def _update_pyproject_version(new_version: Version) -> None: exit(1) -def main() -> None: +def arguments() -> argparse.Namespace: parser = argparse.ArgumentParser(description="Increase version number") parser.add_argument("-f", "--force", action="store_true", help="force actions, do not ask for confirmation") parser.add_argument( @@ -233,11 +233,10 @@ def main() -> None: parser.add_argument("-p", "--patch", action="store_true", help="increase patch version") parser.add_argument("-N", "--new-version", type=str, help="set new version number (overrides -M, -m, -p)") - args = parser.parse_args() + return parser.parse_args() - force_actions = False - cicd_mode = False +def validate_args(args: argparse.Namespace, force_actions: bool, cicd_mode: bool) -> Tuple[bool, bool]: if args.force: print("Force mode enabled, no confirmation will be asked") force_actions = True @@ -250,6 +249,17 @@ def main() -> None: print("Cannot increase major, minor and patch at the same time. Specify only one of these.") exit(2) + return force_actions, cicd_mode + + +def main() -> None: + args = arguments() + + force_actions = False + cicd_mode = False + + force_actions, cicd_mode = validate_args(args, force_actions, cicd_mode) + # Constants so that these are not mutated by mistake LOCAL_VERSION = _get_version() PYPROJECT_VERSION = _get_pyproject_version() @@ -282,7 +292,11 @@ def main() -> None: if args.patch: new_version.increment_patch() - if new_version.was_changed() and not cicd_mode and (force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?")): + if ( + new_version.was_changed() + and not cicd_mode + and (force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?")) + ): _update_version(new_version) _update_pyproject_version(new_version) print(f"Version updated successfully to {str(new_version)}") diff --git a/poetry.lock b/poetry.lock index 815036adf..02a9a4102 100644 --- a/poetry.lock +++ b/poetry.lock @@ -298,39 +298,6 @@ files = [ [package.extras] test = ["pytest (>=6)"] -[[package]] -name = "flake8" -version = "6.1.0" -description = "the modular source code checker: pep8 pyflakes and co" -optional = true -python-versions = ">=3.8.1" -files = [ - {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, - {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" -pyflakes = ">=3.1.0,<3.2.0" - -[[package]] -name = "flake8-pyproject" -version = "1.2.3" -description = "Flake8 plug-in loading the configuration from pyproject.toml" -optional = true -python-versions = ">= 3.6" -files = [ - {file = 
"flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"}, -] - -[package.dependencies] -Flake8 = ">=5" -TOMLi = {version = "*", markers = "python_version < \"3.11\""} - -[package.extras] -dev = ["pyTest", "pyTest-cov"] - [[package]] name = "humanize" version = "4.6.0" @@ -484,17 +451,6 @@ profiling = ["gprof2dot"] rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = true -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - [[package]] name = "mdurl" version = "0.1.2" @@ -964,17 +920,6 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "pycodestyle" -version = "2.11.0" -description = "Python style guide checker" -optional = true -python-versions = ">=3.8" -files = [ - {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, - {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, -] - [[package]] name = "pydantic" version = "1.10.12" @@ -1027,17 +972,6 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] -[[package]] -name = "pyflakes" -version = "3.1.0" -description = "passive checker of Python programs" -optional = true -python-versions = ">=3.8" -files = [ - {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, - {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, -] - [[package]] name = "pygments" version = "2.16.1" @@ -1258,6 +1192,32 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9 [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "ruff" +version = "0.0.287" +description = "An extremely fast Python linter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.0.287-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:1e0f9ee4c3191444eefeda97d7084721d9b8e29017f67997a20c153457f2eafd"}, + {file = "ruff-0.0.287-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e9843e5704d4fb44e1a8161b0d31c1a38819723f0942639dfeb53d553be9bfb5"}, + {file = "ruff-0.0.287-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca1ed11d759a29695aed2bfc7f914b39bcadfe2ef08d98ff69c873f639ad3a8"}, + {file = "ruff-0.0.287-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cf4d5ad3073af10f186ea22ce24bc5a8afa46151f6896f35c586e40148ba20b"}, + {file = "ruff-0.0.287-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d9d58bcb29afd72d2afe67120afcc7d240efc69a235853813ad556443dc922"}, + {file = "ruff-0.0.287-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:06ac5df7dd3ba8bf83bba1490a72f97f1b9b21c7cbcba8406a09de1a83f36083"}, + {file = "ruff-0.0.287-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bfb478e1146a60aa740ab9ebe448b1f9e3c0dfb54be3cc58713310eef059c30"}, + {file = "ruff-0.0.287-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00d579a011949108c4b4fa04c4f1ee066dab536a9ba94114e8e580c96be2aeb4"}, + {file = "ruff-0.0.287-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a810a79b8029cc92d06c36ea1f10be5298d2323d9024e1d21aedbf0a1a13e5"}, + {file = "ruff-0.0.287-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:150007028ad4976ce9a7704f635ead6d0e767f73354ce0137e3e44f3a6c0963b"}, + {file = "ruff-0.0.287-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a24a280db71b0fa2e0de0312b4aecb8e6d08081d1b0b3c641846a9af8e35b4a7"}, + {file = "ruff-0.0.287-py3-none-musllinux_1_2_i686.whl", hash = "sha256:2918cb7885fa1611d542de1530bea3fbd63762da793751cc8c8d6e4ba234c3d8"}, + {file = "ruff-0.0.287-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:33d7b251afb60bec02a64572b0fd56594b1923ee77585bee1e7e1daf675e7ae7"}, + {file = "ruff-0.0.287-py3-none-win32.whl", hash = "sha256:022f8bed2dcb5e5429339b7c326155e968a06c42825912481e10be15dafb424b"}, + {file = "ruff-0.0.287-py3-none-win_amd64.whl", hash = "sha256:26bd0041d135a883bd6ab3e0b29c42470781fb504cf514e4c17e970e33411d90"}, + {file = "ruff-0.0.287-py3-none-win_arm64.whl", hash = "sha256:44bceb3310ac04f0e59d4851e6227f7b1404f753997c7859192e41dbee9f5c8d"}, + {file = "ruff-0.0.287.tar.gz", hash = "sha256:02dc4f5bf53ef136e459d467f3ce3e04844d509bc46c025a05b018feb37bbc39"}, +] + [[package]] name = "scikit-learn" version = "1.3.0" @@ -1661,13 +1621,13 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -dev = ["black", "debugpy", "flake8", "flake8-pyproject", "isort", "mypy", "pytest", "responses"] +dev = ["black", "debugpy", "isort", "mypy", "pytest", "responses"] medical = ["connected-components-3d", "nibabel"] ml = ["scikit-learn", "torch", "torchvision"] ocv = ["opencv-python-headless"] -test = ["flake8-pyproject", "pytest", "responses"] +test = ["pytest", "responses"] [metadata] lock-version = "2.0" python-versions = ">=3.7.0,<3.11" -content-hash = "bf2caf6db46010fe08b8aac8c522e07985b2a0758c89e830a0f92bdc455c86e3" 
+content-hash = "152f9bbaf333f84827ff24e9b39a01c6d7377750b0e6726c50ad0105aef939b1" diff --git a/pyproject.toml b/pyproject.toml index d17303665..99573443e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,7 @@ classifiers = [ [[tool.poetry.packages]] include = "darwin" + [tool.isort] profile = "black" @@ -56,8 +57,15 @@ warn_untyped_fields = true line-length = 160 [tool.ruff] +select = ["E", "F", "C"] +ignore = ["E203", "E402"] line-length = 160 +[tool.ruff.per-file-ignores] +"__init__.py" = ["E402"] +"path/to/file.py" = ["E402"] +"**/{tests,docs,tools}/*" = ["E402"] + [tool.flake8] max-line-length = 160 ignore = ["E203", "W503", "E402"] @@ -84,11 +92,9 @@ torch = { version = "^1.13.1", optional = true } torchvision = { version = "^0.14.1", optional = true } black = { version = "^22.12.0", optional = true } isort = { version = "^5.11.4", optional = true } -flake8 = { version = "^6.0.0", python = ">=3.8.1,<3.11", optional = true } mypy = { version = "^0.991", optional = true } responses = { version = "^0.22.0", optional = true } pytest = { version = "^7.2.1", optional = true } -flake8-pyproject = { version = "^1.2.2", python = ">=3.8.1,<3.11", optional = true } debugpy = { version = "^1.6.5", optional = true } types-requests = { version = "^2.28.11.8" } mpire = { version = "^2.7.0" } @@ -99,17 +105,8 @@ opencv-python-headless = { version = "^4.8.0.76", optional = true } pyyaml = "^6.0.1" [tool.poetry.extras] -dev = [ - "black", - "isort", - "flake8", - "mypy", - "debugpy", - "responses", - "pytest", - "flake8-pyproject", -] -test = ["responses", "pytest", "flake8-pyproject"] +dev = ["black", "isort", "mypy", "debugpy", "responses", "pytest", "ruff"] +test = ["responses", "pytest", "ruff"] ml = ["torch", "torchvision", "scikit-learn"] medical = ["nibabel", "connected-components-3d"] ocv = ["opencv-python-headless"] From e9af68c28ad6bcf20bc31fb0e155180edb07e6c8 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 6 Sep 2023 19:23:42 +0100 Subject: [PATCH 113/195] Bash syntax --- deploy/format_lint.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/deploy/format_lint.sh b/deploy/format_lint.sh index f679d7cd1..b60a1d082 100755 --- a/deploy/format_lint.sh +++ b/deploy/format_lint.sh @@ -72,9 +72,10 @@ for file in $FILES ; do echo "_________________________________________________________" done -echo "$failed_files" +echo +echo -if [[ "$failed_files" -ne "" ]]; then +if [[ "$failed_files" != "" ]]; then echo "Checks failed for $failed_formatting files 😢" echo "Failed files" for file in $failed_files ; do From 361b6358007d800b3a824accd6b5f8903ad81f2f Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 6 Sep 2023 19:24:59 +0100 Subject: [PATCH 114/195] Fix black --- deploy/increase_version.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/deploy/increase_version.py b/deploy/increase_version.py index 5f7e5768d..efe25a909 100755 --- a/deploy/increase_version.py +++ b/deploy/increase_version.py @@ -292,11 +292,7 @@ def main() -> None: if args.patch: new_version.increment_patch() - if ( - new_version.was_changed() - and not cicd_mode - and (force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?")) - ): + if new_version.was_changed() and not cicd_mode and (force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?")): _update_version(new_version) _update_pyproject_version(new_version) print(f"Version updated successfully to {str(new_version)}") From 
948cfca27e06dff7029e9775057e4f572a67d8c3 Mon Sep 17 00:00:00 2001
From: Owen
Date: Thu, 7 Sep 2023 12:39:04 +0100
Subject: [PATCH 115/195] Changes to increase_version.py for CICD use

---
 .gitignore                 | 5 +++--
 deploy/increase_version.py | 8 +++++++-
 2 files changed, 10 insertions(+), 3 deletions(-)

diff --git a/.gitignore b/.gitignore
index ad7dcfc48..8b7abc2d9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,7 +11,7 @@ darwin_py.egg-info/PKG-INFO

 *.png
 *.jpeg
-*.jpg
+*.jpg
 *.bpm
 *.mov
 *.mp4
@@ -181,4 +181,5 @@ scripts/
 !darwin/future/tests/data_objects/workflow/data
 !tests/darwin/dataset/data

-*test_results.xml
\ No newline at end of file
+*test_results.xml
+version.txt
diff --git a/deploy/increase_version.py b/deploy/increase_version.py
index efe25a909..25ae30a86 100755
--- a/deploy/increase_version.py
+++ b/deploy/increase_version.py
@@ -292,10 +292,16 @@ def main() -> None:
     if args.patch:
         new_version.increment_patch()

-    if new_version.was_changed() and not cicd_mode and (force_actions or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?")):
+    if new_version.was_changed() and (
+        force_actions or cicd_mode or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?")
+    ):
         _update_version(new_version)
         _update_pyproject_version(new_version)
         print(f"Version updated successfully to {str(new_version)}")
+        if cicd_mode:
+            with open("version.txt", "w") as f:
+                f.write(str(new_version))
+            print("Version file updated successfully")
     else:
         print("Version not updated")

From 84b15ce62f55fb4772b0a3c4121d4067b1f2dbc1 Mon Sep 17 00:00:00 2001
From: Owen
Date: Thu, 7 Sep 2023 13:02:06 +0100
Subject: [PATCH 116/195] Outline of all basic flows

---
 .github/workflows/EVENT_merge_to_master.yml   | 27 ++++++
 .github/workflows/EVENT_pull_request.yml      |  2 +-
 .github/workflows/EVENT_release.yml           | 83 +++++++------------
 .github/workflows/EVENT_scheduled_release.yml | 52 ++++++++++++
 .github/workflows/EVENT_tag.yml               | 61 ++++++++++++++
 .../JOB_check-master-can-release.yml          | 46 ++++++++++
 .../workflows/JOB_generate_documentation.yml  | 19 +++++
 .../workflows/JOB_reverse_commit_on_fails.yml | 24 ++++++
 8 files changed, 261 insertions(+), 53 deletions(-)
 create mode 100644 .github/workflows/EVENT_merge_to_master.yml
 create mode 100644 .github/workflows/EVENT_scheduled_release.yml
 create mode 100644 .github/workflows/EVENT_tag.yml
 create mode 100644 .github/workflows/JOB_check-master-can-release.yml
 create mode 100644 .github/workflows/JOB_generate_documentation.yml
 create mode 100644 .github/workflows/JOB_reverse_commit_on_fails.yml

diff --git a/.github/workflows/EVENT_merge_to_master.yml b/.github/workflows/EVENT_merge_to_master.yml
new file mode 100644
index 000000000..b035a919d
--- /dev/null
+++ b/.github/workflows/EVENT_merge_to_master.yml
@@ -0,0 +1,27 @@
+name: merge_to_master
+run-name: Merge to master
+
+on:
+  push:
+    branches:
+      - io-1554-test-merge # TODO change to master
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+
+  run_tests:
+    name: Run tests
+    uses: ./.github/workflows/JOB_tests.yml
+
+  documentation:
+    name: Documentation
+    uses: ./.github/workflows/JOB_generate_documentation.yml
+
+  reverse_commit_on_fails:
+    name: Reverse commit on fails
+    uses: ./.github/workflows/JOB_reverse_commit_on_fails.yml
+
+
diff --git a/.github/workflows/EVENT_pull_request.yml b/.github/workflows/EVENT_pull_request.yml
index 377648058..c9feeb55c 100644
--- a/.github/workflows/EVENT_pull_request.yml
+++
b/.github/workflows/EVENT_pull_request.yml @@ -3,7 +3,7 @@ run-name: Pull Request on: pull_request: - types: [opened, synchronize, reopened, closed] + types: [opened, synchronize, reopened] # Prevent running concurrently diff --git a/.github/workflows/EVENT_release.yml b/.github/workflows/EVENT_release.yml index e9a40c758..befd64a55 100644 --- a/.github/workflows/EVENT_release.yml +++ b/.github/workflows/EVENT_release.yml @@ -1,61 +1,40 @@ -name: created_tag -run-name: Created Tag +name: release +run-name: Release on: - push: - tags: - - 'v[0-9]+.[0-9]+.[0-9]+' - - 'test-*' - -concurrency: - group: created_tag - cancel-in-progress: true - -env: - GH_TOKEN: ${{ secrets.GH_TOKEN }} + release: + types: [created] jobs: - checkout: + deploy: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Checkout + - name: Set up Python + uses: actions/setup-python@v2 with: - ref: ${{ github.head_ref || github.ref }} - - check_master_is_passing: - runs-on: ubuntu-latest - needs: checkout - steps: - - name: Check master is passing - uses: actions/github-script@v3 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - const { data: { state } } = await github.repos.getCombinedStatusForRef({ - owner: context.repo.owner, - repo: context.repo.repo, - ref: '${{ github.head_ref || github.ref }}' - }); - if (state !== 'success') { - core.setFailed('Tagged branch is not passing, cannot create a release'); - } - - deploy: - needs: [checkout, check_master_is_passing] - runs-on: ubuntu-latest - steps: - - name: Create release - uses: actions/github-script@v3 + python-version: "3.9" + - run: pip install pip --upgrade + - name: Setup Poetry + uses: abatilo/actions-poetry@v2 with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - const { data: { id: release_id } } = await github.repos.createRelease({ - owner: context.repo.owner, - repo: context.repo.repo, - tag_name: context.ref, - name: context.ref, - draft: false, - prerelease: context.ref.startsWith('test-') - }); - core.setOutput('release_id', release_id); + poetry-version: "1.3.1" + - name: Install dependencies + run: | + poetry install --no-interaction --no-root --all-extras -vvv + poetry build + - name: Publish + if: startsWith(github.ref, 'refs/tags/v') + env: + POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }} + POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + run: | + poetry publish + - name: Publish on test.pypi.org + # if release is a prerelease, publish to test.pypi.org + if: startsWith(github.ref, 'refs/tags/test-') + env: + POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.TEST_PYPI_USERNAME }} + POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.TEST_PYPI_PASSWORD }} + run: | + poetry publish diff --git a/.github/workflows/EVENT_scheduled_release.yml b/.github/workflows/EVENT_scheduled_release.yml new file mode 100644 index 000000000..a3355f4fb --- /dev/null +++ b/.github/workflows/EVENT_scheduled_release.yml @@ -0,0 +1,52 @@ +name: scheduled-release +run-name: Scheduled Release + +on: + schedule: + - cron: '30 10 * * 2' # every Tuesday at 10:30am + +jobs: + checkout: + name: Checkout + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + ref: master + + check_master_can_release: + name: Check master can release + uses: ./.github/workflows/JOB_check-master-can-release.yml + + run_tests: + name: Run tests + needs: check_master_can_release + uses: ./.github/workflows/JOB_tests.yml + + increment_patch_version: + name: Increment patch version + needs: [checkout, check_master_can_release, 
run_tests]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Run increment script
+        shell: bash
+        run: python3 ${{ github.workspace }}/deploy/increase_version.py --cicd --patch
+
+  commit_changes_to_master:
+    name: Commit changes to master
+    needs: increment_patch_version
+    runs-on: ubuntu-latest
+    steps:
+      - name: Commit changes
+        shell: bash
+        run: |
+          version=`cat ${{ github.workspace }}/deploy/version.txt`
+          git config --global user.email "github-actions[bot]@users.noreply.github.com"
+          git config --global user.name "github-actions[bot]"
+          git add .
+          git commit -m "HOUSEKEEPING: Increment version number to $version"
+          git tag "v$version"
+          git push origin master --tags
+
+  # TODO Ensure this triggers the release job, which is triggered on tag
+
diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
new file mode 100644
index 000000000..2d3552e83
--- /dev/null
+++ b/.github/workflows/EVENT_tag.yml
@@ -0,0 +1,61 @@
+name: created_tag
+run-name: Created Tag
+
+on:
+  push:
+    tags:
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+      - 'test-*'
+
+concurrency:
+  group: created_tag
+  cancel-in-progress: true
+
+env:
+  GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+jobs:
+  checkout:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        name: Checkout
+        with:
+          ref: ${{ github.head_ref || github.ref }}
+
+  check_master_is_passing:
+    runs-on: ubuntu-latest
+    needs: checkout
+    steps:
+      - name: Check master is passing
+        uses: actions/github-script@v3
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            const { data: { state } } = await github.repos.getCombinedStatusForRef({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              ref: '${{ github.head_ref || github.ref }}'
+            });
+            if (state !== 'success') {
+              core.setFailed('Tagged branch is not passing, cannot create a release');
+            }
+
+  deploy:
+    needs: [checkout, check_master_is_passing]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Create release
+        uses: actions/github-script@v3
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            const { data: { id: release_id } } = await github.repos.createRelease({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              tag_name: context.ref,
+              name: context.ref,
+              draft: true,
+              prerelease: context.ref.startsWith('test-')
+            });
+            core.setOutput('release_id', release_id);
diff --git a/.github/workflows/JOB_check-master-can-release.yml b/.github/workflows/JOB_check-master-can-release.yml
new file mode 100644
index 000000000..eda37ed85
--- /dev/null
+++ b/.github/workflows/JOB_check-master-can-release.yml
@@ -0,0 +1,46 @@
+name: check-master-can-release
+run-name: check-master-can-release
+
+on:
+  workflow_call:
+
+jobs:
+  check-master-can-release:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check master is passing
+        uses: actions/github-script@v3
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            const { data: { check_runs } } = await github.checks.listForRef({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              ref: context.sha,
+              filter: 'latest',
+            });
+            const masterCheck = check_runs.find(({ name }) => name === 'master');
+            if (masterCheck.conclusion !== 'success') {
+              throw new Error('Master is not passing');
+            }
+
+      - name: Check there are commits in master since last release
+        uses: actions/github-script@v3
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            const { data: { published_at } } = await github.repos.getLatestRelease({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+            });
+            const lastRelease = new
Date(published_at);
+            const { data: { commits } } = await github.repos.compareCommits({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              base: lastRelease.toISOString(),
+              head: 'master',
+            });
+            if (commits.length === 0) {
+              throw new Error('No commits in master since last release');
+            }
+
diff --git a/.github/workflows/JOB_generate_documentation.yml b/.github/workflows/JOB_generate_documentation.yml
new file mode 100644
index 000000000..8d84218ae
--- /dev/null
+++ b/.github/workflows/JOB_generate_documentation.yml
@@ -0,0 +1,19 @@
+name: generate-documentation
+run-name: Generate Documentation
+
+on:
+  workflow_call:
+
+jobs:
+  generate-documentation:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          ref: ${{ github.head_ref }} # Will usually be master, but this allows running against any branch.
+
+      - name: Generate Documentation
+        run:
+          echo "TODO"
+          # TODO
diff --git a/.github/workflows/JOB_reverse_commit_on_fails.yml b/.github/workflows/JOB_reverse_commit_on_fails.yml
new file mode 100644
index 000000000..c05d037a3
--- /dev/null
+++ b/.github/workflows/JOB_reverse_commit_on_fails.yml
@@ -0,0 +1,24 @@
+name: reverse-commit-on-fails
+run-name: Reverse commit on fails
+
+on:
+  workflow_call:
+
+
+# Run when master is failing due to a merge, and you want to reverse the merge
+# and push the reverse commit to master.
+
+jobs:
+
+  step-master-back-one-commit:
+    name: Step master back one commit
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout master
+        uses: actions/checkout@v2
+        with:
+          ref: master
+      - name: Reverse commit
+        run: |
+          git revert --no-edit HEAD
+          git push origin master
From 60efa3cc3b9d145de339f0306ec9a7c4faec05dc Mon Sep 17 00:00:00 2001
From: Owen
Date: Thu, 7 Sep 2023 15:14:34 +0100
Subject: [PATCH 117/195] End of PR task

---
 .github/workflows/EVENT_pull_request.yml |  7 +++++++
 .github/workflows/EVENT_release.yml      |  9 +++++++-
 .github/workflows/JOB_typecheck.yml      | 26 ++++++++++++++++++++++++
 deploy/format_lint.sh                    | 21 ++++++++++++++++---
 4 files changed, 59 insertions(+), 4 deletions(-)
 create mode 100644 .github/workflows/JOB_typecheck.yml

diff --git a/.github/workflows/EVENT_pull_request.yml b/.github/workflows/EVENT_pull_request.yml
index c9feeb55c..67fdf2d4c 100644
--- a/.github/workflows/EVENT_pull_request.yml
+++ b/.github/workflows/EVENT_pull_request.yml
@@ -31,6 +31,13 @@ jobs:
     with:
       files: ${{ needs.get_changed_files.outputs.python_changed_files }}

+  typecheck:
+    name: Lint python
+    needs: get_changed_files
+    uses: ./.github/workflows/JOB_typecheck.yml
+    with:
+      files: ${{ needs.get_changed_files.outputs.python_changed_files }}
+
   run_tests:
     name: Run tests
     uses: ./.github/workflows/JOB_tests.yml
diff --git a/.github/workflows/EVENT_release.yml b/.github/workflows/EVENT_release.yml
index befd64a55..e1000c126 100644
--- a/.github/workflows/EVENT_release.yml
+++ b/.github/workflows/EVENT_release.yml
@@ -10,26 +10,33 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
+        with:
+          ref: ${{ github.ref }}
+
       - name: Set up Python
         uses: actions/setup-python@v2
         with:
           python-version: "3.9"
+
       - run: pip install pip --upgrade
       - name: Setup Poetry
         uses: abatilo/actions-poetry@v2
         with:
           poetry-version: "1.3.1"
+
       - name: Install dependencies
         run: |
           poetry install --no-interaction --no-root --all-extras -vvv
           poetry build
-      - name: Publish
+
+      - name: Publish on pypi.org
         if: startsWith(github.ref, 'refs/tags/v')
         env:
           POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }}
           POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{
secrets.PYPI_PASSWORD }} run: | poetry publish + - name: Publish on test.pypi.org # if release is a prerelease, publish to test.pypi.org if: startsWith(github.ref, 'refs/tags/test-') diff --git a/.github/workflows/JOB_typecheck.yml b/.github/workflows/JOB_typecheck.yml new file mode 100644 index 000000000..571e19fb8 --- /dev/null +++ b/.github/workflows/JOB_typecheck.yml @@ -0,0 +1,26 @@ +name: typecheck +run-name: Static type analysis + +on: + workflow_call: + inputs: + files: + type: string + description: "Files to lint" + required: true + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Check out source repository + uses: actions/checkout@v3 + + - name: Set up Python environment + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: MyPy typecheck + shell: bash + run: bash ${{ github.workspace }}/deploy/format_lint.sh typecheck ${{ inputs.files }} diff --git a/deploy/format_lint.sh b/deploy/format_lint.sh index b60a1d082..496061c0a 100755 --- a/deploy/format_lint.sh +++ b/deploy/format_lint.sh @@ -8,8 +8,8 @@ echo "Action: $ACTION" echo "Files: $FILES" # If action is not format or lint exit -if [[ -z $ACTION || ("$ACTION" != "format" && "$ACTION" != "lint") ]] ; then - echo "Action must be format or lint" +if [[ -z $ACTION || ("$ACTION" != "format" && "$ACTION" != "lint" && "$ACTION" != "typecheck") ]] ; then + echo "Action must be format, typecheck, or lint" exit 1 fi @@ -22,8 +22,13 @@ fi # Install dependencies if [ "$ACTION" == "format" ]; then pip install black &> pip_log.txt -else + elif [ "$ACTION" == "lint" ]; then pip install ruff &> pip_log.txt + elif [ "$ACTION" == "typecheck" ]; then + pip install mypy &> pip_log.txt +else + echo "Action must be format, typecheck, or lint" + exit 1 fi # Check if pip install failed @@ -53,6 +58,16 @@ for file in $FILES ; do fi fi + if [ "$ACTION" == "typecheck" ]; then + mypy $file + if [ $? -ne 0 ]; then + failed_files="$failed_files $file" + echo "❌" + else + echo "✅" + fi + fi + if [ "$ACTION" == "format" ]; then black --check $file if [ $? 
-ne 0 ]; then From b60ca291e63b6deccc05dee6465c9e73feda14ea Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 7 Sep 2023 15:18:45 +0100 Subject: [PATCH 118/195] Testing merge action --- testmerge | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 testmerge diff --git a/testmerge b/testmerge new file mode 100644 index 000000000..e69de29bb From 814874779aac51b68bb55ba599f704658a1dc616 Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 7 Sep 2023 15:22:54 +0100 Subject: [PATCH 119/195] Typecheck fix --- .github/workflows/JOB_typecheck.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/JOB_typecheck.yml b/.github/workflows/JOB_typecheck.yml index 571e19fb8..3925bc537 100644 --- a/.github/workflows/JOB_typecheck.yml +++ b/.github/workflows/JOB_typecheck.yml @@ -21,6 +21,12 @@ jobs: with: python-version: "3.10" + - name: Install dependencies + shell: bash + run: | + pip install poetry + poetry install install --all-extras --vvv --no-root + - name: MyPy typecheck shell: bash run: bash ${{ github.workspace }}/deploy/format_lint.sh typecheck ${{ inputs.files }} From 39f8bb14aadf76846d6ac90902dfbf45a9349b7e Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 7 Sep 2023 15:24:04 +0100 Subject: [PATCH 120/195] Job name change --- .github/workflows/EVENT_pull_request.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/EVENT_pull_request.yml b/.github/workflows/EVENT_pull_request.yml index 67fdf2d4c..de0ffb8dc 100644 --- a/.github/workflows/EVENT_pull_request.yml +++ b/.github/workflows/EVENT_pull_request.yml @@ -32,7 +32,7 @@ jobs: files: ${{ needs.get_changed_files.outputs.python_changed_files }} typecheck: - name: Lint python + name: Analyse types in python needs: get_changed_files uses: ./.github/workflows/JOB_typecheck.yml with: From 3b9c49cd99e3903f932c3bf7e26e74a2893a40c9 Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 7 Sep 2023 15:29:50 +0100 Subject: [PATCH 121/195] PR finish --- .github/workflows/JOB_typecheck.yml | 2 +- deploy/increase_version.py | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/JOB_typecheck.yml b/.github/workflows/JOB_typecheck.yml index 3925bc537..b9d62406c 100644 --- a/.github/workflows/JOB_typecheck.yml +++ b/.github/workflows/JOB_typecheck.yml @@ -25,7 +25,7 @@ jobs: shell: bash run: | pip install poetry - poetry install install --all-extras --vvv --no-root + poetry install --all-extras --vvv --no-root - name: MyPy typecheck shell: bash diff --git a/deploy/increase_version.py b/deploy/increase_version.py index 25ae30a86..84a04fbb3 100755 --- a/deploy/increase_version.py +++ b/deploy/increase_version.py @@ -292,9 +292,7 @@ def main() -> None: if args.patch: new_version.increment_patch() - if new_version.was_changed() and ( - force_actions or cicd_mode or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?") - ): + if new_version.was_changed() and (force_actions or cicd_mode or confirm(f"Update version from {str(LOCAL_VERSION)} to {str(new_version)}?")): _update_version(new_version) _update_pyproject_version(new_version) print(f"Version updated successfully to {str(new_version)}") From 191eeb7f1dcb9cdcc2b0b9460decfa358d0f2172 Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 7 Sep 2023 15:32:05 +0100 Subject: [PATCH 122/195] poetry install options --- .github/workflows/JOB_typecheck.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/JOB_typecheck.yml b/.github/workflows/JOB_typecheck.yml index 
b9d62406c..100a242de 100644 --- a/.github/workflows/JOB_typecheck.yml +++ b/.github/workflows/JOB_typecheck.yml @@ -25,7 +25,7 @@ jobs: shell: bash run: | pip install poetry - poetry install --all-extras --vvv --no-root + poetry install --all-extras --no-root - name: MyPy typecheck shell: bash From 79273f41b5cb4218a1db353502a060a067d4fa4d Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 7 Sep 2023 17:11:33 +0100 Subject: [PATCH 123/195] Deps change --- poetry.lock | 69 +++++++++++++++++++++++--------------------------- pyproject.toml | 3 ++- 2 files changed, 34 insertions(+), 38 deletions(-) diff --git a/poetry.lock b/poetry.lock index 02a9a4102..4cce08e88 100644 --- a/poetry.lock +++ b/poetry.lock @@ -486,53 +486,48 @@ testing = ["dataclasses", "multiprocess", "multiprocess (>=0.70.15)", "numpy", " [[package]] name = "mypy" -version = "0.991" +version = "1.5.1" description = "Optional static typing for Python" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, - {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, - {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, - {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, - {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, - {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, - {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, - {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, - {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, - {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, - {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, - {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, - {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, - {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, - {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, - {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, - {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, - {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, - {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, - {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, - {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, - {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, - {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, - {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, + {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, + {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, + {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, + {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, + {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, + {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, + {file = 
"mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, + {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, + {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, + {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, + {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, + {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, + {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, + {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, + {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, + {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, + {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, ] [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] [[package]] @@ -1630,4 +1625,4 @@ test = ["pytest", "responses"] [metadata] lock-version = "2.0" python-versions = ">=3.7.0,<3.11" -content-hash = "152f9bbaf333f84827ff24e9b39a01c6d7377750b0e6726c50ad0105aef939b1" +content-hash = "522ae94837261aafc88519fb08890070a9e5abae14b4766ff816d9d7337c2f50" diff --git a/pyproject.toml b/pyproject.toml index 99573443e..7e5c6260f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ torch = { version = "^1.13.1", optional = true } torchvision = { version = "^0.14.1", optional = true } black = { version = "^22.12.0", optional = true } isort = { version = "^5.11.4", optional = true } -mypy = { version = "^0.991", optional = true } +mypy = { version = "^1.5", optional = true, python = ">=3.8" } responses = { version = "^0.22.0", optional = true } pytest = { version = "^7.2.1", optional = true } debugpy = { version = "^1.6.5", optional = true } @@ -103,6 +103,7 @@ types-pyyaml = "^6.0.12.9" python-dotenv = { version = "^1.0.0", python = ">3.8" } opencv-python-headless = { 
version = "^4.8.0.76", optional = true } pyyaml = "^6.0.1" +ruff = "^0.0.287" [tool.poetry.extras] dev = ["black", "isort", "mypy", "debugpy", "responses", "pytest", "ruff"] From 3c9bda8bde46053446f8e003f0b933b523defedf Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 7 Sep 2023 17:40:32 +0100 Subject: [PATCH 124/195] Switch to an action --- .github/workflows/JOB_typecheck.yml | 31 ++++++++++++----------------- 1 file changed, 13 insertions(+), 18 deletions(-) diff --git a/.github/workflows/JOB_typecheck.yml b/.github/workflows/JOB_typecheck.yml index 100a242de..e9f454f88 100644 --- a/.github/workflows/JOB_typecheck.yml +++ b/.github/workflows/JOB_typecheck.yml @@ -10,23 +10,18 @@ on: required: true jobs: - lint: + run_mypy: runs-on: ubuntu-latest + name: Mypy steps: - - name: Check out source repository - uses: actions/checkout@v3 - - - name: Set up Python environment - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Install dependencies - shell: bash - run: | - pip install poetry - poetry install --all-extras --no-root - - - name: MyPy typecheck - shell: bash - run: bash ${{ github.workspace }}/deploy/format_lint.sh typecheck ${{ inputs.files }} + # To use this repository's private action, + # you must check out the repository + - name: Checkout + uses: actions/checkout@v2 + - name: Run Mypy + uses: jashparekh/mypy-action@v2 + with: + path: '.' + mypy_version: '1.5' + mypy_options: '--verbose' + mypy_config_file: 'pyproject.toml' From d6ebc24a4a1695d8a5406d8b89920216e99b538c Mon Sep 17 00:00:00 2001 From: Owen Date: Thu, 7 Sep 2023 17:50:37 +0100 Subject: [PATCH 125/195] config entirely in ticks --- .github/workflows/JOB_typecheck.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/JOB_typecheck.yml b/.github/workflows/JOB_typecheck.yml index e9f454f88..decfe0b01 100644 --- a/.github/workflows/JOB_typecheck.yml +++ b/.github/workflows/JOB_typecheck.yml @@ -23,5 +23,6 @@ jobs: with: path: '.' 
mypy_version: '1.5' - mypy_options: '--verbose' - mypy_config_file: 'pyproject.toml' + mypy_options: '--verbose --warn-redundant-casts --warn-unused-ignores --check-untyped-defs --no-implicit-reexport --ignore-missing-imports --disallow-any-unimported --disallow-any-explicit --disallow-subclassing-any --python-version --disallow-untyped-calls --disallow-untyped-defs --disallow-incomplete-defs --no-implicit-optional --warn-unreachable --pretty' + mypy_config_file: '' + python_version: '3.10' From 681c6b13b2f0d0787fb230831b63421287b0ff09 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 8 Sep 2023 10:08:54 +0100 Subject: [PATCH 126/195] Taking mypy out of the mix for now-ticketed for return --- .github/workflows/EVENT_pull_request.yml | 13 ++++----- .github/workflows/JOB_typecheck.yml | 34 +++++++++++++----------- 2 files changed, 26 insertions(+), 21 deletions(-) diff --git a/.github/workflows/EVENT_pull_request.yml b/.github/workflows/EVENT_pull_request.yml index de0ffb8dc..49a4e3563 100644 --- a/.github/workflows/EVENT_pull_request.yml +++ b/.github/workflows/EVENT_pull_request.yml @@ -31,12 +31,13 @@ jobs: with: files: ${{ needs.get_changed_files.outputs.python_changed_files }} - typecheck: - name: Analyse types in python - needs: get_changed_files - uses: ./.github/workflows/JOB_typecheck.yml - with: - files: ${{ needs.get_changed_files.outputs.python_changed_files }} + # TODO: See Issue IO-1666 + # typecheck: + # name: Analyse types in python + # needs: get_changed_files + # uses: ./.github/workflows/JOB_typecheck.yml + # with: + # files: ${{ needs.get_changed_files.outputs.python_changed_files }} run_tests: name: Run tests diff --git a/.github/workflows/JOB_typecheck.yml b/.github/workflows/JOB_typecheck.yml index decfe0b01..74bcf9334 100644 --- a/.github/workflows/JOB_typecheck.yml +++ b/.github/workflows/JOB_typecheck.yml @@ -1,5 +1,5 @@ name: typecheck -run-name: Static type analysis +run-name: Static analysis and typecheck on: workflow_call: @@ -10,19 +10,23 @@ on: required: true jobs: - run_mypy: + typecheck: runs-on: ubuntu-latest - name: Mypy steps: - # To use this repository's private action, - # you must check out the repository - - name: Checkout - uses: actions/checkout@v2 - - name: Run Mypy - uses: jashparekh/mypy-action@v2 - with: - path: '.' 
- mypy_version: '1.5' - mypy_options: '--verbose --warn-redundant-casts --warn-unused-ignores --check-untyped-defs --no-implicit-reexport --ignore-missing-imports --disallow-any-unimported --disallow-any-explicit --disallow-subclassing-any --python-version --disallow-untyped-calls --disallow-untyped-defs --disallow-incomplete-defs --no-implicit-optional --warn-unreachable --pretty' - mypy_config_file: '' - python_version: '3.10' + - name: Check out source repository + uses: actions/checkout@v3 + + - name: Set up Python environment + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Install dependencies + shell: bash + run: | + pip install poetry + poetry install install --all-extras -vvv --no-root + + - name: MyPy typecheck + shell: bash + run: bash ${{ github.workspace }}/deploy/format_lint.sh typecheck ${{ inputs.files }} From aedd8fe37ac4c06c5a9360c442b7dc95e513ee65 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 8 Sep 2023 10:57:05 +0100 Subject: [PATCH 127/195] Merge to master documentation job --- .github/workflows/EVENT_merge_to_master.yml | 2 +- .../workflows/JOB_generate_documentation.yml | 56 +++++- poetry.lock | 174 +++++++++--------- 3 files changed, 139 insertions(+), 93 deletions(-) diff --git a/.github/workflows/EVENT_merge_to_master.yml b/.github/workflows/EVENT_merge_to_master.yml index b035a919d..e850129a0 100644 --- a/.github/workflows/EVENT_merge_to_master.yml +++ b/.github/workflows/EVENT_merge_to_master.yml @@ -4,7 +4,7 @@ run-name: Merge to master on: push: branches: - - io-1554-test-merge # TODO change to master + - master concurrency: group: ${{ github.workflow }}-${{ github.ref }} diff --git a/.github/workflows/JOB_generate_documentation.yml b/.github/workflows/JOB_generate_documentation.yml index 8d84218ae..f55274726 100644 --- a/.github/workflows/JOB_generate_documentation.yml +++ b/.github/workflows/JOB_generate_documentation.yml @@ -4,16 +4,58 @@ run-name: Generate Documentation on: workflow_call: +permissions: + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout + +env: + AWS_REGION: eu-west-1 + AWS_SESSION_NAME: darwinPyDocumentation + jobs: generate-documentation: runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.8, 3.9, "3.10"] steps: - - name: Checkout - uses: actions/checkout@v2 + - uses: actions/checkout@v2 + with: + ref: ${{ github.head_ref || github.ref }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + pip install poetry setuptools + poetry install --all-extras --no-interaction --no-root + pip install --editable ".[test,ml,medical,dev,ocv]" + pip install -U sphinx sphinx_rtd_theme m2r2 + + - name: Parse README + run: | + rm -f README.rst&&\ + m2r2 README.md&&\ + mv README.rst source/ + + - name: Generate new docs + env: + PYTHONPATH: "." + run: | + rm -rf docs/* &&\ + sphinx-apidoc -f -o source darwin darwin/future &&\ + sphinx-build -b html source/ docs/ -W + + - name: Setup access to AWS + uses: aws-actions/configure-aws-credentials@v2 with: - ref: ${{ github.head_ref }} # Will usually be master, but this allows running against any branch. 
+ role-to-assume: ${{ secrets.DARWIN_PY_AWS_GITHUB_CICD_ROLE }} + role-session-name: ${{ env.AWS_SESSION_NAME }} + aws-region: ${{ env.AWS_REGION }} + + - name: Upload docs to S3 + run: aws s3 cp docs/ s3://darwin-py-sdk.v7labs.com/ --recursive - - name: Generate Documentation - run: - echo "TODO" - # TODO diff --git a/poetry.lock b/poetry.lock index 4cce08e88..384d783af 100644 --- a/poetry.lock +++ b/poetry.lock @@ -245,29 +245,33 @@ numpy = "*" [[package]] name = "debugpy" -version = "1.6.7.post1" +version = "1.7.0" description = "An implementation of the Debug Adapter Protocol for Python" optional = true python-versions = ">=3.7" files = [ - {file = "debugpy-1.6.7.post1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:903bd61d5eb433b6c25b48eae5e23821d4c1a19e25c9610205f5aeaccae64e32"}, - {file = "debugpy-1.6.7.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16882030860081e7dd5aa619f30dec3c2f9a421e69861125f83cc372c94e57d"}, - {file = "debugpy-1.6.7.post1-cp310-cp310-win32.whl", hash = "sha256:eea8d8cfb9965ac41b99a61f8e755a8f50e9a20330938ad8271530210f54e09c"}, - {file = "debugpy-1.6.7.post1-cp310-cp310-win_amd64.whl", hash = "sha256:85969d864c45f70c3996067cfa76a319bae749b04171f2cdeceebe4add316155"}, - {file = "debugpy-1.6.7.post1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:890f7ab9a683886a0f185786ffbda3b46495c4b929dab083b8c79d6825832a52"}, - {file = "debugpy-1.6.7.post1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4ac7a4dba28801d184b7fc0e024da2635ca87d8b0a825c6087bb5168e3c0d28"}, - {file = "debugpy-1.6.7.post1-cp37-cp37m-win32.whl", hash = "sha256:3370ef1b9951d15799ef7af41f8174194f3482ee689988379763ef61a5456426"}, - {file = "debugpy-1.6.7.post1-cp37-cp37m-win_amd64.whl", hash = "sha256:65b28435a17cba4c09e739621173ff90c515f7b9e8ea469b92e3c28ef8e5cdfb"}, - {file = "debugpy-1.6.7.post1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:92b6dae8bfbd497c90596bbb69089acf7954164aea3228a99d7e43e5267f5b36"}, - {file = "debugpy-1.6.7.post1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72f5d2ecead8125cf669e62784ef1e6300f4067b0f14d9f95ee00ae06fc7c4f7"}, - {file = "debugpy-1.6.7.post1-cp38-cp38-win32.whl", hash = "sha256:f0851403030f3975d6e2eaa4abf73232ab90b98f041e3c09ba33be2beda43fcf"}, - {file = "debugpy-1.6.7.post1-cp38-cp38-win_amd64.whl", hash = "sha256:3de5d0f97c425dc49bce4293df6a04494309eedadd2b52c22e58d95107e178d9"}, - {file = "debugpy-1.6.7.post1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:38651c3639a4e8bbf0ca7e52d799f6abd07d622a193c406be375da4d510d968d"}, - {file = "debugpy-1.6.7.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:038c51268367c9c935905a90b1c2d2dbfe304037c27ba9d19fe7409f8cdc710c"}, - {file = "debugpy-1.6.7.post1-cp39-cp39-win32.whl", hash = "sha256:4b9eba71c290852f959d2cf8a03af28afd3ca639ad374d393d53d367f7f685b2"}, - {file = "debugpy-1.6.7.post1-cp39-cp39-win_amd64.whl", hash = "sha256:973a97ed3b434eab0f792719a484566c35328196540676685c975651266fccf9"}, - {file = "debugpy-1.6.7.post1-py2.py3-none-any.whl", hash = "sha256:1093a5c541af079c13ac8c70ab8b24d1d35c8cacb676306cf11e57f699c02926"}, - {file = "debugpy-1.6.7.post1.zip", hash = "sha256:fe87ec0182ef624855d05e6ed7e0b7cb1359d2ffa2a925f8ec2d22e98b75d0ca"}, + {file = "debugpy-1.7.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:17ad9a681aca1704c55b9a5edcb495fa8f599e4655c9872b7f9cf3dc25890d48"}, + {file = "debugpy-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1285920a3f9a75f5d1acf59ab1b9da9ae6eb9a05884cd7674f95170c9cafa4de"}, + {file = "debugpy-1.7.0-cp310-cp310-win32.whl", hash = "sha256:a6f43a681c5025db1f1c0568069d1d1bad306a02e7c36144912b26d9c90e4724"}, + {file = "debugpy-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:9e9571d831ad3c75b5fb6f3efcb71c471cf2a74ba84af6ac1c79ce00683bed4b"}, + {file = "debugpy-1.7.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:538765a41198aa88cc089295b39c7322dd598f9ef1d52eaae12145c63bf9430a"}, + {file = "debugpy-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7e8cf91f8f3f9b5fad844dd88427b85d398bda1e2a0cd65d5a21312fcbc0c6f"}, + {file = "debugpy-1.7.0-cp311-cp311-win32.whl", hash = "sha256:18a69f8e142a716310dd0af6d7db08992aed99e2606108732efde101e7c65e2a"}, + {file = "debugpy-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7515a5ba5ee9bfe956685909c5f28734c1cecd4ee813523363acfe3ca824883a"}, + {file = "debugpy-1.7.0-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:bc8da67ade39d9e75608cdb8601d07e63a4e85966e0572c981f14e2cf42bcdef"}, + {file = "debugpy-1.7.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5036e918c6ba8fc4c4f1fd0207d81db634431a02f0dc2ba51b12fd793c8c9de"}, + {file = "debugpy-1.7.0-cp37-cp37m-win32.whl", hash = "sha256:d5be95b3946a4d7b388e45068c7b75036ac5a610f41014aee6cafcd5506423ad"}, + {file = "debugpy-1.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0e90314a078d4e3f009520c8387aba8f74c3034645daa7a332a3d1bb81335756"}, + {file = "debugpy-1.7.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:1565fd904f9571c430adca597771255cff4f92171486fced6f765dcbdfc8ec8d"}, + {file = "debugpy-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6516f36a2e95b3be27f171f12b641e443863f4ad5255d0fdcea6ae0be29bb912"}, + {file = "debugpy-1.7.0-cp38-cp38-win32.whl", hash = "sha256:2b0e489613bc066051439df04c56777ec184b957d6810cb65f235083aef7a0dc"}, + {file = "debugpy-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:7bf0b4bbd841b2397b6a8de15da9227f1164f6d43ceee971c50194eaed930a9d"}, + {file = "debugpy-1.7.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:ad22e1095b9977af432465c1e09132ba176e18df3834b1efcab1a449346b350b"}, + {file = "debugpy-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f625e427f21423e5874139db529e18cb2966bdfcc1cb87a195538c5b34d163d1"}, + {file = "debugpy-1.7.0-cp39-cp39-win32.whl", hash = "sha256:18bca8429d6632e2d3435055416d2d88f0309cc39709f4f6355c8d412cc61f24"}, + {file = "debugpy-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:dc8a12ac8b97ef3d6973c6679a093138c7c9b03eb685f0e253269a195f651559"}, + {file = "debugpy-1.7.0-py2.py3-none-any.whl", hash = "sha256:f6de2e6f24f62969e0f0ef682d78c98161c4dca29e9fb05df4d2989005005502"}, + {file = "debugpy-1.7.0.zip", hash = "sha256:676911c710e85567b17172db934a71319ed9d995104610ce23fd74a07f66e6f6"}, ] [[package]] @@ -700,71 +704,71 @@ numpy = [ [[package]] name = "orjson" -version = "3.9.5" +version = "3.9.6" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.7" files = [ - {file = "orjson-3.9.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ad6845912a71adcc65df7c8a7f2155eba2096cf03ad2c061c93857de70d699ad"}, - {file = "orjson-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e298e0aacfcc14ef4476c3f409e85475031de24e5b23605a465e9bf4b2156273"}, - {file = 
"orjson-3.9.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:83c9939073281ef7dd7c5ca7f54cceccb840b440cec4b8a326bda507ff88a0a6"}, - {file = "orjson-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e174cc579904a48ee1ea3acb7045e8a6c5d52c17688dfcb00e0e842ec378cabf"}, - {file = "orjson-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8d51702f42c785b115401e1d64a27a2ea767ae7cf1fb8edaa09c7cf1571c660"}, - {file = "orjson-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f13d61c0c7414ddee1ef4d0f303e2222f8cced5a2e26d9774751aecd72324c9e"}, - {file = "orjson-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d748cc48caf5a91c883d306ab648df1b29e16b488c9316852844dd0fd000d1c2"}, - {file = "orjson-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bd19bc08fa023e4c2cbf8294ad3f2b8922f4de9ba088dbc71e6b268fdf54591c"}, - {file = "orjson-3.9.5-cp310-none-win32.whl", hash = "sha256:5793a21a21bf34e1767e3d61a778a25feea8476dcc0bdf0ae1bc506dc34561ea"}, - {file = "orjson-3.9.5-cp310-none-win_amd64.whl", hash = "sha256:2bcec0b1024d0031ab3eab7a8cb260c8a4e4a5e35993878a2da639d69cdf6a65"}, - {file = "orjson-3.9.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8547b95ca0e2abd17e1471973e6d676f1d8acedd5f8fb4f739e0612651602d66"}, - {file = "orjson-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87ce174d6a38d12b3327f76145acbd26f7bc808b2b458f61e94d83cd0ebb4d76"}, - {file = "orjson-3.9.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a960bb1bc9a964d16fcc2d4af5a04ce5e4dfddca84e3060c35720d0a062064fe"}, - {file = "orjson-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a7aa5573a949760d6161d826d34dc36db6011926f836851fe9ccb55b5a7d8e8"}, - {file = "orjson-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b2852afca17d7eea85f8e200d324e38c851c96598ac7b227e4f6c4e59fbd3df"}, - {file = "orjson-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa185959c082475288da90f996a82e05e0c437216b96f2a8111caeb1d54ef926"}, - {file = "orjson-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:89c9332695b838438ea4b9a482bce8ffbfddde4df92750522d928fb00b7b8dce"}, - {file = "orjson-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2493f1351a8f0611bc26e2d3d407efb873032b4f6b8926fed8cfed39210ca4ba"}, - {file = "orjson-3.9.5-cp311-none-win32.whl", hash = "sha256:ffc544e0e24e9ae69301b9a79df87a971fa5d1c20a6b18dca885699709d01be0"}, - {file = "orjson-3.9.5-cp311-none-win_amd64.whl", hash = "sha256:89670fe2732e3c0c54406f77cad1765c4c582f67b915c74fda742286809a0cdc"}, - {file = "orjson-3.9.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:15df211469625fa27eced4aa08dc03e35f99c57d45a33855cc35f218ea4071b8"}, - {file = "orjson-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9f17c59fe6c02bc5f89ad29edb0253d3059fe8ba64806d789af89a45c35269a"}, - {file = "orjson-3.9.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ca6b96659c7690773d8cebb6115c631f4a259a611788463e9c41e74fa53bf33f"}, - {file = "orjson-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26fafe966e9195b149950334bdbe9026eca17fe8ffe2d8fa87fdc30ca925d30"}, - {file = 
"orjson-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9006b1eb645ecf460da067e2dd17768ccbb8f39b01815a571bfcfab7e8da5e52"}, - {file = "orjson-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebfdbf695734b1785e792a1315e41835ddf2a3e907ca0e1c87a53f23006ce01d"}, - {file = "orjson-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4a3943234342ab37d9ed78fb0a8f81cd4b9532f67bf2ac0d3aa45fa3f0a339f3"}, - {file = "orjson-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e6762755470b5c82f07b96b934af32e4d77395a11768b964aaa5eb092817bc31"}, - {file = "orjson-3.9.5-cp312-none-win_amd64.whl", hash = "sha256:c74df28749c076fd6e2157190df23d43d42b2c83e09d79b51694ee7315374ad5"}, - {file = "orjson-3.9.5-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:88e18a74d916b74f00d0978d84e365c6bf0e7ab846792efa15756b5fb2f7d49d"}, - {file = "orjson-3.9.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28514b5b6dfaf69097be70d0cf4f1407ec29d0f93e0b4131bf9cc8fd3f3e374"}, - {file = "orjson-3.9.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25b81aca8c7be61e2566246b6a0ca49f8aece70dd3f38c7f5c837f398c4cb142"}, - {file = "orjson-3.9.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:385c1c713b1e47fd92e96cf55fd88650ac6dfa0b997e8aa7ecffd8b5865078b1"}, - {file = "orjson-3.9.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9850c03a8e42fba1a508466e6a0f99472fd2b4a5f30235ea49b2a1b32c04c11"}, - {file = "orjson-3.9.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4449f84bbb13bcef493d8aa669feadfced0f7c5eea2d0d88b5cc21f812183af8"}, - {file = "orjson-3.9.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:86127bf194f3b873135e44ce5dc9212cb152b7e06798d5667a898a00f0519be4"}, - {file = "orjson-3.9.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0abcd039f05ae9ab5b0ff11624d0b9e54376253b7d3217a358d09c3edf1d36f7"}, - {file = "orjson-3.9.5-cp37-none-win32.whl", hash = "sha256:10cc8ad5ff7188efcb4bec196009d61ce525a4e09488e6d5db41218c7fe4f001"}, - {file = "orjson-3.9.5-cp37-none-win_amd64.whl", hash = "sha256:ff27e98532cb87379d1a585837d59b187907228268e7b0a87abe122b2be6968e"}, - {file = "orjson-3.9.5-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5bfa79916ef5fef75ad1f377e54a167f0de334c1fa4ebb8d0224075f3ec3d8c0"}, - {file = "orjson-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e87dfa6ac0dae764371ab19b35eaaa46dfcb6ef2545dfca03064f21f5d08239f"}, - {file = "orjson-3.9.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50ced24a7b23058b469ecdb96e36607fc611cbaee38b58e62a55c80d1b3ad4e1"}, - {file = "orjson-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b1b74ea2a3064e1375da87788897935832e806cc784de3e789fd3c4ab8eb3fa5"}, - {file = "orjson-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7cb961efe013606913d05609f014ad43edfaced82a576e8b520a5574ce3b2b9"}, - {file = "orjson-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1225d2d5ee76a786bda02f8c5e15017462f8432bb960de13d7c2619dba6f0275"}, - {file = "orjson-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f39f4b99199df05c7ecdd006086259ed25886cdbd7b14c8cdb10c7675cfcca7d"}, - {file = "orjson-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash 
= "sha256:a461dc9fb60cac44f2d3218c36a0c1c01132314839a0e229d7fb1bba69b810d8"}, - {file = "orjson-3.9.5-cp38-none-win32.whl", hash = "sha256:dedf1a6173748202df223aea29de814b5836732a176b33501375c66f6ab7d822"}, - {file = "orjson-3.9.5-cp38-none-win_amd64.whl", hash = "sha256:fa504082f53efcbacb9087cc8676c163237beb6e999d43e72acb4bb6f0db11e6"}, - {file = "orjson-3.9.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6900f0248edc1bec2a2a3095a78a7e3ef4e63f60f8ddc583687eed162eedfd69"}, - {file = "orjson-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17404333c40047888ac40bd8c4d49752a787e0a946e728a4e5723f111b6e55a5"}, - {file = "orjson-3.9.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0eefb7cfdd9c2bc65f19f974a5d1dfecbac711dae91ed635820c6b12da7a3c11"}, - {file = "orjson-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68c78b2a3718892dc018adbc62e8bab6ef3c0d811816d21e6973dee0ca30c152"}, - {file = "orjson-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:591ad7d9e4a9f9b104486ad5d88658c79ba29b66c5557ef9edf8ca877a3f8d11"}, - {file = "orjson-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cc2cbf302fbb2d0b2c3c142a663d028873232a434d89ce1b2604ebe5cc93ce8"}, - {file = "orjson-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b26b5aa5e9ee1bad2795b925b3adb1b1b34122cb977f30d89e0a1b3f24d18450"}, - {file = "orjson-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ef84724f7d29dcfe3aafb1fc5fc7788dca63e8ae626bb9298022866146091a3e"}, - {file = "orjson-3.9.5-cp39-none-win32.whl", hash = "sha256:664cff27f85939059472afd39acff152fbac9a091b7137092cb651cf5f7747b5"}, - {file = "orjson-3.9.5-cp39-none-win_amd64.whl", hash = "sha256:91dda66755795ac6100e303e206b636568d42ac83c156547634256a2e68de694"}, - {file = "orjson-3.9.5.tar.gz", hash = "sha256:6daf5ee0b3cf530b9978cdbf71024f1c16ed4a67d05f6ec435c6e7fe7a52724c"}, + {file = "orjson-3.9.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fad6866871411ee9737d4b26fbc7dbe1f66f371ce8a9fffc329bb76805752c4f"}, + {file = "orjson-3.9.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7b8cf05ed44c79c4a74de128ee481adb1b2446939d565fc7611d34b07d0b3b"}, + {file = "orjson-3.9.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:212f0524ecd04f217f023bb9f2226f8ff41805cfc69f02d1cbd57300b13cd644"}, + {file = "orjson-3.9.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517a48ddb9684d69002e1ee16d9eb5213be338837936b5dad4bccde61ac4c2ef"}, + {file = "orjson-3.9.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4601ff8efd8cc45b21a23e0d70bc6f6f67270e95bf8bf4746c4960f696114f47"}, + {file = "orjson-3.9.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6c8702fbb658cb3eb2ac88d50e0c921782a4041012f9138e737341288abe817"}, + {file = "orjson-3.9.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a6843d59c882608da5a026d54e04016924c279f29ead28db9e99d55613326687"}, + {file = "orjson-3.9.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f742af5b28fa153a89e6d87a13bae0ac94bf5c8ac56335102a0e1d9267ed1bc7"}, + {file = "orjson-3.9.6-cp310-none-win32.whl", hash = "sha256:0ee1664ccc7bdd6de64b6f3f04633837391e2c8e8e04bfd8b3a3270597de2e22"}, + {file = "orjson-3.9.6-cp310-none-win_amd64.whl", hash = 
"sha256:cefad5742f0ee2cfae795756eefcedabf4f6e4910fc530cc06f72df2d1ada781"}, + {file = "orjson-3.9.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:48761464611a333a83686f21f70b483951eb11c6136d7ab46848da03ac90beb1"}, + {file = "orjson-3.9.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb3921062e495a3df770517b5ad9db18a7e0db70d42453bdbb545d8fceb0f85"}, + {file = "orjson-3.9.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0520473680d24290558d26aeb8b7d8ba6835955e01ff09a9d0ea866049a0d9c3"}, + {file = "orjson-3.9.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ec2b39c4a38a763e18b93a70ce2114fa322b88ce1896769332271af4f5b33b6"}, + {file = "orjson-3.9.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0cdbf3b293a11f33aa1b164783b2df8a368bf5a5ec0d46a5f241f00927f3df8"}, + {file = "orjson-3.9.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cfa39f734dac4f5e64d79e5735355d09e6c6a8ade1312daab63efeac325da8a"}, + {file = "orjson-3.9.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f994f52901814cf70cc68b835da8394ea50a5464426d122275ac96a0bc39ba20"}, + {file = "orjson-3.9.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32e3e1cc335b1d4539e131fb3a361953b9d7b499e27f81c3648359c0e70ed7aa"}, + {file = "orjson-3.9.6-cp311-none-win32.whl", hash = "sha256:f5ff143d42c4a7e6ef0ecdaeca41348eb0ab730a60e3e9927fd0153ba5d4bb60"}, + {file = "orjson-3.9.6-cp311-none-win_amd64.whl", hash = "sha256:1c7d9a4db055d8febdf949273bc9bc7a15179ea92cdc7c77d0f992fdbf52cfa4"}, + {file = "orjson-3.9.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:49ecdeb3ae767e6abefd5711c75052692d53a65cce00d6d8caabb5a9b756fcb1"}, + {file = "orjson-3.9.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:593a939aa8cf0c39a9f8f706681439a172ce98d679bc2b387130bcc219be1ee4"}, + {file = "orjson-3.9.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1c9987b3e9920c90456c879d9f2ec030f1f7417a1c8ea53badbaceb7a8dcab6"}, + {file = "orjson-3.9.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1daedb551d3a71873caad350b2b824c56d38e6f03381d7d2d516b9eb01196cdf"}, + {file = "orjson-3.9.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f00458dd180d332820545009afca77f3dc4658526995e1aab4127357f473693d"}, + {file = "orjson-3.9.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8ae23b3cde20c2d5472cd9efe35623ebf3c7648e62c8e534082528394078fb"}, + {file = "orjson-3.9.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f19579ba3dbf069b77ebeb70f9628571c9969e51a558cdda7eace9d1885f379f"}, + {file = "orjson-3.9.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2fd1771f0f41569734a85d572735aa47c89b2d0e98b0aa89edc5db849cffd1ef"}, + {file = "orjson-3.9.6-cp312-none-win_amd64.whl", hash = "sha256:018f85b53e5c7a8bd1b5ce900358760dddb8a7b9b2da1545c9a17cf42ae99cc6"}, + {file = "orjson-3.9.6-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:de609af7958f0f9010de04356edb4f58f0cfadbb17103c198561a721981fba74"}, + {file = "orjson-3.9.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b077ec427eb805264ab9606406819cb745bef6be0a3d903613c9fa8421547a46"}, + {file = 
"orjson-3.9.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:418202b229b00d628e52bc5883a06d43aeecd0449962ad5b4f68113a7fd741a6"}, + {file = "orjson-3.9.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aeec3598ad7e6b5f0267fa0e57ebc27f140eed7d8e4c68a193d814af3973e1a3"}, + {file = "orjson-3.9.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:181e56cbd94149a721fdbc5417b6283c668e9995a320e6279a87ac8c736d0c6f"}, + {file = "orjson-3.9.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0032c152f29688f84d0660de992df3d76163c45b2ba7ba1aa9bc1f770e84316"}, + {file = "orjson-3.9.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:676f037a3ef590f6664d70be956659c7c164daa91b652504cf54d59c252cf29c"}, + {file = "orjson-3.9.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b9466e66982ddf18bc0e96e383be5fecc867f659aee3cd621a70de0af0154ac1"}, + {file = "orjson-3.9.6-cp37-none-win32.whl", hash = "sha256:e46ea20dcc8b9d6e5377e125a8101dc59da06086f08e924b6b3c45322709c484"}, + {file = "orjson-3.9.6-cp37-none-win_amd64.whl", hash = "sha256:49f2f632c8e2db6e9e024d3ea5b9b1343fb5bc4e52d3139c2c724d84f952fae8"}, + {file = "orjson-3.9.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a3cb03b1aaf94633d78b389d4110ed5cfd4fc6c09c99a1c61ba418f512b92de7"}, + {file = "orjson-3.9.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:212e6ec66d0bcc9882f9bd0e1870b486a6ead115975108fe17e5e87d0666044e"}, + {file = "orjson-3.9.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:93de6166da3ee5523d25acbae6d77f5a76525a1a81b69966a3091a3497f8f9ee"}, + {file = "orjson-3.9.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15e4442fea9aae10074a06e9e486373b960ef61d5735836cb026dd4d104f511d"}, + {file = "orjson-3.9.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:496c1515b6b4a1435667035a955e4531cbea341b0a50e86db42b4b6d0b9c78b0"}, + {file = "orjson-3.9.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72f6ef36a66a7a2e98d1e247c7a5b7e92d26731c9e9e9a3de627e82a56d1aee6"}, + {file = "orjson-3.9.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:08cc162e221105e195301030b3d98e668335da6020424cc61e4ea85fd0d49456"}, + {file = "orjson-3.9.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03977a50b682b546c03b7d6e4f39d41cd0568cae533aabd339c853ff33c44a35"}, + {file = "orjson-3.9.6-cp38-none-win32.whl", hash = "sha256:e898a5150c3375512f76820bd9a009aab717ffde551f60f381aa8bad9f503bda"}, + {file = "orjson-3.9.6-cp38-none-win_amd64.whl", hash = "sha256:0664ad3c14dfb61ec794e469525556367a0d9bdc4246a64a6f0b3f8140f89d87"}, + {file = "orjson-3.9.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:df0545fc5a5f699d7693498847064df56a94990f5a779276549622083e1e850b"}, + {file = "orjson-3.9.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bd7b491ae93221b38c4a6a99a044e0f84a99fba36321e22cf94a38ac7f517d8"}, + {file = "orjson-3.9.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20c7bad91dabf327fb7d034bb579e7d613d1a003f5ed773a3324acc038ae5f9a"}, + {file = "orjson-3.9.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9b2dd0042fc1527960ddb3e7e81376df9cb799e9c0d31931befd14dc77a4f422"}, + {file = "orjson-3.9.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:62f8d96904024620edd73d0b2d72321ba5fd499ee3a459dd8691d44252db3310"}, + {file = "orjson-3.9.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df35c8c0b1a0dad33dc679375d9e6464b0100f1899f72d6b8f9938d877d6f67e"}, + {file = "orjson-3.9.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ea3be461eb2aa2299399445001a1d9a53d170efc7dbe39087f163f40733cd9c1"}, + {file = "orjson-3.9.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ba3f11b0197508c4c5e99d11a088182360fd1d4177fe281824105b0cf452137"}, + {file = "orjson-3.9.6-cp39-none-win32.whl", hash = "sha256:e2f394a2c112080b8ccec2cc24cc196375980914afa943446df46b6cc133f0ab"}, + {file = "orjson-3.9.6-cp39-none-win_amd64.whl", hash = "sha256:66c9e0b728a1e0b1c4acb1f9c728800176a86a4c5b3e3bdb0c00d9dba8823ef0"}, + {file = "orjson-3.9.6.tar.gz", hash = "sha256:118171ed986d71f8201571911d6ec8c8e8e498afd8a8dd038ac55d642d8246b8"}, ] [[package]] @@ -1019,13 +1023,13 @@ files = [ [[package]] name = "pytest" -version = "7.4.1" +version = "7.4.2" description = "pytest: simple powerful testing with Python" optional = true python-versions = ">=3.7" files = [ - {file = "pytest-7.4.1-py3-none-any.whl", hash = "sha256:460c9a59b14e27c602eb5ece2e47bec99dc5fc5f6513cf924a7d03a578991b1f"}, - {file = "pytest-7.4.1.tar.gz", hash = "sha256:2f2301e797521b23e4d2585a0a3d7b5e50fdddaaf7e7d6773ea26ddb17c213ab"}, + {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, + {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, ] [package.dependencies] @@ -1616,11 +1620,11 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -dev = ["black", "debugpy", "isort", "mypy", "pytest", "responses"] +dev = ["black", "debugpy", "isort", "mypy", "pytest", "responses", "ruff"] medical = ["connected-components-3d", "nibabel"] ml = ["scikit-learn", "torch", "torchvision"] ocv = ["opencv-python-headless"] -test = ["pytest", "responses"] +test = ["pytest", "responses", "ruff"] [metadata] lock-version = "2.0" From 82325aef531073bed065820ab076f3a359161c26 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 8 Sep 2023 11:47:20 +0100 Subject: [PATCH 128/195] Setup to test doc gen in branch --- .github/workflows/EVENT_merge_to_master.yml | 9 ++++++++- .github/workflows/JOB_generate_documentation.yml | 11 +++++++++-- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/.github/workflows/EVENT_merge_to_master.yml b/.github/workflows/EVENT_merge_to_master.yml index e850129a0..c9986aeba 100644 --- a/.github/workflows/EVENT_merge_to_master.yml +++ b/.github/workflows/EVENT_merge_to_master.yml @@ -4,7 +4,7 @@ run-name: Merge to master on: push: branches: - - master + - io-1554-test-merge # TODO change to master concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -20,6 +20,13 @@ jobs: name: Documentation uses: ./.github/workflows/JOB_generate_documentation.yml + # test_fail_action: + # name: DEBUG Cause deliberate workflow failure to test last action + # runs-on: ubuntu-latest + # steps: + # - name: Fail + # run: exit 1 + reverse_commit_on_fails: name: Reverse commit on fails uses: ./.github/workflows/JOB_reverse_commit_on_fails.yml diff --git 
a/.github/workflows/JOB_generate_documentation.yml b/.github/workflows/JOB_generate_documentation.yml index f55274726..f6dd94428 100644 --- a/.github/workflows/JOB_generate_documentation.yml +++ b/.github/workflows/JOB_generate_documentation.yml @@ -56,6 +56,13 @@ jobs: role-session-name: ${{ env.AWS_SESSION_NAME }} aws-region: ${{ env.AWS_REGION }} - - name: Upload docs to S3 - run: aws s3 cp docs/ s3://darwin-py-sdk.v7labs.com/ --recursive + # - name: Upload docs to S3 + # run: aws s3 cp docs/ s3://darwin-py-sdk.v7labs.com/ --recursive + + # Generate and save docs github action artefact + - name: Generate and save docs github action artefact + uses: actions/upload-artifact@v2 + with: + name: darwin-py-docs + path: docs/ From f72d0d0976bd2373a848599acd70647dea289327 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 8 Sep 2023 11:57:36 +0100 Subject: [PATCH 129/195] Simulate a squash merge --- .github/workflows/JOB_reverse_commit_on_fails.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/JOB_reverse_commit_on_fails.yml b/.github/workflows/JOB_reverse_commit_on_fails.yml index c05d037a3..3854039c5 100644 --- a/.github/workflows/JOB_reverse_commit_on_fails.yml +++ b/.github/workflows/JOB_reverse_commit_on_fails.yml @@ -11,6 +11,8 @@ on: jobs: step-master-back-one-commit: + env: + BRANCH_NAME: ${{ github.event.workflow_run.head_branch }} name: Step master back one commit runs-on: ubuntu-latest steps: @@ -22,3 +24,5 @@ jobs: run: | git revert HEAD~1 git push origin master + + # NB: This will run the parent workflow _again_ as it is a push to master From e93ae18c98ddebb78364736c2346cea343e8416b Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 8 Sep 2023 12:13:29 +0100 Subject: [PATCH 130/195] Test version of reverse commit step --- .../workflows/JOB_reverse_commit_on_fails.yml | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/.github/workflows/JOB_reverse_commit_on_fails.yml b/.github/workflows/JOB_reverse_commit_on_fails.yml index 3854039c5..54400b3a9 100644 --- a/.github/workflows/JOB_reverse_commit_on_fails.yml +++ b/.github/workflows/JOB_reverse_commit_on_fails.yml @@ -16,13 +16,21 @@ jobs: name: Step master back one commit runs-on: ubuntu-latest steps: - - name: Checkout master + - name: DEBUG + shell: bash + run: | + echo "${{ github.event.workflow_run.head_branch }} - $BRANCH_NAME" + echo "${{ toJson(github.event.push) }}" + + - name: Checkout merge branch [${{ env.BRANCH_NAME }}] uses: actions/checkout@v2 with: - ref: master + ref: $BRANCH_NAME + - name: Reverse commit run: | - git revert HEAD~1 - git push origin master + echo "Just testing atm" + # git reset --hard HEAD~1 + # git push origin $BRANCH_NAME --force # NB: This will run the parent workflow _again_ as it is a push to master From 5b9cafd44b5352634bb510fe0338d035ea8a5fe8 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 8 Sep 2023 12:29:25 +0100 Subject: [PATCH 131/195] Added debug statement to find the correct context member --- .../workflows/JOB_reverse_commit_on_fails.yml | 27 +++++++++---------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/.github/workflows/JOB_reverse_commit_on_fails.yml b/.github/workflows/JOB_reverse_commit_on_fails.yml index 54400b3a9..0bc8acbfe 100644 --- a/.github/workflows/JOB_reverse_commit_on_fails.yml +++ b/.github/workflows/JOB_reverse_commit_on_fails.yml @@ -17,20 +17,17 @@ jobs: runs-on: ubuntu-latest steps: - name: DEBUG - shell: bash - run: | - echo "${{ github.event.workflow_run.head_branch }} - $BRANCH_NAME" - echo "${{ 
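# NB (sketch): when enabled for real, the job above boils down to the following.
# Note that `git revert HEAD~1` as committed targets the commit *before* HEAD;
# reverting the just-merged commit itself would be `git revert HEAD`. Assumes
# the `contents: write` permission that a later patch in this series adds.
#
#   - name: Reverse commit
#     run: |
#       git revert --no-edit HEAD   # --no-edit so no editor is opened in CI
#       git push origin master      # this push re-triggers merge-to-master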
From e93ae18c98ddebb78364736c2346cea343e8416b Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 12:13:29 +0100
Subject: [PATCH 130/195] Test version of reverse commit step

---
 .../workflows/JOB_reverse_commit_on_fails.yml | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/JOB_reverse_commit_on_fails.yml b/.github/workflows/JOB_reverse_commit_on_fails.yml
index 3854039c5..54400b3a9 100644
--- a/.github/workflows/JOB_reverse_commit_on_fails.yml
+++ b/.github/workflows/JOB_reverse_commit_on_fails.yml
@@ -16,13 +16,21 @@ jobs:
     name: Step master back one commit
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout master
+      - name: DEBUG
+        shell: bash
+        run: |
+          echo "${{ github.event.workflow_run.head_branch }} - $BRANCH_NAME"
+          echo "${{ toJson(github.event.push) }}"
+
+      - name: Checkout merge branch [${{ env.BRANCH_NAME }}]
         uses: actions/checkout@v2
         with:
-          ref: master
+          ref: $BRANCH_NAME
+
       - name: Reverse commit
         run: |
-          git revert HEAD~1
-          git push origin master
+          echo "Just testing atm"
+          # git reset --hard HEAD~1
+          # git push origin $BRANCH_NAME --force

 # NB: This will run the parent workflow _again_ as it is a push to master

From 5b9cafd44b5352634bb510fe0338d035ea8a5fe8 Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 12:29:25 +0100
Subject: [PATCH 131/195] Added debug statement to find the correct context
 member

---
 .../workflows/JOB_reverse_commit_on_fails.yml | 27 +++++++++----------
 1 file changed, 12 insertions(+), 15 deletions(-)

diff --git a/.github/workflows/JOB_reverse_commit_on_fails.yml b/.github/workflows/JOB_reverse_commit_on_fails.yml
index 54400b3a9..0bc8acbfe 100644
--- a/.github/workflows/JOB_reverse_commit_on_fails.yml
+++ b/.github/workflows/JOB_reverse_commit_on_fails.yml
@@ -17,20 +17,17 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: DEBUG
-        shell: bash
-        run: |
-          echo "${{ github.event.workflow_run.head_branch }} - $BRANCH_NAME"
-          echo "${{ toJson(github.event.push) }}"
-
-      - name: Checkout merge branch [${{ env.BRANCH_NAME }}]
-        uses: actions/checkout@v2
-        with:
-          ref: $BRANCH_NAME
-
-      - name: Reverse commit
-        run: |
-          echo "Just testing atm"
-          # git reset --hard HEAD~1
-          # git push origin $BRANCH_NAME --force
+        uses: ./.github/actions/JOB_debug-contexts.yml
+
+      # - name: Checkout merge branch [${{ env.BRANCH_NAME }}]
+      #   uses: actions/checkout@v2
+      #   with:
+      #     ref: $BRANCH_NAME
+
+      # - name: Reverse commit
+      #   run: |
+      #     echo "Just testing atm"
+      #     # git reset --hard HEAD~1
+      #     # git push origin $BRANCH_NAME --force

 # NB: This will run the parent workflow _again_ as it is a push to master

From e6f2ea38c764144c502ecbb868e58d561597e261 Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 12:31:50 +0100
Subject: [PATCH 132/195] Correction to flow

---
 .github/workflows/EVENT_merge_to_master.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/EVENT_merge_to_master.yml b/.github/workflows/EVENT_merge_to_master.yml
index c9986aeba..7ec000393 100644
--- a/.github/workflows/EVENT_merge_to_master.yml
+++ b/.github/workflows/EVENT_merge_to_master.yml
@@ -28,6 +28,8 @@ jobs:
   #       run: exit 1

   reverse_commit_on_fails:
+    needs: [run_tests, documentation]
+    if : ${{ failure() }}
     name: Reverse commit on fails
     uses: ./.github/workflows/JOB_reverse_commit_on_fails.yml

From dc9a57bfc4b6b0afad95d064b2fb64ff99e574b7 Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 12:47:31 +0100
Subject: [PATCH 133/195] Quick change to get context info

---
 .github/workflows/EVENT_merge_to_master.yml   | 11 ++++++++-
 .../workflows/JOB_reverse_commit_on_fails.yml | 24 +++++++++----------
 2 files changed, 21 insertions(+), 14 deletions(-)

diff --git a/.github/workflows/EVENT_merge_to_master.yml b/.github/workflows/EVENT_merge_to_master.yml
index 7ec000393..6a0644e7a 100644
--- a/.github/workflows/EVENT_merge_to_master.yml
+++ b/.github/workflows/EVENT_merge_to_master.yml
@@ -31,6 +31,15 @@ jobs:
   reverse_commit_on_fails:
     needs: [run_tests, documentation]
     if : ${{ failure() }}
     name: Reverse commit on fails
-    uses: ./.github/workflows/JOB_reverse_commit_on_fails.yml
+    uses: ./.github/workflows/JOB_debug_contexts.yml
+
+  success: # Step for the flow to have to indicate success
+    needs: [run_tests, documentation]
+    if : ${{ success() }}
+    name: Success
+    runs-on: ubuntu-latest
+    steps:
+      - name: Success
+        run: echo "Success"
diff --git a/.github/workflows/JOB_reverse_commit_on_fails.yml b/.github/workflows/JOB_reverse_commit_on_fails.yml
index 0bc8acbfe..5a2099651 100644
--- a/.github/workflows/JOB_reverse_commit_on_fails.yml
+++ b/.github/workflows/JOB_reverse_commit_on_fails.yml
@@ -16,18 +16,16 @@ jobs:
     name: Step master back one commit
     runs-on: ubuntu-latest
     steps:
-      - name: DEBUG
-        uses: ./.github/actions/JOB_debug-contexts.yml
-
-      # - name: Checkout merge branch [${{ env.BRANCH_NAME }}]
-      #   uses: actions/checkout@v2
-      #   with:
-      #     ref: $BRANCH_NAME
-
-      # - name: Reverse commit
-      #   run: |
-      #     echo "Just testing atm"
-      #     # git reset --hard HEAD~1
-      #     # git push origin $BRANCH_NAME --force
+
+      - name: Checkout merge branch [${{ env.BRANCH_NAME }}]
+        uses: actions/checkout@v2
+        with:
+          ref: $BRANCH_NAME
+
+      - name: Reverse commit
+        run: |
+          echo "Just testing atm"
+          # # git reset --hard HEAD~1
+          # # git push origin $BRANCH_NAME --force

 # NB: This will run the parent workflow _again_ as it is a push to master

From 3468a374e1a4e0c282901634015f169800fc80b3 Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 12:56:35 +0100
Subject: [PATCH 134/195] Reverse commit should now get the correct branch
 name

---
 .github/workflows/JOB_reverse_commit_on_fails.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/JOB_reverse_commit_on_fails.yml b/.github/workflows/JOB_reverse_commit_on_fails.yml
index 5a2099651..50abfd776 100644
--- a/.github/workflows/JOB_reverse_commit_on_fails.yml
+++ b/.github/workflows/JOB_reverse_commit_on_fails.yml
@@ -12,7 +12,7 @@ jobs:

   step-master-back-one-commit:
     env:
-      BRANCH_NAME: ${{ github.event.workflow_run.head_branch }}
+      BRANCH_NAME: ${{ github.ref_name }}
     name: Step master back one commit
     runs-on: ubuntu-latest
     steps:
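# NB: ${{ github.event.workflow_run.head_branch }} is only populated for
# workflow_run-triggered events; for a plain push, ${{ github.ref_name }}
# holds the short name of the pushed branch (e.g. `master`), which is
# presumably why the env var is switched above.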
From 46e011dc0218bba6e92f708f99d3a6989f5127db Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 13:17:16 +0100
Subject: [PATCH 135/195] Remove AWS from the doc run

---
 .github/workflows/EVENT_merge_to_master.yml      |  7 -------
 .github/workflows/JOB_generate_documentation.yml | 12 ++++++------
 2 files changed, 6 insertions(+), 13 deletions(-)

diff --git a/.github/workflows/EVENT_merge_to_master.yml b/.github/workflows/EVENT_merge_to_master.yml
index 6a0644e7a..985027f37 100644
--- a/.github/workflows/EVENT_merge_to_master.yml
+++ b/.github/workflows/EVENT_merge_to_master.yml
@@ -20,13 +20,6 @@ jobs:
     name: Documentation
     uses: ./.github/workflows/JOB_generate_documentation.yml

-  # test_fail_action:
-  #   name: DEBUG Cause deliberate workflow failure to test last action
-  #   runs-on: ubuntu-latest
-  #   steps:
-  #     - name: Fail
-  #       run: exit 1
-
   reverse_commit_on_fails:
     needs: [run_tests, documentation]
     if : ${{ failure() }}
diff --git a/.github/workflows/JOB_generate_documentation.yml b/.github/workflows/JOB_generate_documentation.yml
index f6dd94428..a3c3504c5 100644
--- a/.github/workflows/JOB_generate_documentation.yml
+++ b/.github/workflows/JOB_generate_documentation.yml
@@ -49,12 +49,12 @@ jobs:
           sphinx-apidoc -f -o source darwin darwin/future &&\
           sphinx-build -b html source/ docs/ -W

-      - name: Setup access to AWS
-        uses: aws-actions/configure-aws-credentials@v2
-        with:
-          role-to-assume: ${{ secrets.DARWIN_PY_AWS_GITHUB_CICD_ROLE }}
-          role-session-name: ${{ env.AWS_SESSION_NAME }}
-          aws-region: ${{ env.AWS_REGION }}
+      # - name: Setup access to AWS
+      #   uses: aws-actions/configure-aws-credentials@v2
+      #   with:
+      #     role-to-assume: ${{ secrets.DARWIN_PY_AWS_GITHUB_CICD_ROLE }}
+      #     role-session-name: ${{ env.AWS_SESSION_NAME }}
+      #     aws-region: ${{ env.AWS_REGION }}

       # - name: Upload docs to S3
       #   run: aws s3 cp docs/ s3://darwin-py-sdk.v7labs.com/ --recursive

From 5cc1adaa969945174084a8e965d23c27e87336fb Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 14:06:46 +0100
Subject: [PATCH 136/195] Starting debug of AWS step

---
 .github/workflows/EVENT_merge_to_master.yml      | 12 ++++-
 .../workflows/JOB_generate_documentation.yml     | 47 +++++++++++--------
 2 files changed, 37 insertions(+), 22 deletions(-)

diff --git a/.github/workflows/EVENT_merge_to_master.yml b/.github/workflows/EVENT_merge_to_master.yml
index 985027f37..3a532626f 100644
--- a/.github/workflows/EVENT_merge_to_master.yml
+++ b/.github/workflows/EVENT_merge_to_master.yml
@@ -12,9 +12,17 @@ jobs:

-  run_tests:
+  # run_tests:
+  #   name: Run tests
+  #   uses: ./.github/workflows/JOB_tests.yml
+
+  run_tests: # Save run time while we test this.
     name: Run tests
-    uses: ./.github/workflows/JOB_tests.yml
+    # passively succeed
+    runs-on: ubuntu-latest
+    steps:
+      - name: Run tests
+        run: echo "Run tests"

   documentation:
     name: Documentation
     uses: ./.github/workflows/JOB_generate_documentation.yml
diff --git a/.github/workflows/JOB_generate_documentation.yml b/.github/workflows/JOB_generate_documentation.yml
index a3c3504c5..1dbf11752 100644
--- a/.github/workflows/JOB_generate_documentation.yml
+++ b/.github/workflows/JOB_generate_documentation.yml
@@ -28,33 +28,40 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}

-      - name: Install dependencies
+      - name: DEBUG env
+        env:
+          ALL_THE_ENVS: ${{ toJson(env) }}
+        shell: bash
         run: |
-          pip install poetry setuptools
-          poetry install --all-extras --no-interaction --no-root
-          pip install --editable ".[test,ml,medical,dev,ocv]"
-          pip install -U sphinx sphinx_rtd_theme m2r2
+          echo "$ALL_THE_ENVS"

-      - name: Parse README
-        run: |
-          rm -f README.rst&&\
-          m2r2 README.md&&\
-          mv README.rst source/
+      # - name: Install dependencies
+      #   run: |
+      #     pip install poetry setuptools
+      #     poetry install --all-extras --no-interaction --no-root
+      #     pip install --editable ".[test,ml,medical,dev,ocv]"
+      #     pip install -U sphinx sphinx_rtd_theme m2r2

-      - name: Generate new docs
-        env:
-          PYTHONPATH: "."
-        run: |
-          rm -rf docs/* &&\
-          sphinx-apidoc -f -o source darwin darwin/future &&\
-          sphinx-build -b html source/ docs/ -W
+      # - name: Parse README
+      #   run: |
+      #     rm -f README.rst&&\
+      #     m2r2 README.md&&\
+      #     mv README.rst source/
+
+      # - name: Generate new docs
+      #   env:
+      #     PYTHONPATH: "."
+      #   run: |
+      #     rm -rf docs/* &&\
+      #     sphinx-apidoc -f -o source darwin darwin/future &&\
+      #     sphinx-build -b html source/ docs/ -W

       # - name: Setup access to AWS
       #   uses: aws-actions/configure-aws-credentials@v2
       #   with:
-      #     role-to-assume: ${{ secrets.DARWIN_PY_AWS_GITHUB_CICD_ROLE }}
-      #     role-session-name: ${{ env.AWS_SESSION_NAME }}
-      #     aws-region: ${{ env.AWS_REGION }}
+      #     role-to-assume: ${{ secrets.DARWIN_PY_AWS_GITHUB_CICD_ROLE }} # TODO: Check this works
+      #     role-session-name: ${{ env.AWS_SESSION_NAME }} # TODO: Check this works
+      #     aws-region: ${{ env.AWS_REGION }} # TODO: Check this works

       # - name: Upload docs to S3
       #   run: aws s3 cp docs/ s3://darwin-py-sdk.v7labs.com/ --recursive
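# NB (sketch): `toJson(env)` dumps every env value the job actually receives;
# the same pattern works for any context when debugging, e.g. (an illustrative
# step, not part of this patch):
#
#   - name: DEBUG github context
#     env:
#       GITHUB_CONTEXT: ${{ toJson(github) }}
#     shell: bash
#     run: |
#       echo "$GITHUB_CONTEXT"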
From b4936a8e2d84858c2526c3dfee937ce1f29c22fa Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 14:24:42 +0100
Subject: [PATCH 137/195] Test AWS auth with no upload

---
 .../workflows/JOB_generate_documentation.yml | 31 +++++++------------
 1 file changed, 12 insertions(+), 19 deletions(-)

diff --git a/.github/workflows/JOB_generate_documentation.yml b/.github/workflows/JOB_generate_documentation.yml
index 1dbf11752..726e3e117 100644
--- a/.github/workflows/JOB_generate_documentation.yml
+++ b/.github/workflows/JOB_generate_documentation.yml
@@ -28,13 +28,6 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}

-      - name: DEBUG env
-        env:
-          ALL_THE_ENVS: ${{ toJson(env) }}
-        shell: bash
-        run: |
-          echo "$ALL_THE_ENVS"
-
       # - name: Install dependencies
       #   run: |
       #     pip install poetry setuptools
@@ -49,20 +42,20 @@ jobs:
       #     sphinx-apidoc -f -o source darwin darwin/future &&\
       #     sphinx-build -b html source/ docs/ -W

-      # - name: Setup access to AWS
-      #   uses: aws-actions/configure-aws-credentials@v2
-      #   with:
-      #     role-to-assume: ${{ secrets.DARWIN_PY_AWS_GITHUB_CICD_ROLE }} # TODO: Check this works
-      #     role-session-name: ${{ env.AWS_SESSION_NAME }} # TODO: Check this works
-      #     aws-region: ${{ env.AWS_REGION }} # TODO: Check this works
+      - name: Setup access to AWS
+        uses: aws-actions/configure-aws-credentials@v2
+        with:
+          role-to-assume: ${{ secrets.DARWIN_PY_AWS_GITHUB_CICD_ROLE }}
+          role-session-name: ${{ env.AWS_SESSION_NAME }}
+          aws-region: ${{ env.AWS_REGION }}

       # - name: Upload docs to S3
       #   run: aws s3 cp docs/ s3://darwin-py-sdk.v7labs.com/ --recursive

-      # Generate and save docs github action artefact
-      - name: Generate and save docs github action artefact
-        uses: actions/upload-artifact@v2
-        with:
-          name: darwin-py-docs
-          path: docs/
+      # # Generate and save docs github action artefact
+      # - name: Generate and save docs github action artefact
+      #   uses: actions/upload-artifact@v2
+      #   with:
+      #     name: darwin-py-docs
+      #     path: docs/

From aa3a6488be305b1648a958e5ae63f7d50e25059e Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 14:33:36 +0100
Subject: [PATCH 138/195] Enable in place

---
 .github/workflows/EVENT_merge_to_master.yml      | 14 ++---
 .../workflows/JOB_generate_documentation.yml     | 51 +++++++++---------
 2 files changed, 26 insertions(+), 39 deletions(-)

diff --git a/.github/workflows/EVENT_merge_to_master.yml b/.github/workflows/EVENT_merge_to_master.yml
index 3a532626f..97a2d83da 100644
--- a/.github/workflows/EVENT_merge_to_master.yml
+++ b/.github/workflows/EVENT_merge_to_master.yml
@@ -4,7 +4,7 @@ run-name: Merge to master
 on:
   push:
     branches:
-      - io-1554-test-merge # TODO change to master
+      - master

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -12,17 +12,9 @@ jobs:

-  # run_tests:
-  #   name: Run tests
-  #   uses: ./.github/workflows/JOB_tests.yml
-
-  run_tests: # Save run time while we test this.
+  run_tests:
     name: Run tests
-    # passively succeed
-    runs-on: ubuntu-latest
-    steps:
-      - name: Run tests
-        run: echo "Run tests"
+    uses: ./.github/workflows/JOB_tests.yml

   documentation:
     name: Documentation
diff --git a/.github/workflows/JOB_generate_documentation.yml b/.github/workflows/JOB_generate_documentation.yml
index 726e3e117..a9d2bf786 100644
--- a/.github/workflows/JOB_generate_documentation.yml
+++ b/.github/workflows/JOB_generate_documentation.yml
@@ -28,41 +28,36 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}

-      # - name: Install dependencies
-      #   run: |
-      #     pip install poetry setuptools
-      #     poetry install --all-extras --no-interaction --no-root
-      #     pip install --editable ".[test,ml,medical,dev,ocv]"
-      #     pip install -U sphinx sphinx_rtd_theme m2r2
-
-      # - name: Parse README
-      #   run: |
-      #     rm -f README.rst&&\
-      #     m2r2 README.md&&\
-      #     mv README.rst source/
-
-      # - name: Generate new docs
-      #   env:
-      #     PYTHONPATH: "."
-      #   run: |
-      #     rm -rf docs/* &&\
-      #     sphinx-apidoc -f -o source darwin darwin/future &&\
-      #     sphinx-build -b html source/ docs/ -W
+      - name: Install dependencies
+        run: |
+          pip install poetry setuptools
+          poetry install --all-extras --no-interaction --no-root
+          pip install --editable ".[test,ml,medical,dev,ocv]"
+          pip install -U sphinx sphinx_rtd_theme m2r2
+
+      - name: Parse README
+        run: |
+          rm -f README.rst&&\
+          m2r2 README.md&&\
+          mv README.rst source/
+
+      - name: Generate new docs
+        env:
+          PYTHONPATH: "."
+        run: |
+          rm -rf docs/* &&\
+          sphinx-apidoc -f -o source darwin darwin/future &&\
+          sphinx-build -b html source/ docs/ -W

       - name: Setup access to AWS
+        id: aws_assume_role
         uses: aws-actions/configure-aws-credentials@v2
         with:
           role-to-assume: ${{ secrets.DARWIN_PY_AWS_GITHUB_CICD_ROLE }}
           role-session-name: ${{ env.AWS_SESSION_NAME }}
           aws-region: ${{ env.AWS_REGION }}

-      # - name: Upload docs to S3
-      #   run: aws s3 cp docs/ s3://darwin-py-sdk.v7labs.com/ --recursive
-
-      # # Generate and save docs github action artefact
-      # - name: Generate and save docs github action artefact
-      #   uses: actions/upload-artifact@v2
-      #   with:
-      #     name: darwin-py-docs
-      #     path: docs/
+      - name: Upload docs to S3
+        run: aws s3 cp docs/ s3://darwin-py-sdk.v7labs.com/ --recursive

From c5fcd78fa8d3e1e7acb9c9c2a6e83e83c931ea1d Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 14:36:24 +0100
Subject: [PATCH 139/195] Enable merge to master action

---
 .github/workflows/JOB_reverse_commit_on_fails.yml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/JOB_reverse_commit_on_fails.yml b/.github/workflows/JOB_reverse_commit_on_fails.yml
index 50abfd776..6da19b682 100644
--- a/.github/workflows/JOB_reverse_commit_on_fails.yml
+++ b/.github/workflows/JOB_reverse_commit_on_fails.yml
@@ -8,8 +8,10 @@ on:
 # Run when master is failing due to a merge, and you want to reverse the merge
 # and push the reverse commit to master.

-jobs:
+permissions:
+  contents: write

+jobs:
   step-master-back-one-commit:
     env:
       BRANCH_NAME: ${{ github.ref_name }}
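# NB: the default GITHUB_TOKEN is read-only for contents, so any job that
# pushes (the revert above, the release tagging below) needs an explicit
# grant, exactly as added here:
#
#   permissions:
#     contents: write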
git commit -m "HOUSEKEEPING: Increment version number to $version" git tag master "v$version" - git push origin master --tags + git status + # git push origin master --tags #TODO Ensure this triggers the release job, which is trigged on tag From 033a1f3dd86acac17fa8bb78f133ae4bdcfa2ffb Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 8 Sep 2023 14:40:45 +0100 Subject: [PATCH 141/195] Enable workflow dispatch for testing --- .github/workflows/EVENT_scheduled_release.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/EVENT_scheduled_release.yml b/.github/workflows/EVENT_scheduled_release.yml index 29ce1fc6b..f988faeb8 100644 --- a/.github/workflows/EVENT_scheduled_release.yml +++ b/.github/workflows/EVENT_scheduled_release.yml @@ -2,6 +2,7 @@ name: scheduled-release run-name: Scheduled Release on: + workflow_dispatch: schedule: - cron: '30 10 * * 2' # every Tuesday at 10:30am From 84828c5c48b63a2e5b3da2649883cbdf0599df3d Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 8 Sep 2023 14:42:32 +0100 Subject: [PATCH 142/195] Change crontab for testing --- .github/workflows/EVENT_scheduled_release.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/EVENT_scheduled_release.yml b/.github/workflows/EVENT_scheduled_release.yml index f988faeb8..5e873067c 100644 --- a/.github/workflows/EVENT_scheduled_release.yml +++ b/.github/workflows/EVENT_scheduled_release.yml @@ -4,7 +4,8 @@ run-name: Scheduled Release on: workflow_dispatch: schedule: - - cron: '30 10 * * 2' # every Tuesday at 10:30am + # - cron: '30 10 * * 2' # every Tuesday at 10:30am + - cron: '45 14 * * *' # every Tuesday at 10:30am permissions: contents: read From c99d45b91aa5739ade77deac69e7264b4e7b634d Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 8 Sep 2023 14:50:01 +0100 Subject: [PATCH 143/195] Use existing linear label updater --- .github/workflows/EVENT_scheduled_release.yml | 2 +- ...ate-linear-labels.yml => EVENT_update-linear-labels.yml} | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) rename .github/workflows/{old/OLD_update-linear-labels.yml => EVENT_update-linear-labels.yml} (90%) diff --git a/.github/workflows/EVENT_scheduled_release.yml b/.github/workflows/EVENT_scheduled_release.yml index 5e873067c..da10c97ca 100644 --- a/.github/workflows/EVENT_scheduled_release.yml +++ b/.github/workflows/EVENT_scheduled_release.yml @@ -5,7 +5,7 @@ on: workflow_dispatch: schedule: # - cron: '30 10 * * 2' # every Tuesday at 10:30am - - cron: '45 14 * * *' # every Tuesday at 10:30am + - cron: '55 14 * * *' # every Tuesday at 10:30am permissions: contents: read diff --git a/.github/workflows/old/OLD_update-linear-labels.yml b/.github/workflows/EVENT_update-linear-labels.yml similarity index 90% rename from .github/workflows/old/OLD_update-linear-labels.yml rename to .github/workflows/EVENT_update-linear-labels.yml index 171dd36d3..3e0056d3a 100644 --- a/.github/workflows/old/OLD_update-linear-labels.yml +++ b/.github/workflows/EVENT_update-linear-labels.yml @@ -1,10 +1,8 @@ name: "Update Linear Labels" -# on: -# pull_request: -# branches: [master] - on: + pull_request: + branches: [master] workflow_dispatch: jobs: From 6fc04cc8af3863e9e28f06ee4c40a52e51b4f820 Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 8 Sep 2023 15:02:33 +0100 Subject: [PATCH 144/195] Scheduled release CRON --- .github/workflows/EVENT_scheduled_release.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/EVENT_scheduled_release.yml 
From c99d45b91aa5739ade77deac69e7264b4e7b634d Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 14:50:01 +0100
Subject: [PATCH 143/195] Use existing linear label updater

---
 .github/workflows/EVENT_scheduled_release.yml               | 2 +-
 ...ate-linear-labels.yml => EVENT_update-linear-labels.yml} | 6 ++----
 2 files changed, 3 insertions(+), 5 deletions(-)
 rename .github/workflows/{old/OLD_update-linear-labels.yml => EVENT_update-linear-labels.yml} (90%)

diff --git a/.github/workflows/EVENT_scheduled_release.yml b/.github/workflows/EVENT_scheduled_release.yml
index 5e873067c..da10c97ca 100644
--- a/.github/workflows/EVENT_scheduled_release.yml
+++ b/.github/workflows/EVENT_scheduled_release.yml
@@ -5,7 +5,7 @@ on:
   workflow_dispatch:
   schedule:
     # - cron: '30 10 * * 2' # every Tuesday at 10:30am
-    - cron: '45 14 * * *' # every Tuesday at 10:30am
+    - cron: '55 14 * * *' # every Tuesday at 10:30am

 permissions:
   contents: read
diff --git a/.github/workflows/old/OLD_update-linear-labels.yml b/.github/workflows/EVENT_update-linear-labels.yml
similarity index 90%
rename from .github/workflows/old/OLD_update-linear-labels.yml
rename to .github/workflows/EVENT_update-linear-labels.yml
index 171dd36d3..3e0056d3a 100644
--- a/.github/workflows/old/OLD_update-linear-labels.yml
+++ b/.github/workflows/EVENT_update-linear-labels.yml
@@ -1,10 +1,8 @@
 name: "Update Linear Labels"

-# on:
-#   pull_request:
-#     branches: [master]
-
 on:
+  pull_request:
+    branches: [master]
   workflow_dispatch:

 jobs:

From 6fc04cc8af3863e9e28f06ee4c40a52e51b4f820 Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 15:02:33 +0100
Subject: [PATCH 144/195] Scheduled release CRON

---
 .github/workflows/EVENT_scheduled_release.yml | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/.github/workflows/EVENT_scheduled_release.yml b/.github/workflows/EVENT_scheduled_release.yml
index da10c97ca..f988faeb8 100644
--- a/.github/workflows/EVENT_scheduled_release.yml
+++ b/.github/workflows/EVENT_scheduled_release.yml
@@ -4,8 +4,7 @@ run-name: Scheduled Release
 on:
   workflow_dispatch:
   schedule:
-    # - cron: '30 10 * * 2' # every Tuesday at 10:30am
-    - cron: '55 14 * * *' # every Tuesday at 10:30am
+    - cron: '30 10 * * 2' # every Tuesday at 10:30am

 permissions:
   contents: read

From e458b8f8c6c05ba833b04cc85ef1c8c94e9e1a25 Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 15:11:24 +0100
Subject: [PATCH 145/195] Release process

---
 .github/workflows/EVENT_release.yml | 6 ++++--
 .github/workflows/EVENT_tag.yml     | 2 +-
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/EVENT_release.yml b/.github/workflows/EVENT_release.yml
index e1000c126..fe4ad5cbd 100644
--- a/.github/workflows/EVENT_release.yml
+++ b/.github/workflows/EVENT_release.yml
@@ -35,7 +35,8 @@ jobs:
         POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }}
         POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
       run: |
-        poetry publish
+        # poetry publish
+        echo "Publishing to pypi.org - Emulated"

     - name: Publish on test.pypi.org
       # if release is a prerelease, publish to test.pypi.org
@@ -44,4 +45,5 @@ jobs:
         POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.TEST_PYPI_USERNAME }}
         POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.TEST_PYPI_PASSWORD }}
       run: |
-        poetry publish
+        # poetry publish
+        echo "Publishing to test.pypi.org - Emulated"
diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index 2d3552e83..423726a6b 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -27,7 +27,7 @@ jobs:
     runs-on: ubuntu-latest
     needs: checkout
     steps:
-      - name: Check master is passing
+      - name: Check branch is passing
         uses: actions/github-script@v3
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}

From 64415ca9cc69355e7a48aca4e94381c1167644f6 Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 15:15:11 +0100
Subject: [PATCH 146/195] Debug check master is passing

---
 .github/workflows/EVENT_tag.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index 423726a6b..46a7593ae 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -37,6 +37,8 @@ jobs:
               repo: context.repo.repo,
               ref: '${{ github.head_ref || github.ref }}'
             });
+            //log state
+            console.log(state);
             if (state !== 'success') {
               core.setFailed('Tagged branch is not passing, cannot create a release');
             }

From 83ffe0f4218ae6f855f04c7cfcaa8044b8255320 Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 17:59:57 +0100
Subject: [PATCH 147/195] Making tag check test against main branch as
 default.
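
The check now resolves the repository's default branch and polls its combined
status until it leaves `pending`, failing after a timeout. A minimal sketch of
the polling pattern (step name, ref and retry counts here are illustrative,
not the exact workflow below):

    - name: Wait for default branch status
      uses: actions/github-script@v3
      with:
        github-token: ${{ secrets.GITHUB_TOKEN }}
        script: |
          const ref = 'master'; // assumed for illustration; the real step derives this
          for (let attempt = 0; attempt < 60; attempt++) { // ~10 minutes total
            const { data: { state } } = await github.repos.getCombinedStatusForRef({
              owner: context.repo.owner,
              repo: context.repo.repo,
              ref
            });
            if (state !== 'pending') {
              if (state !== 'success') core.setFailed('Branch is not passing');
              return;
            }
            await new Promise(resolve => setTimeout(resolve, 10000)); // wait 10s
          }
          core.setFailed('Timed out waiting for branch to not be pending');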
---
 .github/workflows/EVENT_tag.yml | 42 +++++++++++++++++++++++++++------
 1 file changed, 35 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index 46a7593ae..78eb4fa29 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -27,18 +27,46 @@ jobs:
     runs-on: ubuntu-latest
     needs: checkout
     steps:
-      - name: Check branch is passing
+      - name: Wait for branch to not be pending, and ensure it's passing
         uses: actions/github-script@v3
         with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
+            /*
+              NB: This only test the main branch, not the branch that was tagged - tests only run on that branch if it is a PR
+            */
+            // get main branch name
+            const main_branch = await github.repos.get({
              owner: context.repo.owner,
-             repo: context.repo.repo,
-             ref: '${{ github.head_ref || github.ref }}'
-           });
-           //log state
-           console.log(state);
+             repo: context.repo.repo
+            }).then(res => res.data.default_branch);
+
+            // wait until main branch is not pending
+            let state;
+
+            // set a timeout to avoid waiting forever
+            const TEN_MINUTES = 10 * 60 * 1000;
+            const avoidInfiniteWait = setTimeout(() => {
+              core.setFailed('Timed out waiting for main branch to not be pending');
+            }, TEN_MINUTES);
+
+            while (true) {
+              const { data: { state } } = await github.repos.getCombinedStatusForRef({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                ref: main_branch
+              });
+              if (state !== 'pending') {
+                break;
+              }
+              console.log('Main branch is pending, waiting 10 seconds');
+              await new Promise(resolve => setTimeout(resolve, 10000));
+            }
+            clearTimeout(avoidInfiniteWait);
+
+            console.log('Branch state: ' + state);
+            console.log('Branch ref: ' + ref');
+
            if (state !== 'success') {
              core.setFailed('Tagged branch is not passing, cannot create a release');
            }

From dcb8f56d61fadafb63d37229a16b032eba61f24d Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 18:05:52 +0100
Subject: [PATCH 148/195] Fixed the JS

---
 .github/workflows/EVENT_tag.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index 78eb4fa29..31a1b2458 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -65,7 +65,7 @@ jobs:
             clearTimeout(avoidInfiniteWait);

             console.log('Branch state: ' + state);
-            console.log('Branch ref: ' + ref');
+            console.log('Branch ref: ' + ref);

             if (state !== 'success') {
               core.setFailed('Tagged branch is not passing, cannot create a release');

From fb390f681ed688e11c8d937b54bf1c191561056b Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 18:17:31 +0100
Subject: [PATCH 149/195] Fixed the JS

---
 .github/workflows/EVENT_tag.yml | 26 +++++++++++++++++++-------
 1 file changed, 19 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index 31a1b2458..a4307279f 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -36,30 +36,42 @@ jobs:
             */
             // get main branch name
-            const main_branch = await github.repos.get({
+            // const main_branch = await github.repos.get({
+            //   owner: context.repo.owner,
+            //   repo: context.repo.repo
+            // }).then(res => res.data.default_branch);
+
+            const branch = '${{ github.head_ref || github.ref }}';
+
+            // get latest commit on branch
+            const { data: { object: { sha: ref } } } = await github.repos.getBranch({
              owner: context.repo.owner,
-             repo: context.repo.repo
-            }).then(res => res.data.default_branch);
+             repo: context.repo.repo,
+             branch: branch
+            });
+
+            console.log('This branch: ' + main_branch);
+            console.log('This branch ref: ' + ref);

-            // wait until main branch is not pending
+            // wait until branch is not pending
             let state;

             // set a timeout to avoid waiting forever
             const TEN_MINUTES = 10 * 60 * 1000;
             const avoidInfiniteWait = setTimeout(() => {
-              core.setFailed('Timed out waiting for main branch to not be pending');
+              core.setFailed('Timed out waiting for branch to not be pending');
             }, TEN_MINUTES);

             while (true) {
               const { data: { state } } = await github.repos.getCombinedStatusForRef({
                 owner: context.repo.owner,
                 repo: context.repo.repo,
-                ref: main_branch
+                ref: branch
               });
               if (state !== 'pending') {
                 break;
               }
-              console.log('Main branch is pending, waiting 10 seconds');
+              console.log('Branch is pending, waiting 10 seconds');
               await new Promise(resolve => setTimeout(resolve, 10000));
             }
             clearTimeout(avoidInfiniteWait);

From 447ba7ae2b1acabcf4aadc606cb74fcae4d35e2b Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 18:21:35 +0100
Subject: [PATCH 150/195] some debugging

---
 .github/workflows/EVENT_tag.yml | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index a4307279f..fb6c875d0 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -41,7 +41,14 @@ jobs:
             //   repo: context.repo.repo
             // }).then(res => res.data.default_branch);

+            // get info for tag
+            const main_branch = '${{ github.ref }}';
+
+
+
             const branch = '${{ github.head_ref || github.ref }}';
+            console.log('Head ref: ${{ github.head_ref }}');
+            console.log('Ref: ${{ github.ref }}');

             // get latest commit on branch
             const { data: { object: { sha: ref } } } = await github.repos.getBranch({

From 9e5aceee8dad202167b3b85fd2ea67cef28586a9 Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 18:23:57 +0100
Subject: [PATCH 151/195] Get contexts for debugging

---
 .github/workflows/EVENT_tag.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index fb6c875d0..bdea5c944 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -22,6 +22,8 @@ jobs:
       name: Checkout
       with:
         ref: ${{ github.head_ref || github.ref }}
+    - uses: ./.github/workflows/JOB_debug.yml
+      name: Debug

   check_master_is_passing:
     runs-on: ubuntu-latest

From 604e71b0682f4226176ce467a6fb7ea9c11a8fac Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 18:26:10 +0100
Subject: [PATCH 152/195] Add contexts

---
 .github/workflows/EVENT_tag.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index bdea5c944..2703dc2c2 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -22,7 +22,7 @@ jobs:
       name: Checkout
       with:
         ref: ${{ github.head_ref || github.ref }}
-    - uses: ./.github/workflows/JOB_debug.yml
+    - uses: ./.github/workflows/JOB_debug_contexts.yml
       name: Debug

From 3de6ad794e3e97bb87498fed3adf1fdd670bec22 Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 18:28:51 +0100
Subject: [PATCH 153/195] Correct debug

---
 .github/workflows/EVENT_tag.yml | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index 2703dc2c2..77c6850b8 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -22,8 +22,10 @@ jobs:
       name: Checkout
       with:
         ref: ${{ github.head_ref || github.ref }}
-    - uses: ./.github/workflows/JOB_debug_contexts.yml
-      name: Debug
+
+  debug:
+    needs: checkout
+    uses: ./.github/workflows/JOB_debug_contexts.yml

   check_master_is_passing:
     runs-on: ubuntu-latest

From 37256231b9a28dc13671e23da11e90151ca9efd5 Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 18:32:26 +0100
Subject: [PATCH 154/195] Some debug

---
 .github/workflows/EVENT_tag.yml | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index 77c6850b8..bfbdaae3c 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -46,13 +46,11 @@ jobs:
             // }).then(res => res.data.default_branch);

             // get info for tag
-            const main_branch = '${{ github.ref }}';
+            const branch = '${{ github.event.base_ref }}';
+            console.log('Branch: ${{ github.event.base_ref }}');

-
-
-            const branch = '${{ github.head_ref || github.ref }}';
-            console.log('Head ref: ${{ github.head_ref }}');
-            console.log('Ref: ${{ github.ref }}');
+            //stop
+            return;

             // get latest commit on branch
             const { data: { object: { sha: ref } } } = await github.repos.getBranch({

From de50d733a55138b7a8f280f6d53225d6b7698a2c Mon Sep 17 00:00:00 2001
From: Owen
Date: Fri, 8 Sep 2023 18:34:17 +0100
Subject: [PATCH 155/195] Correct JS

---
 .github/workflows/EVENT_tag.yml | 10 +---------
 1 file changed, 1 insertion(+), 9 deletions(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index bfbdaae3c..67fefa532 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -39,19 +39,11 @@ jobs:
             */
-            // get main branch name
-            // const main_branch = await github.repos.get({
-            //   owner: context.repo.owner,
-            //   repo: context.repo.repo
-            // }).then(res => res.data.default_branch);
-
+
             // get info for tag
             const branch = '${{ github.event.base_ref }}';
             console.log('Branch: ${{ github.event.base_ref }}');

-            //stop
-            return;
-
             // get latest commit on branch
             const { data: { object: { sha: ref } } } = await github.repos.getBranch({

From d6fb12c16fe42c21b3d279fa5b7d4754fdc1461a Mon Sep 17 00:00:00 2001
From: Owen
Date: Mon, 11 Sep 2023 14:20:09 +0100
Subject: [PATCH 156/195] Finding workflow runs

---
 .github/workflows/EVENT_tag.yml | 106 +++++++++++++++++---------------
 1 file changed, 56 insertions(+), 50 deletions(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index 67fefa532..f03d4023a 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -32,14 +32,10 @@ jobs:
     needs: checkout
     steps:
       - name: Wait for branch to not be pending, and ensure it's passing
-        uses: actions/github-script@v3
+        uses: actions/github-script@v6
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
-            /*
-              NB: This only test the main branch, not the branch that was tagged - tests only run on that branch if it is a PR
-            */
-            // get info for tag
             const branch = '${{ github.event.base_ref }}';
             console.log('Branch: ${{ github.event.base_ref }}');
@@ -54,51 +50,61 @@ jobs:
             console.log('This branch: ' + branch);
             console.log('This branch ref: ' + ref);

-            // wait until branch is not pending
-            let state;
-
-            // set a timeout to avoid waiting forever
-            const TEN_MINUTES = 10 * 60 * 1000;
-            const avoidInfiniteWait = setTimeout(() => {
-              core.setFailed('Timed out waiting for branch to not be pending');
-            }, TEN_MINUTES);
-
-            while (true) {
-              const { data: { state } } = await github.repos.getCombinedStatusForRef({
-                owner: context.repo.owner,
-                repo: context.repo.repo,
-                ref: branch
-              });
-              if (state !== 'pending') {
-                break;
-              }
-              console.log('Branch is pending, waiting 10 seconds');
-              await new Promise(resolve => setTimeout(resolve, 10000));
-            }
-            clearTimeout(avoidInfiniteWait);
-
-            console.log('Branch state: ' + state);
-            console.log('Branch ref: ' + ref);
-
-            if (state !== 'success') {
-              core.setFailed('Tagged branch is not passing, cannot create a release');
-            }
-
-  deploy:
-    needs: [checkout, check_master_is_passing]
-    runs-on: ubuntu-latest
-    steps:
-      - name: Create release
-        uses: actions/github-script@v3
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          script: |
-            const { data: { id: release_id } } = await github.repos.createRelease({
+            // Get last workflow run triggered by this branch
+            const { data: { workflow_runs: workflow_runs } } = await github.actions.listWorkflowRunsForRepo({
               owner: context.repo.owner,
               repo: context.repo.repo,
-              tag_name: context.ref,
-              name: context.ref,
-              draft: true,
-              prerelease: context.ref.startsWith('test-')
+              branch: branch,
+              status: 'completed',
             });
-            core.setOutput('release_id', release_id);
+
+            console.log('Workflow runs: ' + workflow_runs);
+
+            //// wait until branch is not pending
+            //let state;
+
+            //// set a timeout to avoid waiting forever
+            //const TEN_MINUTES = 10 * 60 * 1000;
+            //const avoidInfiniteWait = setTimeout(() => {
+            //  core.setFailed('Timed out waiting for branch to not be pending');
+            //}, TEN_MINUTES);
+
+            //while (true) {
+            //  const { data: { state } } = await github.repos.getCombinedStatusForRef({
+            //    owner: context.repo.owner,
+            //    repo: context.repo.repo,
+            //    ref: branch
+            //  });
+            //  if (state !== 'pending') {
+            //    break;
+            //  }
+            //  console.log('Branch is pending, waiting 10 seconds');
+            //  await new Promise(resolve => setTimeout(resolve, 10000));
+            //}
+            //clearTimeout(avoidInfiniteWait);
+
+            //console.log('Branch state: ' + state);
+            //console.log('Branch ref: ' + ref);
+
+            //if (state !== 'success') {
+            //  core.setFailed('Tagged branch is not passing, cannot create a release');
+            //}
+
+  # deploy:
+  #   needs: [checkout, check_master_is_passing]
+  #   runs-on: ubuntu-latest
+  #   steps:
+  #     - name: Create release
+  #       uses: actions/github-script@v3
+  #       with:
+  #         github-token: ${{ secrets.GITHUB_TOKEN }}
+  #         script: |
+  #           const { data: { id: release_id } } = await github.repos.createRelease({
+  #             owner: context.repo.owner,
+  #             repo: context.repo.repo,
+  #             tag_name: context.ref,
+  #             name: context.ref,
+  #             draft: true,
+  #             prerelease: context.ref.startsWith('test-')
+  #           });
+  #           core.setOutput('release_id', release_id);

From bc5f22c30f69b3a6e29fddd1ddbd3f1e5dc0a4e4 Mon Sep 17 00:00:00 2001
From: Owen
Date: Mon, 11 Sep 2023 14:29:08 +0100
Subject: [PATCH 157/195] Change version back

---
 .github/workflows/EVENT_tag.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index f03d4023a..ba29084dd 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -32,7 +32,7 @@ jobs:
     needs: checkout
     steps:
       - name: Wait for branch to not be pending, and ensure it's passing
-        uses: actions/github-script@v6
+        uses: actions/github-script@v3
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
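# NB: the v6 -> v3 flip above matters because from actions/github-script v5
# the Octokit REST methods moved under `github.rest.*`; calls written as
# `github.repos.getBranch(...)` only resolve on v3/v4:
#
#   // v3/v4 style, as used throughout this series:
#   const { data } = await github.repos.getBranch({ owner, repo, branch });
#   // v5+ equivalent would be:
#   // const { data } = await github.rest.repos.getBranch({ owner, repo, branch });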
From 34777c010f2654595becaf93a00b5fbd42710b92 Mon Sep 17 00:00:00 2001
From: Owen
Date: Mon, 11 Sep 2023 14:32:29 +0100
Subject: [PATCH 158/195] Fix JS

---
 .github/workflows/EVENT_tag.yml | 21 ++++++++++-----------
 1 file changed, 10 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index ba29084dd..68d81431c 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -41,24 +41,23 @@ jobs:
             console.log('Branch: ${{ github.event.base_ref }}');

             // get latest commit on branch
-            const { data: { object: { sha: ref } } } = await github.repos.getBranch({
+            const data = await github.repos.getBranch({
               owner: context.repo.owner,
               repo: context.repo.repo,
               branch: branch
             });

-            console.log('This branch: ' + main_branch);
-            console.log('This branch ref: ' + ref);
+            console.log(data)

             // Get last workflow run triggered by this branch
-            const { data: { workflow_runs: workflow_runs } } = await github.actions.listWorkflowRunsForRepo({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              branch: branch,
-              status: 'completed',
-            });
-
-            console.log('Workflow runs: ' + workflow_runs);
+            //const { data: { workflow_runs: workflow_runs } } = await github.actions.listWorkflowRunsForRepo({
+            //  owner: context.repo.owner,
+            //  repo: context.repo.repo,
+            //  branch: branch,
+            //  status: 'completed',
+            //});
+
+            //console.log('Workflow runs: ' + workflow_runs);

From 23fc1b182caec82fa6e91e50babc332ec1beef8e Mon Sep 17 00:00:00 2001
From: Owen
Date: Mon, 11 Sep 2023 14:47:46 +0100
Subject: [PATCH 159/195] Retrieve workflow runs

---
 .github/workflows/EVENT_tag.yml | 23 +++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index 68d81431c..98d2c4280 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -28,6 +28,7 @@ jobs:
     uses: ./.github/workflows/JOB_debug_contexts.yml

   check_master_is_passing:
+    name: Check master is passing
     runs-on: ubuntu-latest
     needs: checkout
     steps:
@@ -42,23 +42,25 @@ jobs:

             // get latest commit on branch
-            const data = await github.repos.getBranch({
+            const { data } = await github.repos.getBranch({
               owner: context.repo.owner,
               repo: context.repo.repo,
               branch: branch
             });

-            console.log(data)
+            const branch = data.name;
+            const mostRecentCommit = data.commit.sha;
+            console.log('Branch: ' + branch);
+            console.log('Most recent commit: ' + mostRecentCommit);

             // Get last workflow run triggered by this branch
-            //const { data: { workflow_runs: workflow_runs } } = await github.actions.listWorkflowRunsForRepo({
-            //  owner: context.repo.owner,
-            //  repo: context.repo.repo,
-            //  branch: branch,
-            //  status: 'completed',
-            //});
-
-            //console.log('Workflow runs: ' + workflow_runs);
+            const { data: { workflow_runs: workflow_runs } } = await github.actions.listWorkflowRunsForRepo({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              branch: branch,
+              status: 'completed',
+            })
+            console.log('Workflow runs: ' + workflow_runs);

From 1130a3b8b0a8ec915b1b3cf8559aec8c4955a053 Mon Sep 17 00:00:00 2001
From: Owen
Date: Mon, 11 Sep 2023 15:03:13 +0100
Subject: [PATCH 160/195] JS correction

---
 .github/workflows/EVENT_tag.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index 98d2c4280..afc192ca7 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -48,9 +48,7 @@ jobs:
               branch: branch
             });

-            const branch = data.name;
             const mostRecentCommit = data.commit.sha;
-            console.log('Branch: ' + branch);
             console.log('Most recent commit: ' + mostRecentCommit);

             // Get last workflow run triggered by this branch

From 8eb383d5a14929da744046906200ae7a0c5e9e74 Mon Sep 17 00:00:00 2001
From: Owen
Date: Mon, 11 Sep 2023 16:11:48 +0100
Subject: [PATCH 161/195] Trying a new approach

---
 .github/workflows/EVENT_tag.yml | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index afc192ca7..b035b89e8 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -51,14 +51,21 @@ jobs:
             const mostRecentCommit = data.commit.sha;
             console.log('Most recent commit: ' + mostRecentCommit);

-            // Get last workflow run triggered by this branch
-            const { data: { workflow_runs: workflow_runs } } = await github.actions.listWorkflowRunsForRepo({
+            const { data: commitData } = await github.repos.getCombinedStatusForRef({
               owner: context.repo.owner,
               repo: context.repo.repo,
-              branch: branch,
-              status: 'completed',
-            })
-            console.log('Workflow runs: ' + workflow_runs);
+              ref: mostRecentCommit
+            });
+
+
+            const { data: branchData } await github.repos.getCombinedStatusForRef({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              ref: branch
+            });
+
+            console.log('Commit state: ' + commitData);
+            console.log('Branch state: ' + branchData);

             //// wait until branch is not pending
             //let state;

From 3645276e7aa5fe0b50754b1643a1b756792939e9 Mon Sep 17 00:00:00 2001
From: Owen
Date: Mon, 11 Sep 2023 16:14:59 +0100
Subject: [PATCH 162/195] Reverse destructuring

---
 .github/workflows/EVENT_tag.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index b035b89e8..bd2a342a7 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -51,14 +51,14 @@ jobs:
             const mostRecentCommit = data.commit.sha;
             console.log('Most recent commit: ' + mostRecentCommit);

-            const { data: commitData } = await github.repos.getCombinedStatusForRef({
+            const { commitData: data } = await github.repos.getCombinedStatusForRef({
               owner: context.repo.owner,
               repo: context.repo.repo,
               ref: mostRecentCommit
             });

-            const { data: branchData } await github.repos.getCombinedStatusForRef({
+            const { branchData: data } await github.repos.getCombinedStatusForRef({
               owner: context.repo.owner,
               repo: context.repo.repo,
               ref: branch
             });

From 06be44e6f2f5e08d1e0c15845c71fce78b757330 Mon Sep 17 00:00:00 2001
From: Owen
Date: Mon, 11 Sep 2023 17:05:23 +0100
Subject: [PATCH 163/195] Used different approach

---
 .github/workflows/EVENT_tag.yml | 96 +++++----------------------------
 1 file changed, 14 insertions(+), 82 deletions(-)

diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml
index bd2a342a7..7de181379 100644
--- a/.github/workflows/EVENT_tag.yml
+++ b/.github/workflows/EVENT_tag.yml
@@ -23,95 +23,27 @@ jobs:
     with:
       ref: ${{ github.head_ref || github.ref }}

-  debug:
-    needs: checkout
-    uses: ./.github/workflows/JOB_debug_contexts.yml
+  run_tests:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Run tests
+        uses: ./.github/workflows/run_tests.yml

-  check_master_is_passing:
-    name: Check master is passing
+  deploy:
+    needs: [checkout, run_tests]
     runs-on: ubuntu-latest
-    needs: checkout
     steps:
-      - name: Wait for branch to not be pending, and ensure it's passing
+      - name: Create release
         uses: actions/github-script@v3
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
-            // get info for tag
-            const branch = '${{ github.event.base_ref }}';
-            console.log('Branch: ${{ github.event.base_ref }}');
-
-            // get latest commit on branch
-            const { data } = await github.repos.getBranch({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              branch: branch
-            });
-
-            const mostRecentCommit = data.commit.sha;
-            console.log('Most recent commit: ' + mostRecentCommit);
-
-            const { commitData: data } = await github.repos.getCombinedStatusForRef({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              ref: mostRecentCommit
-            });
-
-
-            const { branchData: data } await github.repos.getCombinedStatusForRef({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              ref: branch
-            });
-
-            console.log('Commit state: ' + commitData);
-            console.log('Branch state: ' + branchData);
-
-            //// wait until branch is not pending
-            //let state;
-
-            //// set a timeout to avoid waiting forever
-            //const TEN_MINUTES = 10 * 60 * 1000;
-            //const avoidInfiniteWait = setTimeout(() => {
-            //  core.setFailed('Timed out waiting for branch to not be pending');
-            //}, TEN_MINUTES);
-
-            //while (true) {
-            //  const { data: { state } } = await github.repos.getCombinedStatusForRef({
-            //    owner: context.repo.owner,
-            //    repo: context.repo.repo,
-            //    ref: branch
-            //  });
-            //  if (state !== 'pending') {
-            //    break;
-            //  }
-            //  console.log('Branch is pending, waiting 10 seconds');
-            //  await new Promise(resolve => setTimeout(resolve, 10000));
-            //}
-            //clearTimeout(avoidInfiniteWait);
-
-            //console.log('Branch state: ' + state);
-            //console.log('Branch ref: ' + ref);
-
-            //if (state !== 'success') {
-            //  core.setFailed('Tagged branch is not passing, cannot create a release');
-            //}
-
-  # deploy:
-  #   needs: [checkout, check_master_is_passing]
-  #   runs-on: ubuntu-latest
-  #   steps:
-  #     - name: Create release
-  #       uses: actions/github-script@v3
-  #       with:
-  #         github-token: ${{ secrets.GITHUB_TOKEN }}
-  #         script: |
-  #           const { data: { id: release_id } } = await github.repos.createRelease({
-  #             owner: context.repo.owner,
-  #             repo: context.repo.repo,
-  #             tag_name: context.ref,
-  #             name: context.ref,
-  #             draft: true,
-  #             prerelease: context.ref.startsWith('test-')
-  #           });
-  #           core.setOutput('release_id', release_id);
+            const { data: { id: release_id } } = await github.repos.createRelease({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              tag_name: context.ref,
+              name: context.ref,
+              draft: true,
+              prerelease: context.ref.startsWith('test-')
+            });
+            core.setOutput('release_id', release_id);

From 7b107c9b4fae324194ef971143d099d6351d9bac Mon Sep 17 00:00:00 2001
From: Owen
Date: Mon, 11 Sep 2023 17:31:52 +0100
Subject: [PATCH 164/195] Release path changes

---
 .github/workflows/EVENT_release.yml |  2 ++
 .github/workflows/EVENT_tag.yml     | 12 +++++++-----
 2 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/EVENT_release.yml b/.github/workflows/EVENT_release.yml
index fe4ad5cbd..346f12a18 100644
--- a/.github/workflows/EVENT_release.yml
+++ b/.github/workflows/EVENT_release.yml
@@ -5,6 +5,8 @@ on:
   release:
     types: [created]

+  workflow_call:
+
 jobs:
   deploy:
     runs-on: ubuntu-latest
diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml index 7de181379..5dbbdcd34 100644 --- a/.github/workflows/EVENT_tag.yml +++ b/.github/workflows/EVENT_tag.yml @@ -24,12 +24,9 @@ jobs: ref: ${{ github.head_ref || github.ref }} run_tests: - runs-on: ubuntu-latest - steps: - - name: Run tests - uses: ./.github/workflows/run_tests.yml + uses: ./.github/workflows/JOB_tests.yml - deploy: + create_release: needs: [checkout, run_tests] runs-on: ubuntu-latest steps: @@ -47,3 +44,8 @@ jobs: prerelease: context.ref.startsWith('test-') }); core.setOutput('release_id', release_id); + + release: + needs: [create_release] + name: Release + uses: ./.github/workflows/EVENT_release.yml From ecc1abd9ebac33397d3271ed38613b56d8dc4ec4 Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 11 Sep 2023 17:33:01 +0100 Subject: [PATCH 165/195] Add name to step --- .github/workflows/EVENT_tag.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml index 5dbbdcd34..9a68693e6 100644 --- a/.github/workflows/EVENT_tag.yml +++ b/.github/workflows/EVENT_tag.yml @@ -27,6 +27,7 @@ jobs: uses: ./.github/workflows/JOB_tests.yml create_release: + name: Create Release needs: [checkout, run_tests] runs-on: ubuntu-latest steps: From 25d3e96f9ccf3d39a20d0d1d851de51356a5c036 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 12 Sep 2023 12:16:45 +0100 Subject: [PATCH 166/195] Removed clutter --- .github/workflows/JOB_documentation.yml | 53 ------------------------- deploy/build.sh | 27 ------------- deploy/build_documentation.py | 18 --------- deploy/check_poetry.sh | 9 ----- deploy/check_version_validity.py | 0 deploy/deploy.py | 18 --------- deploy/docs_dependencies.sh | 17 -------- deploy/docs_generate_docs.sh | 7 ---- deploy/docs_parse_readme.sh | 7 ---- deploy/install_deps.sh | 37 ----------------- deploy/validate_version_and_update.py | 18 --------- 11 files changed, 211 deletions(-) delete mode 100644 .github/workflows/JOB_documentation.yml delete mode 100755 deploy/build.sh delete mode 100755 deploy/build_documentation.py delete mode 100755 deploy/check_poetry.sh delete mode 100755 deploy/check_version_validity.py delete mode 100755 deploy/deploy.py delete mode 100755 deploy/docs_dependencies.sh delete mode 100755 deploy/docs_generate_docs.sh delete mode 100755 deploy/docs_parse_readme.sh delete mode 100755 deploy/install_deps.sh delete mode 100755 deploy/validate_version_and_update.py diff --git a/.github/workflows/JOB_documentation.yml b/.github/workflows/JOB_documentation.yml deleted file mode 100644 index 62763a9a9..000000000 --- a/.github/workflows/JOB_documentation.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: documentation -run-name: Generate Documentation - -on: - push: - branches: - - master - -concurrency: - group: documentation - cancel-in-progress: true - -env: - GH_TOKEN: ${{ secrets.GH_TOKEN }} - AWS_REGION: eu-west-1 - AWS_SESSION_NAME: darwinPyDocumentation - - -permissions: - id-token: write # This is required for requesting the JWT - contents: read # This is required for actions/checkout - -jobs: - generate-docs: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: [3.8, 3.9, "3.10", "3.11"] - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: ${{ github.workspace }}/scripts/docs-dependencies.sh - - name: Parse README - run: ${{ github.workspace 
}}/scripts/docs_parse_readme.sh - - name: Generate new docs - env: - PYTHONPATH: "." - run: ${{ github.workspace }}/scripts/docs_generate_docs.sh - - - name: Setup access to AWS - uses: aws-actions/configure-aws-credentials@v2 - with: - role-to-assume: ${{ secrets.DARWIN_PY_AWS_GITHUB_CICD_ROLE }} - role-session-name: ${{ env.AWS_SESSION_NAME }} - aws-region: ${{ env.AWS_REGION }} - - name: Upload docs to S3 - run: aws s3 cp docs/ s3://darwin-py-sdk.v7labs.com/ --recursive - - \ No newline at end of file diff --git a/deploy/build.sh b/deploy/build.sh deleted file mode 100755 index 4036df004..000000000 --- a/deploy/build.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env bash - -# Check python and pip are installed -echo "Check that python3 and pip3 are installed" -./deploy/check_python.sh || exit $? - - -echo "Check that poetry is installed" -if ! command -v poetry &> /dev/null -then - # Try to run install deps script, and if that fails, exit gracefully - echo "Poetry could not be found" - echo "Installing dependencies" - - .deploy/install_deps.sh || exit 1 -fi - -# Check poetry is installed -if ! command -v poetry &> /dev/null -then - echo "Poetry could not be found after dependency install" - exit 2 -fi - -poetry build || exit 3 - - diff --git a/deploy/build_documentation.py b/deploy/build_documentation.py deleted file mode 100755 index 307a15823..000000000 --- a/deploy/build_documentation.py +++ /dev/null @@ -1,18 +0,0 @@ -#! /usr/bin/env python3 -import logging -import sys - -logger = logging.getLogger(__name__) - -logger.setLevel(logging.INFO) - - -def main() -> None: - # TODO: Implement - logger.info("This function is not yet implemented") - logger.info(f"This file is {__file__}") - logger.info("args: {}".format(sys.argv)) - - -if __name__ == "__main__": - main() diff --git a/deploy/check_poetry.sh b/deploy/check_poetry.sh deleted file mode 100755 index 4f3922819..000000000 --- a/deploy/check_poetry.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env bash -# Check poetry is installed -if ! command -v poetry &> /dev/null -then - echo "Poetry could not be found" - exit 4 -fi - -exit 0 \ No newline at end of file diff --git a/deploy/check_version_validity.py b/deploy/check_version_validity.py deleted file mode 100755 index e69de29bb..000000000 diff --git a/deploy/deploy.py b/deploy/deploy.py deleted file mode 100755 index 307a15823..000000000 --- a/deploy/deploy.py +++ /dev/null @@ -1,18 +0,0 @@ -#! /usr/bin/env python3 -import logging -import sys - -logger = logging.getLogger(__name__) - -logger.setLevel(logging.INFO) - - -def main() -> None: - # TODO: Implement - logger.info("This function is not yet implemented") - logger.info(f"This file is {__file__}") - logger.info("args: {}".format(sys.argv)) - - -if __name__ == "__main__": - main() diff --git a/deploy/docs_dependencies.sh b/deploy/docs_dependencies.sh deleted file mode 100755 index 1e3e136bc..000000000 --- a/deploy/docs_dependencies.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin env python3 - -# TODO: refactor as needed - -python -m pip install --upgrade pip -pip install poetry -poetry install --all-extras --no-interaction --no-root -pip install wheel -pip install --upgrade setuptools -pip install --editable ".[test,ml,medical,dev]" -pip install torch torchvision -pip install -U sphinx -# Locking mistune version so m2r works. 
More info on issue: -# https://github.com/miyakogi/m2r/issues/66 -pip install mistune==0.8.4 # TODO: Mistune is now at version 3, so this is quite old, look into upgrading -pip install m2r # TODO: m2r is deprecated. Find alternative. -pip install sphinx_rtd_theme \ No newline at end of file diff --git a/deploy/docs_generate_docs.sh b/deploy/docs_generate_docs.sh deleted file mode 100755 index a266bbb3e..000000000 --- a/deploy/docs_generate_docs.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash - -#TODO: refactor as needed - -rm -rf docs/* -sphinx-apidoc -f -o source darwin darwin/future -sphinx-build -b html source/ docs/ -W \ No newline at end of file diff --git a/deploy/docs_parse_readme.sh b/deploy/docs_parse_readme.sh deleted file mode 100755 index 07ca87af6..000000000 --- a/deploy/docs_parse_readme.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash - -# TODO: refactor as needed - -rm -f README.rst -m2r README.md -mv README.rst source/ \ No newline at end of file diff --git a/deploy/install_deps.sh b/deploy/install_deps.sh deleted file mode 100755 index 026589690..000000000 --- a/deploy/install_deps.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env bash -# This script installs dependencies for the project -# It is intended for CI/CD, but can be run locally -# It will exit at any point that fails, and will return different exit codes: -# 1 - Python3 not found -# 2 - Python version is not 3.8 or higher -# 3 - pip3 not found -# 4 - Poetry not found after attempted install -# 5 - pip3 upgrade failed -# 6 - Poetry install failed - - -echo "Installing dependencies" - -# Check python is installed -./deploy/check_python.sh || exit $? - -# Check poetry is installed, and install if not -if ! command -v poetry &> /dev/null -then - curl -sSL https://install.python-poetry.org | python3 - -fi - -# Check poetry is installed -if ! command -v poetry &> /dev/null -then - echo "Poetry could not be found" - exit 4 -fi - -# Install dependencies -python3 -m pip install --upgrade pip || exit 5 -python3 -m poetry install --all-extras --no-interaction --no-root || exit 6 - -echo "Dependencies installed" - -exit 0 \ No newline at end of file diff --git a/deploy/validate_version_and_update.py b/deploy/validate_version_and_update.py deleted file mode 100755 index 307a15823..000000000 --- a/deploy/validate_version_and_update.py +++ /dev/null @@ -1,18 +0,0 @@ -#! 
/usr/bin/env python3 -import logging -import sys - -logger = logging.getLogger(__name__) - -logger.setLevel(logging.INFO) - - -def main() -> None: - # TODO: Implement - logger.info("This function is not yet implemented") - logger.info(f"This file is {__file__}") - logger.info("args: {}".format(sys.argv)) - - -if __name__ == "__main__": - main() From f2a42371b236a5369b1958465cb86712cef1b9c5 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 12 Sep 2023 12:33:01 +0100 Subject: [PATCH 167/195] Release refactor --- .github/workflows/EVENT_release.yml | 89 ++++++++++++++++++- .github/workflows/EVENT_scheduled_release.yml | 16 +++- .github/workflows/EVENT_tag.yml | 30 +------ .github/workflows/JOB_create_release.yml | 47 ++++++++++ 4 files changed, 153 insertions(+), 29 deletions(-) create mode 100644 .github/workflows/JOB_create_release.yml diff --git a/.github/workflows/EVENT_release.yml b/.github/workflows/EVENT_release.yml index 346f12a18..b4b908d09 100644 --- a/.github/workflows/EVENT_release.yml +++ b/.github/workflows/EVENT_release.yml @@ -6,9 +6,42 @@ on: types: [created] workflow_call: + inputs: + release_id: + type: string + description: "The id of the release" + required: false + + release_tag: + type: string + description: "The tag of the release" + required: false + +env: + release_id: ${{ inputs.release_id || github.event.release.id }} + release_tag: ${{ inputs.release_tag || github.event.release.tag_name }} jobs: - deploy: + validate_tag: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ github.ref }} + + - name: Fail early if tag schema is invalid + run: | + if [[ ! ${{ env.release_tag }} =~ ^refs/tags/(v[0-9]+\.[0-9]+\.[0-9]+)$ && ${{ env.release_tag }} =~ ^refs/tags/test-.*$ ]]; then + echo "Tag ${{ env.release_tag }} is not a valid semver tag" + exit 1 + fi + + run_tests: + needs: validate_tag + uses: ./.github/workflows/JOB_tests.yml + + build: + needs: validate_tag runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -31,6 +64,19 @@ jobs: poetry install --no-interaction --no-root --all-extras -vvv poetry build + - name: Add build to release + uses: actions/upload-release-asset@v1 + with: + upload_url: ${{ github.event.release.upload_url }} + asset_path: ./dist/* + asset_name: ${{ env.release_id }}.tar.gz + asset_content_type: application/gzip + + release: + needs: [run_tests, build] + if: startsWith(github.ref, 'refs/tags/v') + runs-on: ubuntu-latest + steps: - name: Publish on pypi.org if: startsWith(github.ref, 'refs/tags/v') env: @@ -40,6 +86,11 @@ jobs: # poetry publish echo "Publishing to pypi.org - Emulated" + test_release: + needs: [run_tests, build] + if: startsWith(github.ref, 'refs/tags/test-') + runs-on: ubuntu-latest + steps: - name: Publish on test.pypi.org # if release is a prerelease, publish to test.pypi.org if: startsWith(github.ref, 'refs/tags/test-') @@ -49,3 +100,39 @@ jobs: run: | # poetry publish echo "Publishing to test.pypi.org - Emulated" + + set_release_status: + needs: [release, test_release] + if: always() && contains(needs.*.result, 'success') + runs-on: ubuntu-latest + steps: + - name: Set release status + uses: actions/github-script@v3 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + github.repos.createReleaseStatus({ + owner: context.repo.owner, + repo: context.repo.repo, + release_id: ${{ env.release_id }}, + state: 'success', + description: 'Release was successful', + target_url: ' + + notify_release: + # Slack notify + needs: [release, test_release] + if: always() && 
contains(needs.*.result, 'success') + runs-on: ubuntu-latest + steps: + - name: Notify Slack + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }} + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} + SLACK_USERNAME: ${{ secrets.SLACK_USERNAME }} + SLACK_ICON: ${{ secrets.SLACK_ICON }} + SLACK_MESSAGE: | + *Darwin-py ${{ env.release_tag }} has been released 🎉* + ${{ github.event.release.html_url }} + ${{ github.event.release.body }} diff --git a/.github/workflows/EVENT_scheduled_release.yml b/.github/workflows/EVENT_scheduled_release.yml index f988faeb8..944bbe0e1 100644 --- a/.github/workflows/EVENT_scheduled_release.yml +++ b/.github/workflows/EVENT_scheduled_release.yml @@ -52,6 +52,20 @@ jobs: git tag master "v$version" git status # git push origin master --tags + echo "tag_name=v$version" >> $GITHUB_ENV + + create_release: + name: Create release + needs: commit_changes_to_master + uses: ./.github/workflows/JOB_create_release.yml + with: + is_draft: false + + release: + needs: create_release + uses: ./.github/workflows/EVENT_release.yml + with: + release_tag: ${{ github.env.tag_name }} + release_id: ${{ needs.create_release.outputs.release_id }} - #TODO Ensure this triggers the release job, which is trigged on tag diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml index 9a68693e6..159e5a113 100644 --- a/.github/workflows/EVENT_tag.yml +++ b/.github/workflows/EVENT_tag.yml @@ -15,7 +15,8 @@ env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} jobs: - checkout: + create_release: + name: Create Release runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -23,30 +24,5 @@ jobs: with: ref: ${{ github.head_ref || github.ref }} - run_tests: - uses: ./.github/workflows/JOB_tests.yml - - create_release: - name: Create Release - needs: [checkout, run_tests] - runs-on: ubuntu-latest - steps: - name: Create release - uses: actions/github-script@v3 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - const { data: { id: release_id } } = await github.repos.createRelease({ - owner: context.repo.owner, - repo: context.repo.repo, - tag_name: context.ref, - name: context.ref, - draft: true, - prerelease: context.ref.startsWith('test-') - }); - core.setOutput('release_id', release_id); - - release: - needs: [create_release] - name: Release - uses: ./.github/workflows/EVENT_release.yml + uses: ./.github/workflows/JOB_create_release.yml diff --git a/.github/workflows/JOB_create_release.yml b/.github/workflows/JOB_create_release.yml new file mode 100644 index 000000000..f1da9ecc3 --- /dev/null +++ b/.github/workflows/JOB_create_release.yml @@ -0,0 +1,47 @@ +name: created_tag +run-name: Created Tag + +on: + workflow_call: + outputs: + release_id: + description: The ID of the release that was created + value: ${{ jobs.create_release.outputs.release_id }} + inputs: + is_draft: + type: boolean + description: Whether the release should be a draft + required: false + default: true + +env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + +jobs: + create_release: + outputs: + release_id: # Set in the JS + + name: Create Release + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + name: Checkout + with: + ref: ${{ github.head_ref || github.ref }} + + - name: Create release + id: create_release + uses: actions/github-script@v3 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const { data: { id: release_id } } = await github.repos.createRelease({ + owner: context.repo.owner, + repo: context.repo.repo, + tag_name: context.ref, 
+ name: context.ref, + draft: ${{ github.event.inputs.is_draft }}, + }); + core.setOutput('release_id', release_id); From efedae631fa59750e6fa997f7a3593d7424e6549 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 12 Sep 2023 14:05:51 +0100 Subject: [PATCH 168/195] Added a codeowners file --- .github/CODEOWNERS | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..bb0e64f74 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,4 @@ +owencjones +nathanjp91 +simedw +andreaazzini From 478c71f32b3f514c4f7ab51d497b96976b5e0cf1 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 12 Sep 2023 14:08:14 +0100 Subject: [PATCH 169/195] Update to CODEOWNERS file --- .github/CODEOWNERS | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index bb0e64f74..e312966c0 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,4 +1,4 @@ -owencjones -nathanjp91 -simedw -andreaazzini +@owencjones +@nathanjp91 +@simedw +@andreaazzini From 64da97d6b0f7eb5c67be8cd7bfeefb72769e591e Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 12 Sep 2023 14:32:38 +0100 Subject: [PATCH 170/195] Release draft system --- .github/workflows/EVENT_release.yml | 29 +++++-------------- .github/workflows/EVENT_scheduled_release.yml | 5 ++-- .github/workflows/EVENT_tag.yml | 3 ++ 3 files changed, 12 insertions(+), 25 deletions(-) diff --git a/.github/workflows/EVENT_release.yml b/.github/workflows/EVENT_release.yml index b4b908d09..b08d51d96 100644 --- a/.github/workflows/EVENT_release.yml +++ b/.github/workflows/EVENT_release.yml @@ -3,7 +3,7 @@ run-name: Release on: release: - types: [created] + types: [released] workflow_call: inputs: @@ -17,9 +17,15 @@ on: description: "The tag of the release" required: false + is_draft: + type: boolean + description: "Is the release a draft" + required: false + env: release_id: ${{ inputs.release_id || github.event.release.id }} release_tag: ${{ inputs.release_tag || github.event.release.tag_name }} + is_draft: ${{ inputs.is_draft || github.event.release.draft }} jobs: validate_tag: @@ -78,7 +84,6 @@ jobs: runs-on: ubuntu-latest steps: - name: Publish on pypi.org - if: startsWith(github.ref, 'refs/tags/v') env: POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }} POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }} @@ -92,8 +97,6 @@ jobs: runs-on: ubuntu-latest steps: - name: Publish on test.pypi.org - # if release is a prerelease, publish to test.pypi.org - if: startsWith(github.ref, 'refs/tags/test-') env: POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.TEST_PYPI_USERNAME }} POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.TEST_PYPI_PASSWORD }} @@ -101,24 +104,6 @@ jobs: # poetry publish echo "Publishing to test.pypi.org - Emulated" - set_release_status: - needs: [release, test_release] - if: always() && contains(needs.*.result, 'success') - runs-on: ubuntu-latest - steps: - - name: Set release status - uses: actions/github-script@v3 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - github.repos.createReleaseStatus({ - owner: context.repo.owner, - repo: context.repo.repo, - release_id: ${{ env.release_id }}, - state: 'success', - description: 'Release was successful', - target_url: ' - notify_release: # Slack notify needs: [release, test_release] diff --git a/.github/workflows/EVENT_scheduled_release.yml b/.github/workflows/EVENT_scheduled_release.yml index 944bbe0e1..04b9ec640 100644 --- 
a/.github/workflows/EVENT_scheduled_release.yml +++ b/.github/workflows/EVENT_scheduled_release.yml @@ -59,7 +59,7 @@ jobs: needs: commit_changes_to_master uses: ./.github/workflows/JOB_create_release.yml with: - is_draft: false + is_draft: true # will not publish, only create draft release: needs: create_release @@ -67,5 +67,4 @@ jobs: with: release_tag: ${{ github.env.tag_name }} release_id: ${{ needs.create_release.outputs.release_id }} - - + is_draft: true # will not publish, only create draft diff --git a/.github/workflows/EVENT_tag.yml b/.github/workflows/EVENT_tag.yml index 159e5a113..f93b719ee 100644 --- a/.github/workflows/EVENT_tag.yml +++ b/.github/workflows/EVENT_tag.yml @@ -26,3 +26,6 @@ jobs: - name: Create release uses: ./.github/workflows/JOB_create_release.yml + with: + is_draft: true + From 9476640cd03db91fa4c671e247a41d78bb861981 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 12 Sep 2023 15:02:57 +0100 Subject: [PATCH 171/195] One step deploy script --- deploy/create_release.sh | 79 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 79 insertions(+) create mode 100644 deploy/create_release.sh diff --git a/deploy/create_release.sh b/deploy/create_release.sh new file mode 100644 index 000000000..f1823ee3b --- /dev/null +++ b/deploy/create_release.sh @@ -0,0 +1,79 @@ +#!/usr/bin/env bash +WORKING_DIR=$(dirname "$0") + +echo "CLI Tool for creating a new release in one step." +echo +echo "Releases from master branch only. To perform releases from other branches, tag the branch and push the tag" +echo +echo "Usage: ./deploy/create_release.sh" +echo + +# Check that the current branch is master +CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD) +if [ "$CURRENT_BRANCH" != "master" ]; then + echo "ERROR: You must be on the master branch to create a release." + exit 1 +fi + +# Check that the working directory is clean +if [ -n "$(git status --porcelain)" ]; then + echo "ERROR: You have uncommitted changes. Please commit or stash them before creating a release." + exit 1 +fi + +# Check that the remote is set +REMOTE=$(git remote get-url origin) +if [ -z "$REMOTE" ]; then + echo "ERROR: The remote is not set. Please set the remote before creating a release." + exit 1 +fi + +# Increment the version number one patch version +"$WORKING_DIR/increase_version.sh --patch --force" +if [ $? -ne 0 ]; then + echo "ERROR: Failed to increase the version number." + exit 1 +fi +VERSION=$(cat "$WORKING_DIR/../VERSION") +if [ -z "$VERSION" ]; then + echo "ERROR: Failed to read the version number." + exit 1 +fi + +echo "Created changes for release v$VERSION" +echo +echo "Please review the changes and commit them." +echo +echo "Continue? (y/n)" +read -r CONTINUE +if [ "$CONTINUE" != "y" ]; then + echo "Aborting." + exit 1 +fi +echo + +# Commit the changes +commit_and_tag=$(git add "$WORKING_DIR/../darwin/version/__init__.py" && \ + git add "$WORKING_DIR/../pyproject.toml" && \ + git commit -m "HOUSEKEEPING: Bump version to v$VERSION" && \ + git tag master "v$VERSION" && \ +git push origin "v$VERSION") +masterpush=$(git push origin master) + +if [ "$commit_and_tag" -ne 0 ]; then + echo "ERROR: Failed to commit the changes and tag the release. You may have an issue with your git configuration." + exit 1 +fi + +if [ "$masterpush" -ne 0 ]; then + echo "ERROR: Failed to push the changes. You need to be an admin to bump version directly on master." + echo "Stash your changes to a branch and create a pull request." 
+ exit 1 +fi + + +echo "Successfully created release v$VERSION" +echo "The release action should trigger, and the release will be available on PyPI in ~20m." +echo "Check the github actions tab for the status of the release." + +exit 0 From 437002abee6af31f3f176350aa4644274024cd29 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 12 Sep 2023 15:15:17 +0100 Subject: [PATCH 172/195] Remove old code --- .github/workflows/old/OLD_document.yml | 64 ----------------- .../workflows/old/OLD_pythonpublish_sdist.yml | 34 --------- .github/workflows/old/OLD_tests.yml | 70 ------------------- 3 files changed, 168 deletions(-) delete mode 100644 .github/workflows/old/OLD_document.yml delete mode 100644 .github/workflows/old/OLD_pythonpublish_sdist.yml delete mode 100644 .github/workflows/old/OLD_tests.yml diff --git a/.github/workflows/old/OLD_document.yml b/.github/workflows/old/OLD_document.yml deleted file mode 100644 index 0c5b3deee..000000000 --- a/.github/workflows/old/OLD_document.yml +++ /dev/null @@ -1,64 +0,0 @@ -name: Documentation - -# on: -# push: -# branches: ["master"] -# workflow_dispatch: -on: - workflow_dispatch: - -permissions: - id-token: write # This is required for requesting the JWT - contents: read # This is required for actions/checkout - -env: - AWS_REGION: eu-west-1 - AWS_SESSION_NAME: darwinPyDocumentation - -jobs: - generate-docs: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: [3.8, 3.9, "3.10"] - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install poetry - poetry install --all-extras --no-interaction --no-root - pip install wheel - pip install --upgrade setuptools - pip install --editable ".[test,ml,medical,dev]" - pip install torch torchvision - pip install -U sphinx - # Locking mistune version so m2r works. More info on issue: - # https://github.com/miyakogi/m2r/issues/66 - pip install mistune==0.8.4 - pip install m2r - pip install sphinx_rtd_theme - - name: Parse README - run: | - rm -f README.rst - m2r README.md - mv README.rst source/ - - name: Generate new docs - env: - PYTHONPATH: "." 
- run: | - rm -rf docs/* - sphinx-apidoc -f -o source darwin darwin/future - sphinx-build -b html source/ docs/ -W - - name: Setup access to AWS - uses: aws-actions/configure-aws-credentials@v2 - with: - role-to-assume: ${{ secrets.DARWIN_PY_AWS_GITHUB_CICD_ROLE }} - role-session-name: ${{ env.AWS_SESSION_NAME }} - aws-region: ${{ env.AWS_REGION }} - - name: Upload docs to S3 - run: aws s3 cp docs/ s3://darwin-py-sdk.v7labs.com/ --recursive diff --git a/.github/workflows/old/OLD_pythonpublish_sdist.yml b/.github/workflows/old/OLD_pythonpublish_sdist.yml deleted file mode 100644 index b9f881b9f..000000000 --- a/.github/workflows/old/OLD_pythonpublish_sdist.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: Upload Python Package (SDIST) - -# on: -# release: -# types: [created] -# workflow_dispatch: - -on: - workflow_dispatch: - -jobs: - deploy: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: "3.9" - - run: pip install pip --upgrade - - name: Setup Poetry - uses: abatilo/actions-poetry@v2 - with: - poetry-version: "1.3.1" - - name: Install dependencies - run: | - poetry install --no-interaction --no-root --all-extras -vvv - poetry build - - name: Publish - env: - POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }} - POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: | - poetry publish diff --git a/.github/workflows/old/OLD_tests.yml b/.github/workflows/old/OLD_tests.yml deleted file mode 100644 index a73073106..000000000 --- a/.github/workflows/old/OLD_tests.yml +++ /dev/null @@ -1,70 +0,0 @@ -name: Testing - -# on: -# push: -# branches: -# - master -# pull_request: - -on: - workflow_dispatch: - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -jobs: - version_test: - name: "Testing installs on other versions" - strategy: - fail-fast: false - matrix: - python-version: ["3.7"] - poetry-version: ["1.3.1"] - os: [ubuntu-latest, windows-latest] - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Upgrade pip - run: python -m pip install --upgrade pip - - name: Setup Poetry - uses: abatilo/actions-poetry@v2 - with: - poetry-version: ${{ matrix.poetry-version }} - - name: Install dependencies - run: | - poetry install --no-interaction --no-root -vvv - pip install wheel - pip install --upgrade setuptools - pip install --editable "." 
- ci: - strategy: - fail-fast: false - matrix: - python-version: ["3.8", "3.9", "3.10"] - poetry-version: ["1.3.1"] - os: [ubuntu-latest, windows-latest] - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Upgrade pip - run: python -m pip install --upgrade pip - - name: Setup Poetry - uses: abatilo/actions-poetry@v2 - with: - poetry-version: ${{ matrix.poetry-version }} - - name: Install dependencies - run: | - poetry install --no-interaction --no-root --all-extras -vvv - pip install wheel - pip install --upgrade setuptools - pip install --editable ".[test,ml,medical,dev]" - pip install pytest pytest-describe - - name: Run Tests - run: python -m pytest -W ignore::DeprecationWarning From 67b46049e61c41119651a528c4674fa53cbb4570 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 13 Sep 2023 14:53:49 +0100 Subject: [PATCH 173/195] Slack message sender --- .github/workflows/JOB_slack_message.yml | 27 +++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 .github/workflows/JOB_slack_message.yml diff --git a/.github/workflows/JOB_slack_message.yml b/.github/workflows/JOB_slack_message.yml new file mode 100644 index 000000000..b85a1d3e9 --- /dev/null +++ b/.github/workflows/JOB_slack_message.yml @@ -0,0 +1,27 @@ +name: slack-notify +run-name: Slack Notify + +on: + workflow_call: + inputs: + message: + type: string + description: 'The message to send' + required: true + +jobs: + slack-notify: + name: Slack Notification + runs-on: ubuntu-latest + steps: + - name: Send Slack Notification + run: | + PAYLOAD=$(cat < Date: Wed, 13 Sep 2023 14:54:03 +0100 Subject: [PATCH 174/195] Release use slack action --- .github/workflows/EVENT_release.yml | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/.github/workflows/EVENT_release.yml b/.github/workflows/EVENT_release.yml index b08d51d96..4e1d29550 100644 --- a/.github/workflows/EVENT_release.yml +++ b/.github/workflows/EVENT_release.yml @@ -111,13 +111,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Notify Slack - uses: rtCamp/action-slack-notify@v2 - env: - SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }} - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} - SLACK_USERNAME: ${{ secrets.SLACK_USERNAME }} - SLACK_ICON: ${{ secrets.SLACK_ICON }} - SLACK_MESSAGE: | - *Darwin-py ${{ env.release_tag }} has been released 🎉* - ${{ github.event.release.html_url }} - ${{ github.event.release.body }} + uses: ./.github/workflows/JOB_slack_message.yml + with: + message: | + :rocket: *${{ env.release_tag }}* has been released! + :link: + - https://pypi.org/project/darwin-py From 1f91c6200f82579804d09723100036d060ae4c26 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 13 Sep 2023 15:18:37 +0100 Subject: [PATCH 175/195] Release process document --- docs/release_process.md | 95 +++++++++++++---------------------------- 1 file changed, 29 insertions(+), 66 deletions(-) diff --git a/docs/release_process.md b/docs/release_process.md index ed9fe27d2..9f70806b3 100644 --- a/docs/release_process.md +++ b/docs/release_process.md @@ -7,8 +7,7 @@ How to create releases of `darwin-py`. - [Introduction](#introduction) - [Make a standard release](#make-a-standard-release) - [Steps](#steps) -- [Make a hotfix release](#make-a-hotfix-release) - - [Steps](#steps-1) +- [Make a non-standard release](#make-a-non-standard-release) - [Contact](#contact) @@ -24,74 +23,38 @@ These are the steps for releasing a passing `master` branch. 
If you are releasi ### Steps +**Pre-release** + 1. Ensure all tickets to be included are QA'd and to be included in a release. Speak to PMs and code owners if unsure. 2. Once passed on QA, merge all PRs to be included into `master` -3. Ensure that `master` tests are still passing after merge. -4. Checkout a new branch for the version bump. -5. Open `pyproject.toml`, and update the version in there. Maintain semver, and agree with owner if you are intending to make a release that is not a minor increment. -``` -[tool.poetry] -name = "darwin-py" -version = "0.8.7" # update this when you change the version - See: https://peps.python.org/pep-0440/ -``` -6. Add, commit, and open a PR for the version bump. -7. Once accepted, merge, and tag `master` with the version you have set, prefixed with "v". E.g. if you set the version to `1.2.3`, then tag as `v1.2.3` -```shell -$ git checkout master -$ git tag v1.2.3 master # for example -$ git push origin v1.2.3 # again, for example -``` -8. Push the tag. -9. In the ["Draft a new release"](https://github.com/v7labs/darwin-py/releases/new) section of github, select the tag you created. Write an appropriate summary of the release (see Engineering team guidance on language), and create the release. Click "Publish release" -10. Ensure release is successful in CI/CD -11. Ensure that release appears in Pypi. -**12. Announce the release and update the tickets** -## Make a hotfix release -Making a hotfix release is _broadly_ the same as the process for [making a normal release](#make-a-standard-release), but first you need to `cherry-pick` the items you need into a hotfix branch. -### Steps +**Release** + +This is the process for making a simple semver _patch_ release. If you need to make a _minor_ or _major_ version release, follow the instructions in [Making a non-standard release](#make-a-non-standard-release). + +1. Run the script `deploy/create_release.sh` - follow the prompts, and the script will: + * Increment the version in all places it exists + * Commit this, and push these changes, along with a tag for the version + * Check that the script didn't throw any errors; if it didn't, it will prompt you to look at the Actions dialog to see whether the tests and quality checks all pass. + **NB: If checks fail, the release will fail** +2. A draft release will be created in GitHub; you can publish it with the UI or the `gh` CLI + +**Done!** + +**Make sure you update the tickets to 'Done'** + +## Make a non-standard release + +Making a non-standard release is _broadly_ the same as the process for [making a normal release](#make-a-standard-release), but with a few more steps: + +1. Run the script `python deploy/increase_version.py` and use `--patch`, `--minor`, or `--major` to set the version change you want to make. If you need to make a more complex version change (changing more than one aspect of the version), you can pass `--new-version` followed by a valid semver version. +2. Commit and push the changes to `master` +3. Add a tag using `git tag v0.0.0`, substituting the right version number (**NB: the preceding 'v' is necessary for release**) +4. Push the tag with `git push origin v0.0.0`, again substituting the right version number +5. A draft release will be created in GitHub; you can publish it with the UI or the `gh` CLI -1. Create a hotfix branch to contain your new release, based on the **last release, not master**.
-```shell -$ git checkout v1.2.2 # for example - should be the last released tag -$ git checkout -b hotfix_branch_v1_2_3 # naming discretionary, branch won't exist long -``` -2. Two possible routes forward: - 1. If your branch is based on other, unmerged branches, then you can merge them into your hotfix branch: - ```shell - $ git merge thing_i_need_in_hotfix - $ git merge other_thing_i_need_in_hotfix - ``` - - You may have to settle merge conflicts. - - 2. If your branch is based on already-merged-into-master items, then you need to use `cherry-pick` to include them in your branch. We use a squash-merge on PR, so you can cherry pick the squash merge of each PR, this will be the commit number that was actually merged in. If for some reason you're dealing with merges not squash-merged, you need to cherry pick each commit from the branch in, in order that they happened. - ```sh - $ git cherry-pick [commit-number] - $ git cherry-pick [other-commit-number] - ...and so on - ``` -3. After these, you will have a branch with the items you need to include in a hot-fix. Open `pyproject.toml`, and update the version in there. Maintain semver, and agree with owner if you are intending to make a release that is not a minor increment. -``` -[tool.poetry] -name = "darwin-py" -version = "0.8.7" # update this when you change the version - See: https://peps.python.org/pep-0440/ -``` -4. Add, commit, and push the branch. -5. Once accepted, tag the branch with the version you have set, prefixed with "v". E.g. if you set the version to `1.2.3`, then tag as `v1.2.3` -```shell -$ git checkout [your hotfix branch name] -$ git tag v1.2.3 [your hotfix branch name] # for example -$ git push origin v1.2.3 # again, for example -``` -6. Push the tag. -7. In the ["Draft a new release"](https://github.com/v7labs/darwin-py/releases/new) section of github, select the tag you created, and the hotfix branch. Write an appropriate summary of the release (see Engineering team guidance on language), and create the release. Click "Publish release" -8. Ensure release is successful in CI/CD -9. Ensure release is successful in Pypi -**10. Announce the release and update the tickets** +**Make sure you update the tickets to 'Done'** ## Contact -Any issues, reach out in the appropriate channels on slack, or contact owen@v7labs.com - Slack is faster. \ No newline at end of file +Any issues, reach out in the appropriate channels on slack, or contact owen@v7labs.com - Slack is faster. 
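The release_process.md above leans on `deploy/increase_version.py`, a script this patch series references but never shows. As a rough sketch only, assuming the `--patch`/`--minor`/`--major` flags described in the doc and nothing about the real implementation, the core bump logic might look like:

```python
#!/usr/bin/env python3
# Hypothetical sketch of the semver bump behind deploy/increase_version.py.
# The flag names come from release_process.md; everything else is assumed.
import argparse
import re

SEMVER = re.compile(r"^(\d+)\.(\d+)\.(\d+)$")


def bump(version: str, part: str) -> str:
    """Return `version` with the requested semver component incremented."""
    match = SEMVER.match(version)
    if not match:
        raise ValueError(f"{version!r} is not a valid semver version")
    major, minor, patch = (int(g) for g in match.groups())
    if part == "major":
        return f"{major + 1}.0.0"
    if part == "minor":
        return f"{major}.{minor + 1}.0"
    return f"{major}.{minor}.{patch + 1}"


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Bump the project version")
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("--patch", dest="part", action="store_const", const="patch")
    group.add_argument("--minor", dest="part", action="store_const", const="minor")
    group.add_argument("--major", dest="part", action="store_const", const="major")
    args = parser.parse_args()
    print(bump("0.8.7", args.part))  # --patch would print 0.8.8
```

Whatever the real script does, `create_release.sh` (patch 171) expects it to leave the new version in a `VERSION` file and to touch both `pyproject.toml` and `darwin/version/__init__.py`, since those are the files the shell script stages and commits.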
From fc7d4780b1812ea4dd8de473a8324f12ac98e50a Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 13 Sep 2023 15:20:53 +0100 Subject: [PATCH 176/195] Remove the testmerge file --- testmerge | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 testmerge diff --git a/testmerge b/testmerge deleted file mode 100644 index e69de29bb..000000000 From b7a57e4af2ec94f4fc7a96511455633147dd9203 Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 13 Sep 2023 16:38:43 +0100 Subject: [PATCH 177/195] Fully enable merge-to-master behaviour --- .github/workflows/EVENT_merge_to_master.yml | 3 ++- .github/workflows/JOB_reverse_commit_on_fails.yml | 7 ++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/EVENT_merge_to_master.yml b/.github/workflows/EVENT_merge_to_master.yml index 97a2d83da..90c981f7d 100644 --- a/.github/workflows/EVENT_merge_to_master.yml +++ b/.github/workflows/EVENT_merge_to_master.yml @@ -5,6 +5,7 @@ on: push: branches: - master + workflow_call: concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -24,7 +25,7 @@ jobs: needs: [run_tests, documentation] if : ${{ failure() }} name: Reverse commit on fails - uses: ./.github/workflows/JOB_debug_contexts.yml + uses: ./.github/workflows/JOB_reverse_commit_on_fails.yml success: # Step for the flow to have to indicate success needs: [run_tests, documentation] diff --git a/.github/workflows/JOB_reverse_commit_on_fails.yml b/.github/workflows/JOB_reverse_commit_on_fails.yml index 6da19b682..43401c727 100644 --- a/.github/workflows/JOB_reverse_commit_on_fails.yml +++ b/.github/workflows/JOB_reverse_commit_on_fails.yml @@ -27,7 +27,8 @@ jobs: - name: Reverse commit run: | echo "Just testing atm" - # git reset --hard HEAD~1 - # git push origin $BRANCH_NAME --force + git reset --hard HEAD~1 + git push origin $BRANCH_NAME --force - # NB: This will run the parent workflow _again_ as it is a push to master + recall-master-merge-flow: + uses: ./.github/workflows/EVENT_merge_to_master.yml From f7d702a3a9c218532e5e3494745272875cc3dd1c Mon Sep 17 00:00:00 2001 From: Owen Date: Wed, 13 Sep 2023 17:27:27 +0100 Subject: [PATCH 178/195] Set success on master merge action --- .github/workflows/EVENT_merge_to_master.yml | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/.github/workflows/EVENT_merge_to_master.yml b/.github/workflows/EVENT_merge_to_master.yml index 90c981f7d..f40a528a4 100644 --- a/.github/workflows/EVENT_merge_to_master.yml +++ b/.github/workflows/EVENT_merge_to_master.yml @@ -27,13 +27,22 @@ jobs: name: Reverse commit on fails uses: ./.github/workflows/JOB_reverse_commit_on_fails.yml - success: # Step for the flow to have to indicate success + success: needs: [run_tests, documentation] if : ${{ success() }} name: Success runs-on: ubuntu-latest steps: - - name: Success - run: echo "Success" + - name: Set branch status to success + uses: actions/github-script@v3 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + github.repos.createCommitStatus({ + owner: context.repo.owner, + repo: context.repo.repo, + sha: context.sha, + state: 'success' + }) From 7babb9389da23cb935034808e9bdcec2b5fa23ac Mon Sep 17 00:00:00 2001 From: Owen Date: Mon, 18 Sep 2023 16:42:42 +0100 Subject: [PATCH 179/195] Ticket moving script --- deploy/__init__.py | 0 deploy/_move_tickets_to_done.py | 54 +++++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+) create mode 100644 deploy/__init__.py create mode 100644 deploy/_move_tickets_to_done.py diff --git 
a/deploy/__init__.py b/deploy/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/deploy/_move_tickets_to_done.py b/deploy/_move_tickets_to_done.py new file mode 100644 index 000000000..80062ba50 --- /dev/null +++ b/deploy/_move_tickets_to_done.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python3 + +import argparse +import json +import os +import re + +from confirm_main_branch_deployability import ExitCodes, _exit, _run_command + +parser = argparse.ArgumentParser(description="Move tickets in release to done.") +parser.add_argument("--release-tag", "--tag", "-t", help="The release tag to move tickets from", required=True) +parser.add_argument("--dry-run", action="store_true", help="Don't actually move tickets to done") + +args = parser.parse_args() +release_tag = args.release_tag + +LINEAR_API_KEY = os.environ.get("LINEAR_API_KEY") + +print(f"\nMoving tickets in release {release_tag} to done") + +if dry_run := args.dry_run: + print("Dry run, not actually moving tickets") + +# get details +body, error = _run_command("gh", "release", "view", release_tag, "--json", "body") +assert error == 0, _exit("Failed to get last release body", ExitCodes.GETTING_RELEASE_METADATA_THREW_EXITCODE) + +body_parsed = json.loads(body) +body = body_parsed["body"].split("\n") + +TICKET_MATCHER = re.compile(r"^\* \[([A-z]+-[0-9]+)\]") + +wrong_lines = [line for line in body if line.startswith("* ") and not TICKET_MATCHER.match(line)] +body_lines = [line.upper() for line in body if TICKET_MATCHER.match(line)] + +unmoved_tickets = len(wrong_lines) - len(body_lines) +moved_tickets = len(body_lines) + +print("Tickets to move to done:") +for line in body_lines: + ticket = TICKET_MATCHER.match(line).group(1) # type: ignore + print(f" - {ticket}") + +if unmoved_tickets > 0: + print("\nWARNING: Some PRs weren't properly formatted") + print(f"There were {unmoved_tickets} PRs that weren't properly formatted") + print("These will need moving manually.") + print() + [print(line) for line in wrong_lines] + exit(232) + +print("\n") + +# TODO move tickets to done From 078a57c605261dc8a5da9ae033e16339c5839668 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 19 Sep 2023 10:01:24 +0100 Subject: [PATCH 180/195] WIP --- deploy/_move_tickets_to_done.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/deploy/_move_tickets_to_done.py b/deploy/_move_tickets_to_done.py index 80062ba50..ade220a28 100644 --- a/deploy/_move_tickets_to_done.py +++ b/deploy/_move_tickets_to_done.py @@ -37,9 +37,11 @@ moved_tickets = len(body_lines) print("Tickets to move to done:") +ticket_codes = [] for line in body_lines: ticket = TICKET_MATCHER.match(line).group(1) # type: ignore print(f" - {ticket}") + ticket_codes.append(ticket) if unmoved_tickets > 0: print("\nWARNING: Some PRs weren't properly formatted") @@ -52,3 +54,24 @@ print("\n") # TODO move tickets to done +if not dry_run: + for ticket in ticket_codes: + query = f""" + mutation IssueUpdate {{ + issueUpdate( + id: "{ticket}", + input: {{ + stateId: "Done", + }} + ) {{ + success + issue {{ + id + title + state {{ + id + name + }} + }} + }} + }}""" From a3525bdcaf34a6212bbd4a7db868d31b47033b0c Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 26 Sep 2023 09:54:23 +0100 Subject: [PATCH 181/195] Placeholder in ticket --- .github/workflows/EVENT_release.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/EVENT_release.yml b/.github/workflows/EVENT_release.yml index 4e1d29550..8b18a9647 100644 --- a/.github/workflows/EVENT_release.yml +++ 
b/.github/workflows/EVENT_release.yml @@ -104,6 +104,8 @@ jobs: # poetry publish echo "Publishing to test.pypi.org - Emulated" + # Linear tickets update + notify_release: # Slack notify needs: [release, test_release] From aade733030fcb455914c8ed75415243270a4ab8d Mon Sep 17 00:00:00 2001 From: Owen Date: Fri, 29 Sep 2023 13:34:47 +0100 Subject: [PATCH 182/195] WIP, close to done --- .github/workflows/{e2e.yml => JOB_e2e.yml} | 0 .github/workflows/JOB_slack_message.yml | 12 +++++++++--- 2 files changed, 9 insertions(+), 3 deletions(-) rename .github/workflows/{e2e.yml => JOB_e2e.yml} (100%) diff --git a/.github/workflows/e2e.yml b/.github/workflows/JOB_e2e.yml similarity index 100% rename from .github/workflows/e2e.yml rename to .github/workflows/JOB_e2e.yml diff --git a/.github/workflows/JOB_slack_message.yml b/.github/workflows/JOB_slack_message.yml index b85a1d3e9..ac576ccb5 100644 --- a/.github/workflows/JOB_slack_message.yml +++ b/.github/workflows/JOB_slack_message.yml @@ -9,6 +9,12 @@ on: description: 'The message to send' required: true + icon: + type: string + description: 'The icon to use' + required: false + default: ':snake:' + jobs: slack-notify: name: Slack Notification @@ -18,10 +24,10 @@ jobs: run: | PAYLOAD=$(cat < Date: Sun, 1 Oct 2023 16:09:56 +0100 Subject: [PATCH 183/195] Unify master e2e dir with branch --- e2e_tests/teardown_tests.py | 1 - 1 file changed, 1 deletion(-) delete mode 100644 e2e_tests/teardown_tests.py diff --git a/e2e_tests/teardown_tests.py b/e2e_tests/teardown_tests.py deleted file mode 100644 index 8862d2f8f..000000000 --- a/e2e_tests/teardown_tests.py +++ /dev/null @@ -1 +0,0 @@ -# TODO: In IO:1336 From 98fb564aa6e75e7890f60577e92f0828163a3020 Mon Sep 17 00:00:00 2001 From: Owen Date: Sun, 1 Oct 2023 16:45:08 +0100 Subject: [PATCH 184/195] Implementation of slack messaging --- .github/workflows/EVENT_release.yml | 25 +++++++++++++++++++++++-- .github/workflows/JOB_e2e.yml | 13 ++++++++++++- .github/workflows/JOB_slack_message.yml | 12 +++++++++++- 3 files changed, 46 insertions(+), 4 deletions(-) diff --git a/.github/workflows/EVENT_release.yml b/.github/workflows/EVENT_release.yml index 8b18a9647..0a31b87fd 100644 --- a/.github/workflows/EVENT_release.yml +++ b/.github/workflows/EVENT_release.yml @@ -26,6 +26,7 @@ env: release_id: ${{ inputs.release_id || github.event.release.id }} release_tag: ${{ inputs.release_tag || github.event.release.tag_name }} is_draft: ${{ inputs.is_draft || github.event.release.draft }} + is_scheduled: ${{ github.event_name == 'schedule' }} jobs: validate_tag: @@ -107,7 +108,6 @@ jobs: # Linear tickets update notify_release: - # Slack notify needs: [release, test_release] if: always() && contains(needs.*.result, 'success') runs-on: ubuntu-latest @@ -115,7 +115,28 @@ jobs: - name: Notify Slack uses: ./.github/workflows/JOB_slack_message.yml with: + icon: ":rocket:" + at_team: ${{ env.is_scheduled || !env.is_draft }} message: | - :rocket: *${{ env.release_tag }}* has been released! + :tada: *${{ env.release_tag }}* has been released! :link: - https://pypi.org/project/darwin-py + - ${{ github.event.release.html_url }} + + notify_failed_release: + needs: [release, test_release] + if: always() && contains(needs.*.result, 'failure') + runs-on: ubuntu-latest + steps: + - name: Notify Slack + uses: ./.github/workflows/JOB_slack_message.yml + with: + icon: ":warning:" + at_team: true + message: | + :warning: *${{ env.release_tag }}* has failed to be released! 
+ + *An error occurred performing release, and you may need to release manually.* + + :link: + - ${{ github.event.release.html_url }} diff --git a/.github/workflows/JOB_e2e.yml b/.github/workflows/JOB_e2e.yml index c5ada389a..ad3bae632 100644 --- a/.github/workflows/JOB_e2e.yml +++ b/.github/workflows/JOB_e2e.yml @@ -47,13 +47,24 @@ jobs: if: failure() && github.event_name == 'schedule' runs-on: ubuntu-latest steps: + - name: Notify Slack + uses: ./.github/workflows/JOB_slack_message.yml + with: + icon: ":warning:" + at_team: true + message: | + *Nightly E2E run failed* + + :link: + - https://github.com/v7labs/darwin-py/actions/runs/${{ github.run_id }} + :warning: ${{ github.workflow }} failed - name: Send Slack Notification run: | PAYLOAD=$(cat <: https://github.com/v7labs/darwin-py/actions/runs/${{ github.run_id }}", + "text": "", "icon_emoji": "${{ vars.SLACK_ICON }}" } EOF diff --git a/.github/workflows/JOB_slack_message.yml b/.github/workflows/JOB_slack_message.yml index ac576ccb5..4c8638121 100644 --- a/.github/workflows/JOB_slack_message.yml +++ b/.github/workflows/JOB_slack_message.yml @@ -15,6 +15,16 @@ on: required: false default: ':snake:' + at_team: + type: boolean + description: 'Whether to hail the developers in the message' + required: false + default: false + +env: + PREFIX: if(${{ inputs.at_team }}, '${{ vars.SLACK_TEAM_TAG}}', '') + + jobs: slack-notify: name: Slack Notification @@ -26,7 +36,7 @@ jobs: { "channel": "#${{ vars.SLACK_CHANNEL }}", "username": "${{ vars.SLACK_USERNAME }}", - "text": "${{ inputs.message }}", + "text": "${{ env.PREFIX }}${{ inputs.message }}", "icon_emoji": "${{ inputs.icon || vars.SLACK_ICON }}" }) EOF From 610f6f37004581ec89ae93aef0d4788b67eaf12d Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 3 Oct 2023 11:20:26 +0100 Subject: [PATCH 185/195] Updating reverse commit --- .github/workflows/JOB_reverse_commit_on_fails.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/JOB_reverse_commit_on_fails.yml b/.github/workflows/JOB_reverse_commit_on_fails.yml index 43401c727..3d846a987 100644 --- a/.github/workflows/JOB_reverse_commit_on_fails.yml +++ b/.github/workflows/JOB_reverse_commit_on_fails.yml @@ -26,7 +26,6 @@ jobs: - name: Reverse commit run: | - echo "Just testing atm" git reset --hard HEAD~1 git push origin $BRANCH_NAME --force From a1ce7495dc8f2e7783ea0d1def75be67e636094a Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 3 Oct 2023 11:52:03 +0100 Subject: [PATCH 186/195] PR Comments --- .github/workflows/EVENT_merge_to_master.yml | 10 ++++-- .github/workflows/EVENT_release.yml | 2 +- .../workflows/JOB_reverse_commit_on_fails.yml | 33 ------------------- deploy/create_release.sh | 2 +- 4 files changed, 9 insertions(+), 38 deletions(-) delete mode 100644 .github/workflows/JOB_reverse_commit_on_fails.yml diff --git a/.github/workflows/EVENT_merge_to_master.yml b/.github/workflows/EVENT_merge_to_master.yml index f40a528a4..380f6cad9 100644 --- a/.github/workflows/EVENT_merge_to_master.yml +++ b/.github/workflows/EVENT_merge_to_master.yml @@ -21,11 +21,15 @@ jobs: name: Documentation uses: ./.github/workflows/JOB_generate_documentation.yml - reverse_commit_on_fails: + warn_on_fail: needs: [run_tests, documentation] if : ${{ failure() }} - name: Reverse commit on fails - uses: ./.github/workflows/JOB_reverse_commit_on_fails.yml + name: Slack message us on fail + uses: ./.github/workflows/JOB_slack_message.yml + with: + at_team: true + icon: ':warning:' + message: 'Master is failing after a push event, please review at ${{ 
github.event.repository.html_url }}/actions/runs/${{ github.run_id }}' success: needs: [run_tests, documentation] diff --git a/.github/workflows/EVENT_release.yml b/.github/workflows/EVENT_release.yml index 0a31b87fd..8e1472536 100644 --- a/.github/workflows/EVENT_release.yml +++ b/.github/workflows/EVENT_release.yml @@ -102,7 +102,7 @@ jobs: POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.TEST_PYPI_USERNAME }} POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.TEST_PYPI_PASSWORD }} run: | - # poetry publish + poetry publish echo "Publishing to test.pypi.org - Emulated" # Linear tickets update diff --git a/.github/workflows/JOB_reverse_commit_on_fails.yml b/.github/workflows/JOB_reverse_commit_on_fails.yml deleted file mode 100644 index 3d846a987..000000000 --- a/.github/workflows/JOB_reverse_commit_on_fails.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: reverse-commit-on-fails -run-name: Reverse commit on fails - -on: - workflow_call: - - -# Run when master is failing due to a merge, and you want to reverse the merge -# and push the reverse commit to master. - -permissions: - contents: write - -jobs: - step-master-back-one-commit: - env: - BRANCH_NAME: ${{ github.ref_name }} - name: Step master back one commit - runs-on: ubuntu-latest - steps: - - - name: Checkout merge branch [${{ env.BRANCH_NAME }}] - uses: actions/checkout@v2 - with: - ref: $BRANCH_NAME - - - name: Reverse commit - run: | - git reset --hard HEAD~1 - git push origin $BRANCH_NAME --force - - recall-master-merge-flow: - uses: ./.github/workflows/EVENT_merge_to_master.yml diff --git a/deploy/create_release.sh b/deploy/create_release.sh index f1823ee3b..45640f46a 100644 --- a/deploy/create_release.sh +++ b/deploy/create_release.sh @@ -29,7 +29,7 @@ if [ -z "$REMOTE" ]; then fi # Increment the version number one patch version -"$WORKING_DIR/increase_version.sh --patch --force" +"python3 $WORKING_DIR/increase_version.py --patch --force" if [ $? -ne 0 ]; then echo "ERROR: Failed to increase the version number." 
exit 1 From 3e9ebc62466efe4c2396d0c074986904d1852ed8 Mon Sep 17 00:00:00 2001 From: Nathan Perkins Date: Sun, 1 Oct 2023 17:45:00 +0100 Subject: [PATCH 187/195] [IO-1754] Darwin-py v2 Dogfooding refactors (#670) * client refactor * item object rename * delete/create classmethods * core item rename * meta object rename * Client naming consistency * remove useless asserts * remove useless asserts * collect refactor * mypy cleanup * streamlining * expanding on exceptions * __init__.py backend import changes * team backend restructure for circular reference * comment * changes to collect operation, now invalidates cache if adding to query * restructure exceptions module --- darwin/future/core/backend.py | 24 ------ darwin/future/core/client.py | 8 +- darwin/future/core/datasets/__init__.py | 4 + darwin/future/core/datasets/create_dataset.py | 8 +- darwin/future/core/datasets/get_dataset.py | 8 +- darwin/future/core/datasets/list_datasets.py | 10 +-- darwin/future/core/datasets/remove_dataset.py | 6 +- darwin/future/core/items/__init__.py | 2 + darwin/future/core/items/get.py | 6 +- darwin/future/core/items/move_items.py | 5 +- darwin/future/core/team/__init__.py | 5 ++ darwin/future/core/team/get_raw.py | 10 +++ darwin/future/core/team/get_team.py | 25 ++++++ darwin/future/core/types/common.py | 8 +- darwin/future/core/types/query.py | 78 ++++++++++--------- darwin/future/core/utils/pathutils.py | 2 +- darwin/future/core/workflows/__init__.py | 3 + darwin/future/core/workflows/get_workflow.py | 12 +-- darwin/future/core/workflows/get_workflows.py | 8 +- .../future/core/workflows/list_workflows.py | 10 +-- darwin/future/data_objects/dataset.py | 4 +- darwin/future/data_objects/release.py | 4 +- darwin/future/data_objects/team.py | 56 +++---------- darwin/future/data_objects/workflow.py | 30 +++---- .../{exceptions/base.py => exceptions.py} | 42 ++++++++++ darwin/future/exceptions/__init__.py | 1 - darwin/future/exceptions/client.py | 9 --- darwin/future/exceptions/core/__init__.py | 0 darwin/future/exceptions/core/datasets.py | 13 ---- darwin/future/exceptions/files.py | 5 -- darwin/future/exceptions/meta.py | 5 -- darwin/future/meta/client.py | 20 ++--- darwin/future/meta/objects/base.py | 14 ++-- darwin/future/meta/objects/dataset.py | 72 +++++++++-------- darwin/future/meta/objects/stage.py | 15 ++-- darwin/future/meta/objects/team.py | 28 +++---- darwin/future/meta/objects/team_member.py | 10 +-- darwin/future/meta/objects/workflow.py | 40 ++++------ darwin/future/meta/queries/dataset.py | 18 ++--- darwin/future/meta/queries/stage.py | 17 ++-- darwin/future/meta/queries/team_member.py | 14 ++-- darwin/future/meta/queries/workflow.py | 46 +++++++---- darwin/future/tests/core/datasets/fixtures.py | 18 ++--- .../core/datasets/test_create_dataset.py | 12 ++- .../core/datasets/test_delete_dataset.py | 8 +- .../tests/core/datasets/test_get_dataset.py | 12 +-- .../tests/core/datasets/test_list_datasets.py | 12 ++- darwin/future/tests/core/fixtures.py | 32 ++++---- .../future/tests/core/items/test_get_items.py | 15 ++-- .../tests/core/items/test_move_items.py | 34 ++++---- darwin/future/tests/core/test_client.py | 11 ++- darwin/future/tests/core/test_query.py | 29 +++---- .../tests/core/workflows/test_get_workflow.py | 19 ++--- .../core/workflows/test_get_workflows.py | 19 ++--- .../core/workflows/test_list_workflows.py | 19 ++--- darwin/future/tests/data_objects/fixtures.py | 2 +- .../test_general_darwin_objects.py | 12 +-- darwin/future/tests/data_objects/test_team.py | 21 ++--- 
.../data_objects/workflow/test_wfdataset.py | 18 ++--- .../data_objects/workflow/test_wfedge.py | 6 +- .../data_objects/workflow/test_wfstage.py | 10 +-- .../workflow/test_wfstage_config.py | 6 +- .../data_objects/workflow/test_wfuser.py | 6 +- .../data_objects/workflow/test_workflow.py | 10 +-- darwin/future/tests/meta/fixtures.py | 6 +- darwin/future/tests/meta/objects/fixtures.py | 26 +++---- .../tests/meta/objects/test_datasetmeta.py | 57 +++++++------- .../tests/meta/objects/test_stagemeta.py | 40 ++++++---- .../tests/meta/objects/test_teammeta.py | 12 +-- .../future/tests/meta/queries/test_dataset.py | 59 +++++++------- .../future/tests/meta/queries/test_stage.py | 46 ++++++----- .../tests/meta/queries/test_team_member.py | 38 ++++----- .../tests/meta/queries/test_workflow.py | 50 ++++++------ darwin/future/tests/meta/test_client.py | 14 ++-- 74 files changed, 692 insertions(+), 692 deletions(-) delete mode 100644 darwin/future/core/backend.py create mode 100644 darwin/future/core/team/__init__.py create mode 100644 darwin/future/core/team/get_raw.py create mode 100644 darwin/future/core/team/get_team.py rename darwin/future/{exceptions/base.py => exceptions.py} (74%) delete mode 100644 darwin/future/exceptions/__init__.py delete mode 100644 darwin/future/exceptions/client.py delete mode 100644 darwin/future/exceptions/core/__init__.py delete mode 100644 darwin/future/exceptions/core/datasets.py delete mode 100644 darwin/future/exceptions/files.py delete mode 100644 darwin/future/exceptions/meta.py diff --git a/darwin/future/core/backend.py b/darwin/future/core/backend.py deleted file mode 100644 index 409ca9ef9..000000000 --- a/darwin/future/core/backend.py +++ /dev/null @@ -1,24 +0,0 @@ -from typing import List, Optional, Tuple - -from darwin.future.core.client import Client -from darwin.future.data_objects.team import Team, TeamMember - - -def get_team(client: Client, team_slug: Optional[str] = None) -> Team: - """Returns the team with the given slug""" - if not team_slug: - team_slug = client.config.default_team - response = client.get(f"/teams/{team_slug}/") - return Team.parse_obj(response) - - -def get_team_members(client: Client) -> Tuple[List[TeamMember], List[Exception]]: - response = client.get("/memberships") - members = [] - errors = [] - for item in response: - try: - members.append(TeamMember.parse_obj(item)) - except Exception as e: - errors.append(e) - return (members, errors) diff --git a/darwin/future/core/client.py b/darwin/future/core/client.py index 24ad6f291..8787170b5 100644 --- a/darwin/future/core/client.py +++ b/darwin/future/core/client.py @@ -9,10 +9,8 @@ from pydantic import BaseModel, root_validator, validator from requests.adapters import HTTPAdapter, Retry -from darwin.future.core.types.common import QueryString -from darwin.future.exceptions.client import NotFound, Unauthorized - -JSONType = Union[Dict[str, Any], List[Dict[str, Any]]] # type: ignore +from darwin.future.core.types.common import JSONType, QueryString +from darwin.future.exceptions import NotFound, Unauthorized class TeamsConfig(BaseModel): @@ -125,7 +123,7 @@ class Result(BaseModel): ... 
-class Client: +class ClientCore: """Client Object to manage and make requests to the Darwin API Attributes ---------- diff --git a/darwin/future/core/datasets/__init__.py b/darwin/future/core/datasets/__init__.py index e69de29bb..a5ccb19d5 100644 --- a/darwin/future/core/datasets/__init__.py +++ b/darwin/future/core/datasets/__init__.py @@ -0,0 +1,4 @@ +from darwin.future.core.datasets.create_dataset import * +from darwin.future.core.datasets.get_dataset import * +from darwin.future.core.datasets.list_datasets import * +from darwin.future.core.datasets.remove_dataset import * diff --git a/darwin/future/core/datasets/create_dataset.py b/darwin/future/core/datasets/create_dataset.py index 4d7e678cb..1ad4596c9 100644 --- a/darwin/future/core/datasets/create_dataset.py +++ b/darwin/future/core/datasets/create_dataset.py @@ -1,10 +1,10 @@ from pydantic import parse_obj_as -from darwin.future.core.client import Client -from darwin.future.data_objects.dataset import Dataset +from darwin.future.core.client import ClientCore +from darwin.future.data_objects.dataset import DatasetCore -def create_dataset(api_client: Client, name: str) -> Dataset: +def create_dataset(api_client: ClientCore, name: str) -> DatasetCore: """ Creates a new dataset for the given team @@ -33,4 +33,4 @@ def create_dataset(api_client: Client, name: str) -> Dataset: }, ) - return parse_obj_as(Dataset, response) + return parse_obj_as(DatasetCore, response) diff --git a/darwin/future/core/datasets/get_dataset.py b/darwin/future/core/datasets/get_dataset.py index ee42fdbe8..cf3ebdc79 100644 --- a/darwin/future/core/datasets/get_dataset.py +++ b/darwin/future/core/datasets/get_dataset.py @@ -1,11 +1,11 @@ from pydantic import parse_obj_as -from darwin.future.core.client import Client +from darwin.future.core.client import ClientCore from darwin.future.core.types.common import QueryString -from darwin.future.data_objects.dataset import Dataset +from darwin.future.data_objects.dataset import DatasetCore -def get_dataset(api_client: Client, dataset_id: str) -> Dataset: +def get_dataset(api_client: ClientCore, dataset_id: str) -> DatasetCore: """ Returns a list of datasets for the given team @@ -30,4 +30,4 @@ def get_dataset(api_client: Client, dataset_id: str) -> Dataset: response = api_client.get("/datasets", QueryString({"id": str(dataset_id)})) - return parse_obj_as(Dataset, response) + return parse_obj_as(DatasetCore, response) diff --git a/darwin/future/core/datasets/list_datasets.py b/darwin/future/core/datasets/list_datasets.py index 31480cb68..0b214bc78 100644 --- a/darwin/future/core/datasets/list_datasets.py +++ b/darwin/future/core/datasets/list_datasets.py @@ -2,11 +2,11 @@ from pydantic import parse_obj_as -from darwin.future.core.client import Client -from darwin.future.data_objects.dataset import Dataset +from darwin.future.core.client import ClientCore +from darwin.future.data_objects.dataset import DatasetCore -def list_datasets(api_client: Client) -> Tuple[List[Dataset], List[Exception]]: +def list_datasets(api_client: ClientCore) -> Tuple[List[DatasetCore], List[Exception]]: """ Returns a list of datasets for the given team @@ -21,13 +21,13 @@ def list_datasets(api_client: Client) -> Tuple[List[Dataset], List[Exception]]: ------- Tuple[DatasetList, List[Exception]] """ - datasets: List[Dataset] = [] + datasets: List[DatasetCore] = [] errors: List[Exception] = [] try: response = api_client.get("/datasets") for item in response: - datasets.append(parse_obj_as(Dataset, item)) + 
datasets.append(parse_obj_as(DatasetCore, item)) except Exception as e: errors.append(e) diff --git a/darwin/future/core/datasets/remove_dataset.py b/darwin/future/core/datasets/remove_dataset.py index c94a61842..26abd7a08 100644 --- a/darwin/future/core/datasets/remove_dataset.py +++ b/darwin/future/core/datasets/remove_dataset.py @@ -1,10 +1,10 @@ from typing import Optional -from darwin.future.core.client import Client -from darwin.future.exceptions.core.datasets import DatasetNotFound +from darwin.future.core.client import ClientCore +from darwin.future.exceptions import DatasetNotFound -def remove_dataset(api_client: Client, id: int, team_slug: Optional[str] = None) -> int: +def remove_dataset(api_client: ClientCore, id: int, team_slug: Optional[str] = None) -> int: """ Creates a new dataset for the given team diff --git a/darwin/future/core/items/__init__.py b/darwin/future/core/items/__init__.py index e69de29bb..619a680fe 100644 --- a/darwin/future/core/items/__init__.py +++ b/darwin/future/core/items/__init__.py @@ -0,0 +1,2 @@ +from darwin.future.core.items.get import * +from darwin.future.core.items.move_items import * diff --git a/darwin/future/core/items/get.py b/darwin/future/core/items/get.py index 01eb81dae..8ac7a7559 100644 --- a/darwin/future/core/items/get.py +++ b/darwin/future/core/items/get.py @@ -1,11 +1,11 @@ from typing import List, Union from uuid import UUID -from darwin.future.core.client import Client +from darwin.future.core.client import ClientCore from darwin.future.core.types.common import QueryString -def get_item_ids(api_client: Client, team_slug: str, dataset_id: Union[str, int]) -> List[UUID]: +def get_item_ids(api_client: ClientCore, team_slug: str, dataset_id: Union[str, int]) -> List[UUID]: """ Returns a list of item ids for the dataset @@ -34,7 +34,7 @@ def get_item_ids(api_client: Client, team_slug: str, dataset_id: Union[str, int] def get_item_ids_stage( - api_client: Client, team_slug: str, dataset_id: Union[int, str], stage_id: Union[UUID, str] + api_client: ClientCore, team_slug: str, dataset_id: Union[int, str], stage_id: Union[UUID, str] ) -> List[UUID]: """ Returns a list of item ids for the stage diff --git a/darwin/future/core/items/move_items.py b/darwin/future/core/items/move_items.py index b6c6ac8d4..32a39b86f 100644 --- a/darwin/future/core/items/move_items.py +++ b/darwin/future/core/items/move_items.py @@ -1,11 +1,12 @@ from typing import List from uuid import UUID -from darwin.future.core.client import Client, JSONType +from darwin.future.core.client import ClientCore +from darwin.future.core.types.common import JSONType def move_items_to_stage( - api_client: Client, team_slug: str, workflow_id: UUID, dataset_id: int, stage_id: UUID, item_ids: List[UUID] + api_client: ClientCore, team_slug: str, workflow_id: UUID, dataset_id: int, stage_id: UUID, item_ids: List[UUID] ) -> JSONType: """ Moves a list of items to a stage diff --git a/darwin/future/core/team/__init__.py b/darwin/future/core/team/__init__.py new file mode 100644 index 000000000..8bc574aaa --- /dev/null +++ b/darwin/future/core/team/__init__.py @@ -0,0 +1,5 @@ +# Can't import * in this module because of a circular import problem specific to teams +# The TeamCore module can instantiate from a client, but the client needs to use the team backend module +# to request the object for team. 
To circumvent this there's a get_raw method in this module that returns +# the raw team object, which is then passed to the TeamCore module, but if we import * here it introduces the +# circular import problem. diff --git a/darwin/future/core/team/get_raw.py b/darwin/future/core/team/get_raw.py new file mode 100644 index 000000000..87555bdd4 --- /dev/null +++ b/darwin/future/core/team/get_raw.py @@ -0,0 +1,10 @@ +from requests import Session + +from darwin.future.core.types.common import JSONType + + +def get_team_raw(session: Session, url: str) -> JSONType: + """Returns the team with the given slug in raw JSON format""" + response = session.get(url) + response.raise_for_status() + return response.json() diff --git a/darwin/future/core/team/get_team.py b/darwin/future/core/team/get_team.py new file mode 100644 index 000000000..619c5e649 --- /dev/null +++ b/darwin/future/core/team/get_team.py @@ -0,0 +1,25 @@ +from typing import List, Optional, Tuple + +from darwin.future.core.client import ClientCore +from darwin.future.core.types.common import JSONType +from darwin.future.data_objects.team import TeamCore, TeamMemberCore + + +def get_team(client: ClientCore, team_slug: Optional[str] = None) -> TeamCore: + """Returns the team with the given slug""" + if not team_slug: + team_slug = client.config.default_team + response = client.get(f"/teams/{team_slug}/") + return TeamCore.parse_obj(response) + + +def get_team_members(client: ClientCore) -> Tuple[List[TeamMemberCore], List[Exception]]: + response = client.get("/memberships") + members = [] + errors = [] + for item in response: + try: + members.append(TeamMemberCore.parse_obj(item)) + except Exception as e: + errors.append(e) + return (members, errors) diff --git a/darwin/future/core/types/common.py b/darwin/future/core/types/common.py index c497bfd75..297eb3d4b 100644 --- a/darwin/future/core/types/common.py +++ b/darwin/future/core/types/common.py @@ -1,11 +1,13 @@ -from typing import Dict +from typing import Any, Dict, List, Union -from darwin.future.data_objects import validators as darwin_validators -from pydantic import BaseModel import pydantic +from pydantic import BaseModel +from darwin.future.data_objects import validators as darwin_validators from darwin.future.data_objects.typing import UnknownType +JSONType = Union[Dict[str, Any], List[Dict[str, Any]]] # type: ignore + class TeamSlug(str): """Team slug type""" diff --git a/darwin/future/core/types/query.py b/darwin/future/core/types/query.py index 9aae585ec..ac45d2903 100644 --- a/darwin/future/core/types/query.py +++ b/darwin/future/core/types/query.py @@ -14,7 +14,13 @@ overload, ) -from darwin.future.core.client import Client +from darwin.future.core.client import ClientCore +from darwin.future.exceptions import ( + InvalidQueryFilter, + InvalidQueryModifier, + MoreThanOneResultFound, + ResultsNotFound, +) from darwin.future.meta.objects.base import MetaBase from darwin.future.pydantic_base import DefaultDarwin @@ -64,12 +70,12 @@ def filter_attr(self, attr: Any) -> bool: # type: ignore elif self.modifier == Modifier.CONTAINS: return param in attr else: - raise ValueError(f"Unknown modifier {self.modifier}") + raise InvalidQueryModifier(f"Unknown modifier {self.modifier}") @classmethod def _from_dict(cls, d: Dict[str, Any]) -> QueryFilter: # type: ignore if "name" not in d or "param" not in d: - raise ValueError(f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {d}") + raise InvalidQueryFilter(f"args must be a QueryFilter or a dict with 'name' and 
'param' keys, got {d}") modifier = Modifier(d["modifier"]) if "modifier" in d else None return QueryFilter(name=d["name"], param=str(d["param"]), modifier=modifier) @@ -89,7 +95,7 @@ def _from_arg(cls, arg: object) -> QueryFilter: elif isinstance(arg, dict): return cls._from_dict(arg) else: - raise ValueError(f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {arg}") + raise InvalidQueryFilter(f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {arg}") @classmethod def _from_kwarg(cls, key: str, value: str) -> QueryFilter: @@ -111,52 +117,40 @@ class Query(Generic[T], ABC): """ def __init__( - self, client: Client, filters: Optional[List[QueryFilter]] = None, meta_params: Optional[Param] = None + self, client: ClientCore, filters: Optional[List[QueryFilter]] = None, meta_params: Optional[Param] = None ): - self.meta_params = meta_params + self.meta_params: dict = meta_params or dict() self.client = client - self.filters = filters + self.filters = filters or [] self.results: Optional[List[T]] = None + self._changed_since_last: bool = True def filter(self, filter: QueryFilter) -> Query[T]: return self + filter def __add__(self, filter: QueryFilter) -> Query[T]: - assert filter is not None - assert isinstance(filter, QueryFilter) - if self.filters is None: - self.filters = [] + self._changed_since_last = True return self.__class__(self.client, filters=[*self.filters, filter], meta_params=self.meta_params) def __sub__(self, filter: QueryFilter) -> Query[T]: - assert filter is not None - assert isinstance(filter, QueryFilter) - if self.filters is None: - return self + self._changed_since_last = True return self.__class__( self.client, filters=[f for f in self.filters if f != filter], meta_params=self.meta_params ) def __iadd__(self, filter: QueryFilter) -> Query[T]: - assert filter is not None - assert isinstance(filter, QueryFilter) - if self.filters is None: - self.filters = [filter] - return self self.filters.append(filter) + self._changed_since_last = True return self def __isub__(self, filter: QueryFilter) -> Query[T]: - assert filter is not None - assert isinstance(filter, QueryFilter) - if self.filters is None: - return self self.filters = [f for f in self.filters if f != filter] + self._changed_since_last = True return self def __len__(self) -> int: - if self.results is None: - self.results = list(self.collect()) + if not self.results: + self.results = list(self._collect()) return len(self.results) def __iter__(self) -> Query[T]: @@ -164,8 +158,8 @@ def __iter__(self) -> Query[T]: return self def __next__(self) -> T: - if self.results is None: - self.results = list(self.collect()) + if not self.results: + self.results = list(self._collect()) if self.n < len(self.results): result = self.results[self.n] self.n += 1 @@ -174,40 +168,48 @@ def __next__(self) -> T: raise StopIteration def __getitem__(self, index: int) -> T: - if self.results is None: - self.results = list(self.collect()) + if not self.results: + self.results = list(self._collect()) return self.results[index] def __setitem__(self, index: int, value: T) -> None: - if self.results is None: - self.results = list(self.collect()) + if not self.results: + self.results = list(self._collect()) self.results[index] = value def where(self, *args: object, **kwargs: str) -> Query[T]: filters = QueryFilter._from_args(*args, **kwargs) for item in filters: self += item + self._changed_since_last = True return self + def collect(self, force: bool = False) -> List[T]: + if force or 
self._changed_since_last: + self.results = [] + self.results = self._collect() + self._changed_since_last = False + return self.results + @abstractmethod - def collect(self) -> List[T]: + def _collect(self) -> List[T]: raise NotImplementedError("Not implemented") def collect_one(self) -> T: - if self.results is None: + if not self.results: self.results = list(self.collect()) if len(self.results) == 0: - raise ValueError("No results found") + raise ResultsNotFound("No results found") if len(self.results) > 1: - raise ValueError("More than one result found") + raise MoreThanOneResultFound("More than one result found") return self.results[0] def first(self) -> Optional[T]: - if self.results is None: + if not self.results: self.results = list(self.collect()) if len(self.results) == 0: return None return self.results[0] def _generic_execute_filter(self, objects: List[T], filter: QueryFilter) -> List[T]: - return [m for m in objects if filter.filter_attr(getattr(m._item, filter.name))] + return [m for m in objects if filter.filter_attr(getattr(m._element, filter.name))] diff --git a/darwin/future/core/utils/pathutils.py b/darwin/future/core/utils/pathutils.py index 0f929d0da..ccaebdeba 100644 --- a/darwin/future/core/utils/pathutils.py +++ b/darwin/future/core/utils/pathutils.py @@ -4,7 +4,7 @@ import yaml -from darwin.future.exceptions.files import UnrecognizableFileEncoding +from darwin.future.exceptions import UnrecognizableFileEncoding ENCODINGS = ["utf-8", "utf-16", "utf-32", "ascii"] diff --git a/darwin/future/core/workflows/__init__.py b/darwin/future/core/workflows/__init__.py index e69de29bb..c2282b65a 100644 --- a/darwin/future/core/workflows/__init__.py +++ b/darwin/future/core/workflows/__init__.py @@ -0,0 +1,3 @@ +from darwin.future.core.workflows.get_workflow import * +from darwin.future.core.workflows.get_workflows import * +from darwin.future.core.workflows.list_workflows import * diff --git a/darwin/future/core/workflows/get_workflow.py b/darwin/future/core/workflows/get_workflow.py index 0c51d0e21..0afca7047 100644 --- a/darwin/future/core/workflows/get_workflow.py +++ b/darwin/future/core/workflows/get_workflow.py @@ -2,21 +2,21 @@ from pydantic import parse_obj_as -from darwin.future.core.client import Client -from darwin.future.data_objects.workflow import Workflow +from darwin.future.core.client import ClientCore +from darwin.future.data_objects.workflow import WorkflowCore def get_workflow( - client: Client, workflow_id: str, team_slug: Optional[str] = None -) -> Tuple[Optional[Workflow], List[Exception]]: - workflow: Optional[Workflow] = None + client: ClientCore, workflow_id: str, team_slug: Optional[str] = None +) -> Tuple[Optional[WorkflowCore], List[Exception]]: + workflow: Optional[WorkflowCore] = None exceptions: List[Exception] = [] try: team_slug = team_slug or client.config.default_team response = client.get(f"/v2/teams/{team_slug}/workflows/{workflow_id}") - workflow = parse_obj_as(Workflow, response) + workflow = parse_obj_as(WorkflowCore, response) except Exception as e: exceptions.append(e) diff --git a/darwin/future/core/workflows/get_workflows.py b/darwin/future/core/workflows/get_workflows.py index 203dabf6c..8664d1f74 100644 --- a/darwin/future/core/workflows/get_workflows.py +++ b/darwin/future/core/workflows/get_workflows.py @@ -2,12 +2,12 @@ from pydantic import parse_obj_as -from darwin.future.core.client import Client -from darwin.future.data_objects.workflow import Workflow +from darwin.future.core.client import ClientCore +from 
darwin.future.data_objects.workflow import WorkflowCore -def get_workflows(client: Client, team_slug: Optional[str] = None) -> List[Workflow]: +def get_workflows(client: ClientCore, team_slug: Optional[str] = None) -> List[WorkflowCore]: team_slug = team_slug or client.config.default_team response = client.get(f"/v2/teams/{team_slug}/workflows?worker=false") - return [parse_obj_as(Workflow, workflow) for workflow in response] + return [parse_obj_as(WorkflowCore, workflow) for workflow in response] diff --git a/darwin/future/core/workflows/list_workflows.py b/darwin/future/core/workflows/list_workflows.py index 0555171ab..e28e766d5 100644 --- a/darwin/future/core/workflows/list_workflows.py +++ b/darwin/future/core/workflows/list_workflows.py @@ -2,11 +2,11 @@ from pydantic import ValidationError -from darwin.future.core.client import Client -from darwin.future.data_objects.workflow import Workflow, WorkflowListValidator +from darwin.future.core.client import ClientCore +from darwin.future.data_objects.workflow import WorkflowCore, WorkflowListValidator -def list_workflows(client: Client, team_slug: Optional[str] = None) -> Tuple[List[Workflow], List[Exception]]: +def list_workflows(client: ClientCore, team_slug: Optional[str] = None) -> Tuple[List[WorkflowCore], List[Exception]]: """ Returns a list of workflows for the given team @@ -22,13 +22,13 @@ def list_workflows(client: Client, team_slug: Optional[str] = None) -> Tuple[Lis Tuple[List[Workflow], List[Exception]] """ exceptions: List[Exception] = [] - workflows: List[Workflow] = [] + workflows: List[WorkflowCore] = [] try: team_slug = team_slug or client.config.default_team response = client.get(f"/v2/teams/{team_slug}/workflows?worker=false") list_of_workflows = WorkflowListValidator(list=response) # type: ignore - workflows = [Workflow.parse_obj(workflow) for workflow in list_of_workflows.list] + workflows = [WorkflowCore.parse_obj(workflow) for workflow in list_of_workflows.list] except Exception as e: exceptions.append(e) diff --git a/darwin/future/data_objects/dataset.py b/darwin/future/data_objects/dataset.py index 9dd4f92a7..134a50f5a 100644 --- a/darwin/future/data_objects/dataset.py +++ b/darwin/future/data_objects/dataset.py @@ -7,7 +7,7 @@ from darwin.future.pydantic_base import DefaultDarwin -class Dataset(DefaultDarwin): +class DatasetCore(DefaultDarwin): """ A class to manage all the information around a dataset on the darwin platform, including validation @@ -33,4 +33,4 @@ class Dataset(DefaultDarwin): _name_validator = validator("name", allow_reuse=True)(parse_name) -DatasetList = List[Dataset] +DatasetList = List[DatasetCore] diff --git a/darwin/future/data_objects/release.py b/darwin/future/data_objects/release.py index bbd656253..e1d7f1ac3 100644 --- a/darwin/future/data_objects/release.py +++ b/darwin/future/data_objects/release.py @@ -6,7 +6,7 @@ from darwin.future.pydantic_base import DefaultDarwin -class Release(DefaultDarwin): +class ReleaseCore(DefaultDarwin): """A class to manage all the information around a release on the darwin platform, including validation Attributes ---------- @@ -26,4 +26,4 @@ def __str__(self) -> str: _name_validator = validator("name", allow_reuse=True)(darwin_validators.parse_name) -ReleaseList = List[Release] +ReleaseList = List[ReleaseCore] diff --git a/darwin/future/data_objects/team.py b/darwin/future/data_objects/team.py index 49240b30c..c766fd591 100644 --- a/darwin/future/data_objects/team.py +++ b/darwin/future/data_objects/team.py @@ -4,14 +4,15 @@ from pydantic import 
validator -from darwin.future.core.client import Client +from darwin.future.core.client import ClientCore +from darwin.future.core.team.get_raw import get_team_raw from darwin.future.data_objects.dataset import DatasetList from darwin.future.data_objects.team_member_role import TeamMemberRole from darwin.future.data_objects.validators import parse_name from darwin.future.pydantic_base import DefaultDarwin -class TeamMember(DefaultDarwin): +class TeamMemberCore(DefaultDarwin): """A class to manage all the information around a team member on the darwin platform Attributes ---------- @@ -27,10 +28,10 @@ class TeamMember(DefaultDarwin): user_id: int -TeamMemberList = List[TeamMember] +TeamMemberList = List[TeamMemberCore] -class Team(DefaultDarwin): +class TeamCore(DefaultDarwin): """A class to manage all the information around a Team on the darwin platform, including validation Attributes @@ -44,18 +45,19 @@ class Team(DefaultDarwin): ---------- _slug_validator: validates and auto formats the slug variable """ + name: str slug: str id: int datasets: Optional[DatasetList] = None - members: Optional[List[TeamMember]] = None + members: Optional[List[TeamMemberCore]] = None default_role: TeamMemberRole = TeamMemberRole.USER # Data Validation _slug_validator = validator("slug", allow_reuse=True)(parse_name) @staticmethod - def from_client(client: Client, team_slug: Optional[str] = None) -> Team: + def from_client(client: ClientCore, team_slug: Optional[str] = None) -> TeamCore: """Returns the team with the given slug from the client Args: @@ -67,43 +69,5 @@ def from_client(client: Client, team_slug: Optional[str] = None) -> Team: """ if not team_slug: team_slug = client.config.default_team - return get_team(client, team_slug) - - -TeamList = List[Team] - - -def get_team(client: Client, team_slug: Optional[str] = None) -> Team: - """Returns the team with the given slug from the client - - Args: - client (Client): Core client object - team_slug (Optional[str], optional): team slug str, Defaults to None. 
- - Returns: - Team: Team object retrieved from the client with the given slug - """ - if not team_slug: - team_slug = client.config.default_team - response = client.get(f"/teams/{team_slug}/") - return Team.parse_obj(response) - - -def get_team_members(client: Client) -> Tuple[List[TeamMember], List[Exception]]: - """Returns a list of team members for the given client - - Args: - client (Client): Core client object - - Returns: - Tuple[List[TeamMember], List[Exception]]: List of team members and list of errors if any - """ - response = client.get(f"/memberships") - members = [] - errors = [] - for item in response: - try: - members.append(TeamMember.parse_obj(item)) - except Exception as e: - errors.append(e) - return members, errors + url = client.config.api_endpoint + f"teams/{team_slug}" + return TeamCore.parse_obj(get_team_raw(client.session, url)) diff --git a/darwin/future/data_objects/workflow.py b/darwin/future/data_objects/workflow.py index 2691627bc..0af79c14f 100644 --- a/darwin/future/data_objects/workflow.py +++ b/darwin/future/data_objects/workflow.py @@ -9,7 +9,7 @@ from darwin.future.pydantic_base import DefaultDarwin -class WFDataset(DefaultDarwin): +class WFDatasetCore(DefaultDarwin): """ A class to manage all the information around a dataset on the darwin platform, including validation @@ -38,7 +38,7 @@ def __str__(self) -> str: return self.name -class WFEdge(DefaultDarwin): +class WFEdgeCore(DefaultDarwin): """ A workflow edge @@ -64,7 +64,7 @@ def _one_or_both_must_exist(cls, values: dict) -> dict: return values -class WFType(Enum): +class WFTypeCore(Enum): """ The type of workflow stage (Enum) @@ -90,16 +90,16 @@ class WFType(Enum): UNKNOWN = "unknown" @classmethod - def _missing_(cls, value: str) -> "WFType": - return WFType.UNKNOWN + def _missing_(cls, value: object) -> "WFTypeCore": + return WFTypeCore.UNKNOWN -class WFUser(DefaultDarwin): +class WFUserCore(DefaultDarwin): stage_id: UUID user_id: int -class WFStageConfig(DefaultDarwin): +class WFStageConfigCore(DefaultDarwin): # ! 
NB: We may be able to remove many of these attributes url: Optional[str] x: Optional[int] @@ -133,7 +133,7 @@ class WFStageConfig(DefaultDarwin): threshold: UnknownType -class WFStage(DefaultDarwin): +class WFStageCore(DefaultDarwin): """ A workflow stage @@ -151,13 +151,13 @@ class WFStage(DefaultDarwin): id: UUID name: str - type: WFType + type: WFTypeCore - assignable_users: List[WFUser] - edges: List[WFEdge] + assignable_users: List[WFUserCore] + edges: List[WFEdgeCore] -class Workflow(DefaultDarwin): +class WorkflowCore(DefaultDarwin): """ A class to manage all the information around a workflow on the darwin platform, including validation @@ -184,11 +184,11 @@ class Workflow(DefaultDarwin): inserted_at: datetime updated_at: datetime - dataset: Optional[WFDataset] - stages: List[WFStage] + dataset: Optional[WFDatasetCore] + stages: List[WFStageCore] thumbnails: List[str] class WorkflowListValidator(DefaultDarwin): - list: List[Workflow] + list: List[WorkflowCore] diff --git a/darwin/future/exceptions/base.py b/darwin/future/exceptions.py similarity index 74% rename from darwin/future/exceptions/base.py rename to darwin/future/exceptions.py index e47767921..bfd6cd0b3 100644 --- a/darwin/future/exceptions/base.py +++ b/darwin/future/exceptions.py @@ -61,3 +61,45 @@ class ValidationError(DarwinException): class AssertionError(DarwinException): pass + + +class NotFound(DarwinException): + pass + + +class Unauthorized(DarwinException): + pass + + +class UnrecognizableFileEncoding(DarwinException): + pass + + +class MissingSlug(DarwinException): + pass + + +class MissingDataset(DarwinException): + pass + + +class ResultsNotFound(DarwinException): + pass + + +class MoreThanOneResultFound(DarwinException): + pass + + +class InvalidQueryModifier(DarwinException): + pass + + +class InvalidQueryFilter(DarwinException): + pass + + +class DatasetNotFound(DarwinException): + """Raised when the dataset endpoint returns a malformed response.""" + + ... diff --git a/darwin/future/exceptions/__init__.py b/darwin/future/exceptions/__init__.py deleted file mode 100644 index 6d3b3ec1d..000000000 --- a/darwin/future/exceptions/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .base import DarwinException # noqa diff --git a/darwin/future/exceptions/client.py b/darwin/future/exceptions/client.py deleted file mode 100644 index c3f924a4a..000000000 --- a/darwin/future/exceptions/client.py +++ /dev/null @@ -1,9 +0,0 @@ -from darwin.future.exceptions.base import DarwinException - - -class NotFound(DarwinException): - pass - - -class Unauthorized(DarwinException): - pass diff --git a/darwin/future/exceptions/core/__init__.py b/darwin/future/exceptions/core/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/darwin/future/exceptions/core/datasets.py b/darwin/future/exceptions/core/datasets.py deleted file mode 100644 index 6cffea02b..000000000 --- a/darwin/future/exceptions/core/datasets.py +++ /dev/null @@ -1,13 +0,0 @@ -from darwin.future.exceptions.base import DarwinException - - -class DatasetException(DarwinException): - """Base class for all dataset exceptions.""" - - ... - - -class DatasetNotFound(DatasetException): - """Raised when the dataset endpoint returns a malformed response.""" - - ... 
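
The exceptions restructure shown here collapses the old per-domain exception
modules into the single flat darwin/future/exceptions.py above (the remaining
per-domain files are deleted just below). A minimal sketch of what a call site
looks like after this commit; the imports use only names introduced in this
patch, while the delete_dataset_if_present helper and its try/except flow are
illustrative, not taken from the diff:

    from darwin.future.core.client import ClientCore
    from darwin.future.core.datasets import get_dataset, remove_dataset
    from darwin.future.exceptions import DatasetNotFound, NotFound


    def delete_dataset_if_present(client: ClientCore, slug: str) -> None:
        # Both exception classes are now imported from the flat module:
        # DatasetNotFound previously lived in exceptions/core/datasets.py,
        # NotFound in exceptions/client.py.
        try:
            dataset = get_dataset(client, slug)
            remove_dataset(client, dataset.id)
        except (DatasetNotFound, NotFound):
            # Illustrative handling: treat a missing dataset as a no-op.
            pass
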
diff --git a/darwin/future/exceptions/files.py b/darwin/future/exceptions/files.py deleted file mode 100644 index 3b810675a..000000000 --- a/darwin/future/exceptions/files.py +++ /dev/null @@ -1,5 +0,0 @@ -from darwin.future.exceptions.base import DarwinException - - -class UnrecognizableFileEncoding(DarwinException): - pass diff --git a/darwin/future/exceptions/meta.py b/darwin/future/exceptions/meta.py deleted file mode 100644 index 1b68b9402..000000000 --- a/darwin/future/exceptions/meta.py +++ /dev/null @@ -1,5 +0,0 @@ -from darwin.future.exceptions.base import DarwinException - - -class MetaException(DarwinException): - pass diff --git a/darwin/future/meta/client.py b/darwin/future/meta/client.py index ad0ec3700..8d230dfb2 100644 --- a/darwin/future/meta/client.py +++ b/darwin/future/meta/client.py @@ -5,26 +5,26 @@ from requests.adapters import Retry -from darwin.future.core.client import Client, DarwinConfig -from darwin.future.meta.objects.team import TeamMeta -from darwin.future.meta.objects.workflow import WorkflowMeta +from darwin.future.core.client import ClientCore, DarwinConfig +from darwin.future.meta.objects.team import Team +from darwin.future.meta.objects.workflow import Workflow from darwin.future.meta.queries.workflow import WorkflowQuery -class MetaClient(Client): +class Client(ClientCore): def __init__(self, config: DarwinConfig, retries: Optional[Retry] = None) -> None: - self._team: Optional[TeamMeta] = None + self._team: Optional[Team] = None super().__init__(config, retries=retries) @classmethod - def local(cls) -> MetaClient: + def local(cls) -> Client: config = DarwinConfig.local() return cls(config) @classmethod - def from_api_key(cls, api_key: str, datasets_dir: Optional[Path] = None) -> MetaClient: + def from_api_key(cls, api_key: str, datasets_dir: Optional[Path] = None) -> Client: config = DarwinConfig.from_api_key_with_defaults(api_key=api_key) - client = Client(config) # create a temporary client to get the default team + client = ClientCore(config) # create a temporary client to get the default team token_info = client.get("/users/token_info") assert isinstance(token_info, dict) default_team: str = token_info["selected_team"]["slug"] @@ -34,9 +34,9 @@ def from_api_key(cls, api_key: str, datasets_dir: Optional[Path] = None) -> Meta return cls(config) @property - def team(self) -> TeamMeta: + def team(self) -> Team: if self._team is None: - self._team = TeamMeta(self) + self._team = Team(self) return self._team # @property diff --git a/darwin/future/meta/objects/base.py b/darwin/future/meta/objects/base.py index 632058a55..aa51789f1 100644 --- a/darwin/future/meta/objects/base.py +++ b/darwin/future/meta/objects/base.py @@ -2,7 +2,7 @@ from typing import Dict, Generic, List, Optional, TypeVar -from darwin.future.core.client import Client +from darwin.future.core.client import ClientCore from darwin.future.pydantic_base import DefaultDarwin R = TypeVar("R", bound=DefaultDarwin) @@ -10,15 +10,13 @@ class MetaBase(Generic[R]): - _item: Optional[R] - client: Client + _element: R + client: ClientCore - def __init__(self, client: Client, item: Optional[R] = None, meta_params: Optional[Param] = None) -> None: + def __init__(self, client: ClientCore, element: R, meta_params: Optional[Param] = None) -> None: self.client = client - self._item = item + self._element = element self.meta_params = meta_params or dict() def __str__(self) -> str: - if self._item is None: - raise ValueError("MetaBase has no item") - return str(self._item) + return str(self._element) diff 
--git a/darwin/future/meta/objects/dataset.py b/darwin/future/meta/objects/dataset.py index 2e21d489d..be870afaf 100644 --- a/darwin/future/meta/objects/dataset.py +++ b/darwin/future/meta/objects/dataset.py @@ -6,18 +6,16 @@ from darwin.cli_functions import upload_data from darwin.dataset.upload_manager import LocalFile from darwin.datatypes import PathLike -from darwin.future.core.client import Client -from darwin.future.core.datasets.create_dataset import create_dataset -from darwin.future.core.datasets.get_dataset import get_dataset -from darwin.future.core.datasets.list_datasets import list_datasets -from darwin.future.core.datasets.remove_dataset import remove_dataset -from darwin.future.core.items.get import get_item_ids -from darwin.future.data_objects.dataset import Dataset +from darwin.future.core.client import ClientCore +from darwin.future.core.datasets import create_dataset, get_dataset, remove_dataset +from darwin.future.core.items import get_item_ids +from darwin.future.data_objects.dataset import DatasetCore +from darwin.future.exceptions import MissingDataset from darwin.future.helpers.assertion import assert_is from darwin.future.meta.objects.base import MetaBase -class DatasetMeta(MetaBase[Dataset]): +class Dataset(MetaBase[DatasetCore]): """Dataset Meta object. Facilitates the creation of Query objects, lazy loading of sub fields Args: @@ -26,21 +24,21 @@ class DatasetMeta(MetaBase[Dataset]): Returns: _type_: DatasetMeta """ + @property def name(self) -> str: - assert self._item is not None - assert self._item.name is not None - return self._item.name + assert self._element.name is not None + return self._element.name + @property def slug(self) -> str: - assert self._item is not None - assert self._item.slug is not None - return self._item.slug + assert self._element.slug is not None + return self._element.slug + @property def id(self) -> int: - assert self._item is not None - assert self._item.id is not None - return self._item.id + assert self._element.id is not None + return self._element.id @property def item_ids(self) -> List[UUID]: @@ -49,16 +47,16 @@ def item_ids(self) -> List[UUID]: Returns: List[UUID]: A list of item ids """ - assert self._item is not None - assert self._item.id is not None + assert self._element.id is not None assert self.meta_params["team_slug"] is not None and type(self.meta_params["team_slug"]) == str - return get_item_ids(self.client, self.meta_params["team_slug"], str(self._item.id)) + return get_item_ids(self.client, self.meta_params["team_slug"], str(self._element.id)) - def get_dataset_by_id(self) -> Dataset: + def get_dataset_by_id(self) -> DatasetCore: # TODO: implement raise NotImplementedError() - def create_dataset(self, slug: str) -> Tuple[Optional[List[Exception]], Optional[Dataset]]: + @classmethod + def create_dataset(cls, client: ClientCore, slug: str) -> Tuple[Optional[List[Exception]], Optional[DatasetCore]]: """ Creates a new dataset for the given team @@ -74,21 +72,22 @@ def create_dataset(self, slug: str) -> Tuple[Optional[List[Exception]], Optional """ exceptions = [] - dataset: Optional[Dataset] = None + dataset: Optional[DatasetCore] = None try: - self._validate_slug(slug) - dataset = create_dataset(self.client, slug) + cls._validate_slug(slug) + dataset = create_dataset(client, slug) except Exception as e: exceptions.append(e) return exceptions or None, dataset - def update_dataset(self) -> Dataset: + def update_dataset(self) -> DatasetCore: # TODO: implement in IO-1018 raise NotImplementedError() - def 
delete_dataset(self, dataset_id: Union[int, str]) -> Tuple[Optional[List[Exception]], int]: + @classmethod + def delete_dataset(cls, client: ClientCore, dataset_id: Union[int, str]) -> Tuple[Optional[List[Exception]], int]: """ Deletes a dataset by id or slug @@ -107,9 +106,9 @@ def delete_dataset(self, dataset_id: Union[int, str]) -> Tuple[Optional[List[Exc try: if isinstance(dataset_id, str): - dataset_deleted = self._delete_by_slug(self.client, dataset_id) + dataset_deleted = cls._delete_by_slug(client, dataset_id) else: - dataset_deleted = self._delete_by_id(self.client, dataset_id) + dataset_deleted = cls._delete_by_id(client, dataset_id) except Exception as e: exceptions.append(e) @@ -117,7 +116,7 @@ def delete_dataset(self, dataset_id: Union[int, str]) -> Tuple[Optional[List[Exc return exceptions or None, dataset_deleted @staticmethod - def _delete_by_slug(client: Client, slug: str) -> int: + def _delete_by_slug(client: ClientCore, slug: str) -> int: """ (internal) Deletes a dataset by slug @@ -134,19 +133,19 @@ def _delete_by_slug(client: Client, slug: str) -> int: int The dataset deleted """ - assert_is(isinstance(client, Client), "client must be a Core Client") + assert_is(isinstance(client, ClientCore), "client must be a Core Client") assert_is(isinstance(slug, str), "slug must be a string") dataset = get_dataset(client, slug) if dataset and dataset.id: dataset_deleted = remove_dataset(client, dataset.id) else: - raise Exception(f"Dataset with slug {slug} not found") + raise MissingDataset(f"Dataset with slug {slug} not found") return dataset_deleted @staticmethod - def _delete_by_id(client: Client, dataset_id: int) -> int: + def _delete_by_id(client: ClientCore, dataset_id: int) -> int: """ (internal) Deletes a dataset by id @@ -163,7 +162,7 @@ def _delete_by_id(client: Client, dataset_id: int) -> int: int The dataset deleted """ - assert_is(isinstance(client, Client), "client must be a Client") + assert_is(isinstance(client, ClientCore), "client must be a Client") assert_is(isinstance(dataset_id, int), "dataset_id must be an integer") dataset_deleted = remove_dataset(client, dataset_id) @@ -200,9 +199,8 @@ def upload_files( extract_views: bool = False, preserve_folders: bool = False, verbose: bool = False, - ) -> DatasetMeta: - assert self._item is not None + ) -> Dataset: upload_data( - self._item.name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose + self._element.name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose # type: ignore ) return self diff --git a/darwin/future/meta/objects/stage.py b/darwin/future/meta/objects/stage.py index 008d4eba9..85ae3b0ba 100644 --- a/darwin/future/meta/objects/stage.py +++ b/darwin/future/meta/objects/stage.py @@ -3,13 +3,12 @@ from typing import List from uuid import UUID -from darwin.future.core.items.get import get_item_ids_stage -from darwin.future.core.items.move_items import move_items_to_stage -from darwin.future.data_objects.workflow import WFStage +from darwin.future.core.items import get_item_ids_stage, move_items_to_stage +from darwin.future.data_objects.workflow import WFStageCore from darwin.future.meta.objects.base import MetaBase -class StageMeta(MetaBase[WFStage]): +class Stage(MetaBase[WFStageCore]): """_summary_ Args: @@ -23,13 +22,12 @@ def item_ids(self) -> List[UUID]: Returns: _type_: _description_ """ - assert self._item is not None - assert self._item.id is not None + assert self._element.id is not None return get_item_ids_stage( self.client, 
str(self.meta_params["team_slug"]), str(self.meta_params["dataset_id"]), self.id ) - def move_attached_files_to_stage(self, new_stage_id: UUID) -> StageMeta: + def move_attached_files_to_stage(self, new_stage_id: UUID) -> Stage: assert self.meta_params["team_slug"] is not None and type(self.meta_params["team_slug"]) == str assert self.meta_params["workflow_id"] is not None and type(self.meta_params["workflow_id"]) == UUID assert self.meta_params["dataset_id"] is not None and type(self.meta_params["dataset_id"]) == int @@ -43,5 +41,4 @@ def move_attached_files_to_stage(self, new_stage_id: UUID) -> StageMeta: @property def id(self) -> UUID: - assert self._item is not None - return self._item.id + return self._element.id diff --git a/darwin/future/meta/objects/team.py b/darwin/future/meta/objects/team.py index 48245066a..4783f4484 100644 --- a/darwin/future/meta/objects/team.py +++ b/darwin/future/meta/objects/team.py @@ -1,7 +1,8 @@ from typing import List, Optional -from darwin.future.core.client import Client -from darwin.future.data_objects.team import Team, get_team +from darwin.future.core.client import ClientCore +from darwin.future.core.team.get_team import get_team +from darwin.future.data_objects.team import TeamCore from darwin.future.helpers.assertion import assert_is from darwin.future.meta.objects.base import MetaBase from darwin.future.meta.queries.dataset import DatasetQuery @@ -9,7 +10,7 @@ from darwin.future.meta.queries.workflow import WorkflowQuery -class TeamMeta(MetaBase[Team]): +class Team(MetaBase[TeamCore]): """Team Meta object. Facilitates the creation of Query objects, lazy loading of sub fields like members unlike other MetaBase objects, does not extend the __next__ function because it is not iterable. This is because Team is linked to api key and only one team can be returned, but stores a list of teams for consistency. 
This @@ -23,29 +24,26 @@ class TeamMeta(MetaBase[Team]): _type_: TeamMeta """ - def __init__(self, client: Client, team: Optional[Team] = None) -> None: + def __init__(self, client: ClientCore, team: Optional[TeamCore] = None) -> None: team = team or get_team(client) super().__init__(client, team) @property def name(self) -> str: - assert self._item is not None - return self._item.name + return self._element.name @property def id(self) -> int: - assert self._item is not None - assert self._item.id is not None - return self._item.id - + assert self._element.id is not None + return self._element.id + @property def members(self) -> TeamMemberQuery: return TeamMemberQuery(self.client, meta_params={"team_slug": self.slug}) @property def slug(self) -> str: - assert self._item is not None - return self._item.slug + return self._element.slug @property def datasets(self) -> DatasetQuery: @@ -54,8 +52,6 @@ def datasets(self) -> DatasetQuery: @property def workflows(self) -> WorkflowQuery: return WorkflowQuery(self.client, meta_params={"team_slug": self.slug}) - + def __str__(self) -> str: - assert self._item is not None - return f"TeamMeta(name='{self.name}', slug='{self.slug}', id='{self.id}' - {len(self._item.members if self._item.members else [])} members)" - + return f"TeamMeta(name='{self.name}', slug='{self.slug}', id='{self.id}' - {len(self._element.members if self._element.members else [])} members)" diff --git a/darwin/future/meta/objects/team_member.py b/darwin/future/meta/objects/team_member.py index 024bffd9d..222e24454 100644 --- a/darwin/future/meta/objects/team_member.py +++ b/darwin/future/meta/objects/team_member.py @@ -1,14 +1,12 @@ from typing import List, Optional -from darwin.future.core.client import Client -from darwin.future.data_objects.team import TeamMember, get_team_members +from darwin.future.core.client import ClientCore +from darwin.future.data_objects.team import TeamMemberCore from darwin.future.data_objects.team_member_role import TeamMemberRole from darwin.future.meta.objects.base import MetaBase -class TeamMemberMeta(MetaBase[TeamMember]): +class TeamMember(MetaBase[TeamMemberCore]): @property def role(self) -> TeamMemberRole: - if self._item is None: - raise ValueError("TeamMemberMeta has no item") - return self._item.role + return self._element.role diff --git a/darwin/future/meta/objects/workflow.py b/darwin/future/meta/objects/workflow.py index 2285227ae..f28671243 100644 --- a/darwin/future/meta/objects/workflow.py +++ b/darwin/future/meta/objects/workflow.py @@ -8,51 +8,42 @@ from darwin.cli_functions import upload_data from darwin.dataset.upload_manager import LocalFile from darwin.datatypes import PathLike -from darwin.future.data_objects.workflow import WFDataset, WFType, Workflow +from darwin.future.data_objects.workflow import WFDatasetCore, WFTypeCore, WorkflowCore from darwin.future.meta.objects.base import MetaBase from darwin.future.meta.queries.stage import StageQuery -class WorkflowMeta(MetaBase[Workflow]): +class Workflow(MetaBase[WorkflowCore]): @property def stages(self) -> StageQuery: - if self._item is None: - raise ValueError("WorkflowMeta has no item") meta_params = self.meta_params.copy() - meta_params["workflow_id"] = self._item.id + meta_params["workflow_id"] = self._element.id if self.datasets is not None: meta_params["dataset_id"] = self.datasets[0].id meta_params["dataset_name"] = self.datasets[0].name return StageQuery(self.client, meta_params=meta_params) @property - def datasets(self) -> List[WFDataset]: - if self._item is None: - 
raise ValueError("WorkflowMeta has no item") - if self._item.dataset is None: + def datasets(self) -> List[WFDatasetCore]: + if self._element.dataset is None: raise ValueError("WorkflowMeta has no associated dataset") - return [self._item.dataset] + return [self._element.dataset] @property def id(self) -> UUID: - if self._item is None: - raise ValueError("WorkflowMeta has no item") - return self._item.id + return self._element.id @property def name(self) -> str: - if self._item is None: - raise ValueError("WorkflowMeta has no item") - return self._item.name + return self._element.name - def push_from_dataset_stage(self) -> WorkflowMeta: - assert self._item is not None - assert self._item.dataset is not None + def push_from_dataset_stage(self) -> Workflow: + assert self._element.dataset is not None stages = self.stages ds_stage = stages[0] assert len(stages) > 1 - assert ds_stage._item is not None and ds_stage._item.type == WFType.DATASET - next_stage = ds_stage._item.edges[0].target_stage_id + assert ds_stage._element.type == WFTypeCore.DATASET + next_stage = ds_stage._element.edges[0].target_stage_id assert next_stage is not None ds_stage.move_attached_files_to_stage(next_stage) return self @@ -68,11 +59,10 @@ def upload_files( preserve_folders: bool = False, verbose: bool = False, auto_push: bool = True, - ) -> WorkflowMeta: - assert self._item is not None - assert self._item.dataset is not None + ) -> Workflow: + assert self._element.dataset is not None upload_data( - self.datasets[0].name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose + self.datasets[0].name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose # type: ignore ) if auto_push: self.push_from_dataset_stage() diff --git a/darwin/future/meta/queries/dataset.py b/darwin/future/meta/queries/dataset.py index 90a9a893e..daf3dc24e 100644 --- a/darwin/future/meta/queries/dataset.py +++ b/darwin/future/meta/queries/dataset.py @@ -2,14 +2,12 @@ from typing import List -from darwin.cli_functions import upload_data -from darwin.future.core.datasets.list_datasets import list_datasets +from darwin.future.core.datasets import list_datasets from darwin.future.core.types.query import Param, Query, QueryFilter -from darwin.future.data_objects.dataset import Dataset -from darwin.future.meta.objects.dataset import DatasetMeta +from darwin.future.meta.objects.dataset import Dataset -class DatasetQuery(Query[DatasetMeta]): +class DatasetQuery(Query[Dataset]): """ DatasetQuery object with methods to manage filters, retrieve data, and execute filters @@ -20,12 +18,12 @@ class DatasetQuery(Query[DatasetMeta]): collect: Executes the query and returns the filtered data """ - def collect(self) -> List[DatasetMeta]: + def _collect(self) -> List[Dataset]: datasets, exceptions = list_datasets(self.client) if exceptions: # TODO: print and or raise exceptions, tbd how we want to handle this pass - datasets_meta = [DatasetMeta(self.client, dataset) for dataset in datasets] + datasets_meta = [Dataset(self.client, dataset) for dataset in datasets] if not self.filters: self.filters = [] @@ -34,7 +32,7 @@ def collect(self) -> List[DatasetMeta]: return datasets_meta - def _execute_filters(self, datasets: List[DatasetMeta], filter: QueryFilter) -> List[DatasetMeta]: + def _execute_filters(self, datasets: List[Dataset], filter: QueryFilter) -> List[Dataset]: """Executes filtering on the local list of datasets, applying special logic for role filtering otherwise calls the parent method for 
general filtering on the values of the datasets @@ -52,7 +50,9 @@ def _execute_filters(self, datasets: List[DatasetMeta], filter: QueryFilter) -> return [ d for d in datasets - if d._item is not None and d._item.releases and filter.param in [str(r) for r in d._item.releases] + if d._element is not None + and d._element.releases + and filter.param in [str(r) for r in d._element.releases] ] return super()._generic_execute_filter(datasets, filter) diff --git a/darwin/future/meta/queries/stage.py b/darwin/future/meta/queries/stage.py index 9f56b6bba..3c8bd86db 100644 --- a/darwin/future/meta/queries/stage.py +++ b/darwin/future/meta/queries/stage.py @@ -3,28 +3,27 @@ from typing import List from uuid import UUID -from darwin.future.core.client import Client from darwin.future.core.types.query import Param, Query, QueryFilter -from darwin.future.core.workflows.get_workflow import get_workflow -from darwin.future.meta.objects.stage import StageMeta +from darwin.future.core.workflows import get_workflow +from darwin.future.meta.objects.stage import Stage -class StageQuery(Query[StageMeta]): - def collect(self) -> List[StageMeta]: - if not self.meta_params: +class StageQuery(Query[Stage]): + def _collect(self) -> List[Stage]: + if "workflow_id" not in self.meta_params: raise ValueError("Must specify workflow_id to query stages") workflow_id: UUID = self.meta_params["workflow_id"] meta_params = self.meta_params workflow, exceptions = get_workflow(self.client, str(workflow_id)) assert workflow is not None - stages = [StageMeta(self.client, s, meta_params=meta_params) for s in workflow.stages] + stages = [Stage(self.client, s, meta_params=meta_params) for s in workflow.stages] if not self.filters: self.filters = [] for filter in self.filters: stages = self._execute_filter(stages, filter) return stages - def _execute_filter(self, stages: List[StageMeta], filter: QueryFilter) -> List[StageMeta]: + def _execute_filter(self, stages: List[Stage], filter: QueryFilter) -> List[Stage]: """Executes filtering on the local list of stages Parameters ---------- @@ -36,5 +35,5 @@ def _execute_filter(self, stages: List[StageMeta], filter: QueryFilter) -> List[ List[Stage]: Filtered subset of stages """ if filter.name == "role": - return [s for s in stages if s._item is not None and filter.filter_attr(s._item.type.value)] + return [s for s in stages if s._element is not None and filter.filter_attr(s._element.type.value)] return super()._generic_execute_filter(stages, filter) diff --git a/darwin/future/meta/queries/team_member.py b/darwin/future/meta/queries/team_member.py index f0074f236..0fa192208 100644 --- a/darwin/future/meta/queries/team_member.py +++ b/darwin/future/meta/queries/team_member.py @@ -2,21 +2,21 @@ from typing import List +from darwin.future.core.team.get_team import get_team_members from darwin.future.core.types.query import Param, Query, QueryFilter -from darwin.future.data_objects.team import get_team_members -from darwin.future.meta.objects.team_member import TeamMemberMeta +from darwin.future.meta.objects.team_member import TeamMember -class TeamMemberQuery(Query[TeamMemberMeta]): +class TeamMemberQuery(Query[TeamMember]): """TeamMemberQuery object with methods to manage filters, retrieve data, and execute filters Methods: collect: Executes the query and returns the filtered data _execute_filter: Executes a filter on a list of objects """ - def collect(self) -> List[TeamMemberMeta]: + def _collect(self) -> List[TeamMember]: members, exceptions = get_team_members(self.client) - members_meta = 
[TeamMemberMeta(self.client, member) for member in members] + members_meta = [TeamMember(self.client, member) for member in members] if exceptions: # TODO: print and or raise exceptions, tbd how we want to handle this pass @@ -27,7 +27,7 @@ def collect(self) -> List[TeamMemberMeta]: return members_meta - def _execute_filter(self, members: List[TeamMemberMeta], filter: QueryFilter) -> List[TeamMemberMeta]: + def _execute_filter(self, members: List[TeamMember], filter: QueryFilter) -> List[TeamMember]: """Executes filtering on the local list of members, applying special logic for role filtering otherwise calls the parent method for general filtering on the values of the members @@ -41,6 +41,6 @@ def _execute_filter(self, members: List[TeamMemberMeta], filter: QueryFilter) -> List[TeamMember]: Filtered subset of members """ if filter.name == "role": - return [m for m in members if m._item is not None and filter.filter_attr(m._item.role.value)] + return [m for m in members if m._element is not None and filter.filter_attr(m._element.role.value)] else: return super()._generic_execute_filter(members, filter) diff --git a/darwin/future/meta/queries/workflow.py b/darwin/future/meta/queries/workflow.py index 29e207f4e..552a49dfa 100644 --- a/darwin/future/meta/queries/workflow.py +++ b/darwin/future/meta/queries/workflow.py @@ -4,13 +4,13 @@ from darwin.exceptions import DarwinException from darwin.future.core.types.query import Param, Query, QueryFilter -from darwin.future.core.workflows.list_workflows import list_workflows -from darwin.future.data_objects.workflow import WFStage +from darwin.future.core.workflows import list_workflows +from darwin.future.data_objects.workflow import WFStageCore from darwin.future.helpers.exception_handler import handle_exception -from darwin.future.meta.objects.workflow import WorkflowMeta +from darwin.future.meta.objects.workflow import Workflow -class WorkflowQuery(Query[WorkflowMeta]): +class WorkflowQuery(Query[Workflow]): """ WorkflowQuery object with methods to manage filters, retrieve data, and execute filters @@ -21,12 +21,12 @@ class WorkflowQuery(Query[WorkflowMeta]): collect: Executes the query and returns the filtered data """ - def collect(self) -> List[WorkflowMeta]: + def _collect(self) -> List[Workflow]: workflows_core, exceptions = list_workflows(self.client) if exceptions: handle_exception(exceptions) raise DarwinException from exceptions[0] - workflows = [WorkflowMeta(self.client, workflow, self.meta_params) for workflow in workflows_core] + workflows = [Workflow(self.client, workflow, self.meta_params) for workflow in workflows_core] if not self.filters: return workflows @@ -35,45 +35,57 @@ def collect(self) -> List[WorkflowMeta]: return workflows - def _execute_filters(self, workflows: List[WorkflowMeta], filter: QueryFilter) -> List[WorkflowMeta]: + def _execute_filters(self, workflows: List[Workflow], filter: QueryFilter) -> List[Workflow]: if filter.name == "id": id_to_find = UUID(filter.param) return [w for w in workflows if w.id == id_to_find] if filter.name == "inserted_at_start": start_date = datetime.fromisoformat(filter.param) - return [w for w in workflows if w._item is not None and self._date_compare(w._item.inserted_at, start_date)] + return [ + w + for w in workflows + if w._element is not None and self._date_compare(w._element.inserted_at, start_date) + ] if filter.name == "inserted_at_end": end_date = datetime.fromisoformat(filter.param) - return [w for w in workflows if w._item is not None and self._date_compare(end_date, 
w._item.inserted_at)] + return [ + w for w in workflows if w._element is not None and self._date_compare(end_date, w._element.inserted_at) + ] if filter.name == "updated_at_start": start_date = datetime.fromisoformat(filter.param) - return [w for w in workflows if w._item is not None and self._date_compare(w._item.updated_at, start_date)] + return [ + w for w in workflows if w._element is not None and self._date_compare(w._element.updated_at, start_date) + ] if filter.name == "updated_at_end": end_date = datetime.fromisoformat(filter.param) - return [w for w in workflows if w._item is not None and self._date_compare(end_date, w._item.updated_at)] + return [ + w for w in workflows if w._element is not None and self._date_compare(end_date, w._element.updated_at) + ] if filter.name == "dataset_id": datasets_to_find_id: List[int] = [int(s) for s in filter.param.split(",")] return [ w for w in workflows - if w._item is not None - and w._item.dataset is not None - and int(w._item.dataset.id) in datasets_to_find_id + if w._element is not None + and w._element.dataset is not None + and int(w._element.dataset.id) in datasets_to_find_id ] if filter.name == "dataset_name": datasets_to_find_name: List[str] = [str(s) for s in filter.param.split(",")] - return [w for w in workflows if w._item is not None and str(w._item.dataset) in datasets_to_find_name] + return [w for w in workflows if w._element is not None and str(w._element.dataset) in datasets_to_find_name] if filter.name == "has_stages": stages_to_find = [s for s in filter.param.split(",")] return [ - w for w in workflows if w._item is not None and self._stages_contains(w._item.stages, stages_to_find) + w + for w in workflows + if w._element is not None and self._stages_contains(w._element.stages, stages_to_find) ] return self._generic_execute_filter(workflows, filter) @@ -83,6 +95,6 @@ def _date_compare(cls, date1: datetime, date2: datetime) -> bool: return date1.astimezone(timezone.utc) >= date2.astimezone(timezone.utc) @classmethod - def _stages_contains(cls, stages: List[WFStage], stages_to_find: List[str]) -> bool: + def _stages_contains(cls, stages: List[WFStageCore], stages_to_find: List[str]) -> bool: stage_ids = [str(s.id) for s in stages] return any(stage_to_find in stage_ids for stage_to_find in stages_to_find) diff --git a/darwin/future/tests/core/datasets/fixtures.py b/darwin/future/tests/core/datasets/fixtures.py index 0272f6b47..bbf044840 100644 --- a/darwin/future/tests/core/datasets/fixtures.py +++ b/darwin/future/tests/core/datasets/fixtures.py @@ -4,7 +4,7 @@ from pytest import fixture from requests import HTTPError -from darwin.future.core.client import Client +from darwin.future.core.client import ClientCore @fixture @@ -42,8 +42,8 @@ def basic_list_of_datasets() -> list: @fixture -def sad_http_client() -> Client: - mock = MagicMock(Client) +def sad_http_client() -> ClientCore: + mock = MagicMock(ClientCore) mock.post.side_effect = HTTPError("error") mock.get.side_effect = HTTPError("error") mock.delete.side_effect = HTTPError("error") @@ -52,8 +52,8 @@ def sad_http_client() -> Client: @fixture -def happy_post_client() -> Client: - mock_client = MagicMock(Client) +def happy_post_client() -> ClientCore: + mock_client = MagicMock(ClientCore) mock_client.post.return_value = { "name": "test-dataset", "slug": "1337", @@ -65,8 +65,8 @@ def happy_post_client() -> Client: @fixture -def happy_get_client() -> Client: - mock_client = MagicMock(Client) +def happy_get_client() -> ClientCore: + mock_client = MagicMock(ClientCore) 
mock_client.get.return_value = [ { "name": "test-dataset", @@ -80,5 +80,5 @@ def happy_get_client() -> Client: @fixture -def sad_client_pydantic() -> Client: - return MagicMock(Client, side_effect=ValidationError(["error1", "error2", "error3"], model=BaseModel)) +def sad_client_pydantic() -> ClientCore: + return MagicMock(ClientCore, side_effect=ValidationError(["error1", "error2", "error3"], model=BaseModel)) diff --git a/darwin/future/tests/core/datasets/test_create_dataset.py b/darwin/future/tests/core/datasets/test_create_dataset.py index 576220530..fd323fed1 100644 --- a/darwin/future/tests/core/datasets/test_create_dataset.py +++ b/darwin/future/tests/core/datasets/test_create_dataset.py @@ -4,16 +4,15 @@ from pytest import raises from requests import HTTPError -from darwin.future.core.client import Client -from darwin.future.core.datasets.create_dataset import create_dataset -from darwin.future.data_objects.dataset import Dataset -from darwin.future.exceptions.base import DarwinException +from darwin.future.core.client import ClientCore +from darwin.future.core.datasets import create_dataset +from darwin.future.data_objects.dataset import DatasetCore from darwin.future.tests.core.fixtures import * # noqa: F401, F403 from .fixtures import * # noqa: F401, F403 -def test_it_creates_a_dataset(basic_dataset: Dataset, base_client: Client) -> None: +def test_it_creates_a_dataset(basic_dataset: DatasetCore, base_client: ClientCore) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.POST, @@ -27,7 +26,7 @@ def test_it_creates_a_dataset(basic_dataset: Dataset, base_client: Client) -> No assert dataset.slug == "1337" -def test_it_raises_an_error_on_http_error(basic_dataset: Dataset, base_client: Client) -> None: +def test_it_raises_an_error_on_http_error(basic_dataset: DatasetCore, base_client: ClientCore) -> None: with raises(HTTPError): with responses.RequestsMock() as rsps: rsps.add( @@ -38,4 +37,3 @@ def test_it_raises_an_error_on_http_error(basic_dataset: Dataset, base_client: C ) create_dataset(base_client, "test-dataset") - diff --git a/darwin/future/tests/core/datasets/test_delete_dataset.py b/darwin/future/tests/core/datasets/test_delete_dataset.py index 48e9dcdfb..f93eeeb01 100644 --- a/darwin/future/tests/core/datasets/test_delete_dataset.py +++ b/darwin/future/tests/core/datasets/test_delete_dataset.py @@ -2,14 +2,14 @@ from pytest import raises from requests import HTTPError -from darwin.future.core.client import Client -from darwin.future.core.datasets.remove_dataset import remove_dataset +from darwin.future.core.client import ClientCore +from darwin.future.core.datasets import remove_dataset from darwin.future.tests.core.fixtures import * from .fixtures import * -def test_it_deletes_a_dataset(base_client: Client) -> None: +def test_it_deletes_a_dataset(base_client: ClientCore) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.PUT, @@ -25,7 +25,7 @@ def test_it_deletes_a_dataset(base_client: Client) -> None: assert output == 1337 -def test_it_throws_http_errors_returned_by_the_client(base_client: Client) -> None: +def test_it_throws_http_errors_returned_by_the_client(base_client: ClientCore) -> None: with raises(HTTPError): with responses.RequestsMock() as rsps: rsps.add( diff --git a/darwin/future/tests/core/datasets/test_get_dataset.py b/darwin/future/tests/core/datasets/test_get_dataset.py index 6ed1ebab3..7fbe51d73 100644 --- a/darwin/future/tests/core/datasets/test_get_dataset.py +++ b/darwin/future/tests/core/datasets/test_get_dataset.py @@ -3,15 
+3,15 @@ from pytest import raises from requests import HTTPError -from darwin.future.core.client import Client -from darwin.future.core.datasets.get_dataset import get_dataset -from darwin.future.data_objects.dataset import Dataset +from darwin.future.core.client import ClientCore +from darwin.future.core.datasets import get_dataset +from darwin.future.data_objects.dataset import DatasetCore from darwin.future.tests.core.fixtures import * from .fixtures import * -def test_it_gets_a_dataset(base_client: Client, basic_dataset: Dataset) -> None: +def test_it_gets_a_dataset(base_client: ClientCore, basic_dataset: DatasetCore) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.GET, @@ -26,7 +26,7 @@ def test_it_gets_a_dataset(base_client: Client, basic_dataset: Dataset) -> None: assert dataset.slug == "1337" -def test_it_raises_an_error_on_http_error(base_client: Client) -> None: +def test_it_raises_an_error_on_http_error(base_client: ClientCore) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.GET, @@ -39,6 +39,6 @@ def test_it_raises_an_error_on_http_error(base_client: Client) -> None: get_dataset(base_client, "test-dataset") -def test_it_raises_an_error_on_pydantic_error(sad_client_pydantic: Client) -> None: +def test_it_raises_an_error_on_pydantic_error(sad_client_pydantic: ClientCore) -> None: with raises(ValidationError): get_dataset(sad_client_pydantic, "test-dataset") diff --git a/darwin/future/tests/core/datasets/test_list_datasets.py b/darwin/future/tests/core/datasets/test_list_datasets.py index 798b8ed80..641b7331f 100644 --- a/darwin/future/tests/core/datasets/test_list_datasets.py +++ b/darwin/future/tests/core/datasets/test_list_datasets.py @@ -3,17 +3,15 @@ import responses from requests.exceptions import HTTPError -from darwin.future.core.client import Client -from darwin.future.core.datasets.list_datasets import list_datasets -from darwin.future.core.types import TeamSlug -from darwin.future.core.types.common import QueryString -from darwin.future.data_objects.dataset import Dataset +from darwin.future.core.client import ClientCore +from darwin.future.core.datasets import list_datasets +from darwin.future.data_objects.dataset import DatasetCore from darwin.future.tests.core.fixtures import * from .fixtures import * -def test_it_lists_datasets(base_client: Client, basic_list_of_datasets: List[Dataset]) -> None: +def test_it_lists_datasets(base_client: ClientCore, basic_list_of_datasets: List[DatasetCore]) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.GET, @@ -31,7 +29,7 @@ def test_it_lists_datasets(base_client: Client, basic_list_of_datasets: List[Dat assert datasets[0].slug == "1337" -def test_it_returns_an_error_if_the_client_returns_an_http_error(base_client: Client) -> None: +def test_it_returns_an_error_if_the_client_returns_an_http_error(base_client: ClientCore) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.GET, diff --git a/darwin/future/tests/core/fixtures.py b/darwin/future/tests/core/fixtures.py index 828c138a1..f929d191c 100644 --- a/darwin/future/tests/core/fixtures.py +++ b/darwin/future/tests/core/fixtures.py @@ -4,11 +4,11 @@ import orjson as json import pytest -from darwin.future.core.client import Client, DarwinConfig -from darwin.future.data_objects.dataset import Dataset -from darwin.future.data_objects.team import Team, TeamMember +from darwin.future.core.client import ClientCore, DarwinConfig +from darwin.future.data_objects.dataset import DatasetCore +from darwin.future.data_objects.team import 
TeamCore, TeamMemberCore from darwin.future.data_objects.team_member_role import TeamMemberRole -from darwin.future.data_objects.workflow import WFType +from darwin.future.data_objects.workflow import WFTypeCore @pytest.fixture @@ -24,8 +24,8 @@ def base_config() -> DarwinConfig: @pytest.fixture -def base_client(base_config: DarwinConfig) -> Client: - return Client(base_config) +def base_client(base_config: DarwinConfig) -> ClientCore: + return ClientCore(base_config) @pytest.fixture @@ -34,8 +34,8 @@ def base_team_json() -> dict: @pytest.fixture -def base_team(base_team_json: dict) -> Team: - return Team.parse_obj(base_team_json) +def base_team(base_team_json: dict) -> TeamCore: + return TeamCore.parse_obj(base_team_json) @pytest.fixture @@ -52,8 +52,8 @@ def base_team_member_json() -> dict: @pytest.fixture -def base_team_member(base_team_member_json: dict) -> TeamMember: - return TeamMember.parse_obj(base_team_member_json) +def base_team_member(base_team_member_json: dict) -> TeamMemberCore: + return TeamMemberCore.parse_obj(base_team_member_json) @pytest.fixture @@ -67,8 +67,8 @@ def base_team_members_json(base_team_member_json: dict) -> List[dict]: @pytest.fixture -def team_members(base_team_members_json: List[dict]) -> List[TeamMember]: - return [TeamMember.parse_obj(item) for item in base_team_members_json] +def team_members(base_team_members_json: List[dict]) -> List[TeamMemberCore]: + return [TeamMemberCore.parse_obj(item) for item in base_team_members_json] @pytest.fixture @@ -93,12 +93,12 @@ def base_dataset_json_with_releases() -> dict: @pytest.fixture -def base_dataset(base_dataset_json: dict) -> Dataset: - return Dataset.parse_obj(base_dataset_json) +def base_dataset(base_dataset_json: dict) -> DatasetCore: + return DatasetCore.parse_obj(base_dataset_json) -def base_dataset_with_releases(base_dataset_json_with_releases: dict) -> Dataset: - return Dataset.parse_obj(base_dataset_json_with_releases) +def base_dataset_with_releases(base_dataset_json_with_releases: dict) -> DatasetCore: + return DatasetCore.parse_obj(base_dataset_json_with_releases) @pytest.fixture diff --git a/darwin/future/tests/core/items/test_get_items.py b/darwin/future/tests/core/items/test_get_items.py index 91c000920..0bee40afd 100644 --- a/darwin/future/tests/core/items/test_get_items.py +++ b/darwin/future/tests/core/items/test_get_items.py @@ -3,29 +3,32 @@ import responses -from darwin.future.core.client import Client -from darwin.future.core.items.get import get_item_ids, get_item_ids_stage +from darwin.future.core.client import ClientCore +from darwin.future.core.items import get_item_ids, get_item_ids_stage from darwin.future.tests.core.fixtures import * from darwin.future.tests.core.items.fixtures import * -def test_get_item_ids(UUIDs: List[UUID], UUIDs_str: List[str], base_client: Client) -> None: +def test_get_item_ids(UUIDs: List[UUID], UUIDs_str: List[str], base_client: ClientCore) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.GET, - base_client.config.api_endpoint + f"v2/teams/default-team/items/ids?not_statuses=archived,error&sort[id]=desc&dataset_ids=1337", + base_client.config.api_endpoint + + f"v2/teams/default-team/items/ids?not_statuses=archived,error&sort[id]=desc&dataset_ids=1337", json={"item_ids": UUIDs_str}, status=200, ) item_ids = get_item_ids(base_client, "default-team", "1337") assert item_ids == UUIDs -def test_get_item_ids_stage(UUIDs: List[UUID], UUIDs_str: List[str], base_client: Client) -> None: + +def test_get_item_ids_stage(UUIDs: List[UUID], UUIDs_str: 
List[str], base_client: ClientCore) -> None: stage_id = str(uuid4()) with responses.RequestsMock() as rsps: rsps.add( rsps.GET, - base_client.config.api_endpoint + f"v2/teams/default-team/items/ids?workflow_stage_ids={stage_id}&dataset_ids=1337", + base_client.config.api_endpoint + + f"v2/teams/default-team/items/ids?workflow_stage_ids={stage_id}&dataset_ids=1337", json={"item_ids": UUIDs_str}, status=200, ) diff --git a/darwin/future/tests/core/items/test_move_items.py b/darwin/future/tests/core/items/test_move_items.py index b532d4ff0..e3ea7bea6 100644 --- a/darwin/future/tests/core/items/test_move_items.py +++ b/darwin/future/tests/core/items/test_move_items.py @@ -4,8 +4,8 @@ import pytest import responses -from darwin.future.core.client import Client -from darwin.future.core.items.move_items import move_items_to_stage +from darwin.future.core.client import ClientCore +from darwin.future.core.items import move_items_to_stage from darwin.future.tests.core.fixtures import * from darwin.future.tests.core.items.fixtures import * @@ -13,21 +13,29 @@ @pytest.fixture def move_payload(UUIDs_str: List[str], stage_id: UUID, workflow_id: UUID) -> Dict: return { - "filters": { - "dataset_ids": [1337], - "item_ids": UUIDs_str, - }, - "stage_id": str(stage_id), - "workflow_id": str(workflow_id), - } - -def test_move_items(base_client: Client, move_payload: Dict, stage_id: UUID, workflow_id: UUID, UUIDs_str: List[str], UUIDs: List[UUID]) -> None: + "filters": { + "dataset_ids": [1337], + "item_ids": UUIDs_str, + }, + "stage_id": str(stage_id), + "workflow_id": str(workflow_id), + } + + +def test_move_items( + base_client: ClientCore, + move_payload: Dict, + stage_id: UUID, + workflow_id: UUID, + UUIDs_str: List[str], + UUIDs: List[UUID], +) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.POST, base_client.config.api_endpoint + "v2/teams/default-team/items/stage", - json={"success": UUIDs_str}, + json={"success": UUIDs_str}, status=200, ) item_ids = move_items_to_stage(base_client, "default-team", workflow_id, 1337, stage_id, UUIDs) - assert rsps.assert_call_count(base_client.config.api_endpoint + "v2/teams/default-team/items/stage", 1) \ No newline at end of file + assert rsps.assert_call_count(base_client.config.api_endpoint + "v2/teams/default-team/items/stage", 1) diff --git a/darwin/future/tests/core/test_client.py b/darwin/future/tests/core/test_client.py index 30c5ab20c..2ee03ad2c 100644 --- a/darwin/future/tests/core/test_client.py +++ b/darwin/future/tests/core/test_client.py @@ -6,9 +6,8 @@ from pydantic import ValidationError from requests import HTTPError -from darwin.future.core.client import Client, DarwinConfig, TeamsConfig -from darwin.future.exceptions.base import DarwinException -from darwin.future.exceptions.client import NotFound, Unauthorized +from darwin.future.core.client import ClientCore, DarwinConfig, TeamsConfig +from darwin.future.exceptions import DarwinException, NotFound, Unauthorized from darwin.future.tests.core.fixtures import * from darwin.future.tests.fixtures import * @@ -46,7 +45,7 @@ def test_invalid_config_url_validation(base_url: str, tmp_path: Path) -> None: ) -def test_client(base_client: Client) -> None: +def test_client(base_client: ClientCore) -> None: assert base_client.config.api_key == "test_key" assert base_client.config.base_url == "http://test_url.com/" assert base_client.config.default_team == "default-team" @@ -93,7 +92,7 @@ def test_client(base_client: Client) -> None: "status_code, exception", [(401, Unauthorized), (404, 
NotFound)], ) -def test_client_raises_darwin(status_code: int, exception: DarwinException, base_client: Client) -> None: +def test_client_raises_darwin(status_code: int, exception: DarwinException, base_client: ClientCore) -> None: endpoint = base_client.config.api_endpoint + "test_endpoint" with responses.RequestsMock() as rsps: rsps.add(responses.GET, endpoint, json={"test": "test"}, status=status_code) @@ -117,7 +116,7 @@ def test_client_raises_darwin(status_code: int, exception: DarwinException, base base_client.patch("test_endpoint", {"test": "test"}) -def test_client_raises_generic(base_client: Client) -> None: +def test_client_raises_generic(base_client: ClientCore) -> None: endpoint = base_client.config.api_endpoint + "test_endpoint" status_code = 499 with responses.RequestsMock() as rsps: diff --git a/darwin/future/tests/core/test_query.py b/darwin/future/tests/core/test_query.py index 4f3024d35..5008c195f 100644 --- a/darwin/future/tests/core/test_query.py +++ b/darwin/future/tests/core/test_query.py @@ -2,10 +2,10 @@ import pytest -from darwin import item -from darwin.future.core.client import Client +from darwin.future.core.client import ClientCore from darwin.future.core.types import query as Query -from darwin.future.data_objects.team import Team +from darwin.future.data_objects.team import TeamCore +from darwin.future.exceptions import InvalidQueryFilter, MoreThanOneResultFound from darwin.future.tests.core.fixtures import * @@ -29,19 +29,19 @@ def basic_filters() -> List[Query.QueryFilter]: @pytest.fixture -def test_team() -> Team: - return Team(slug="test-team", id=0) +def test_team() -> TeamCore: + return TeamCore(name="test-team", slug="test-team", id=0) def test_query_instantiated( - base_client: Client, basic_filters: List[Query.QueryFilter], non_abc_query: Type[Query.Query] + base_client: ClientCore, basic_filters: List[Query.QueryFilter], non_abc_query: Type[Query.Query] ) -> None: q = non_abc_query(base_client, basic_filters) assert q.filters == basic_filters def test_query_filter_functionality( - base_client: Client, basic_filters: List[Query.QueryFilter], non_abc_query: Type[Query.Query] + base_client: ClientCore, basic_filters: List[Query.QueryFilter], non_abc_query: Type[Query.Query] ) -> None: q = non_abc_query(base_client) for f in basic_filters: @@ -120,23 +120,24 @@ def test_QF_from_asteriks() -> None: assert QF[1].modifier == Query.Modifier("!=") # fails on bad args - with pytest.raises(ValueError): + with pytest.raises(InvalidQueryFilter): Query.QueryFilter._from_args({}) Query.QueryFilter._from_args([]) Query.QueryFilter._from_args(1, 2, 3) -def test_query_first(non_abc_query: Type[Query.Query], base_client: Client) -> None: + +def test_query_first(non_abc_query: Type[Query.Query], base_client: ClientCore) -> None: query = non_abc_query(base_client) query.results = [1, 2, 3] first = query.first() assert first == 1 - -def test_query_collect_one(non_abc_query: Type[Query.Query], base_client: Client) -> None: + + +def test_query_collect_one(non_abc_query: Type[Query.Query], base_client: ClientCore) -> None: query = non_abc_query(base_client) query.results = [1, 2, 3] - with pytest.raises(ValueError): + with pytest.raises(MoreThanOneResultFound): query.collect_one() - + query.results = [1] assert query.collect_one() == 1 - diff --git a/darwin/future/tests/core/workflows/test_get_workflow.py b/darwin/future/tests/core/workflows/test_get_workflow.py index dad28fe08..c0f443635 100644 --- a/darwin/future/tests/core/workflows/test_get_workflow.py +++ 
b/darwin/future/tests/core/workflows/test_get_workflow.py @@ -3,14 +3,15 @@ from pydantic import ValidationError from requests import HTTPError -from darwin.future.core.client import Client, JSONType -from darwin.future.core.workflows.get_workflow import get_workflow -from darwin.future.data_objects.workflow import Workflow +from darwin.future.core.client import ClientCore +from darwin.future.core.types.common import JSONType +from darwin.future.core.workflows import get_workflow +from darwin.future.data_objects.workflow import WorkflowCore from darwin.future.tests.core.fixtures import * @responses.activate -def test_get_workflow(base_client: Client, base_single_workflow_object: JSONType) -> None: +def test_get_workflow(base_client: ClientCore, base_single_workflow_object: JSONType) -> None: # Mocking the response using responses library response_data = base_single_workflow_object workflow_id = "1" @@ -25,12 +26,12 @@ def test_get_workflow(base_client: Client, base_single_workflow_object: JSONType workflow, exceptions = get_workflow(base_client, workflow_id) # Assertions - assert isinstance(workflow, Workflow) + assert isinstance(workflow, WorkflowCore) assert not exceptions @responses.activate -def test_get_workflow_with_team_slug(base_client: Client, base_single_workflow_object: JSONType) -> None: +def test_get_workflow_with_team_slug(base_client: ClientCore, base_single_workflow_object: JSONType) -> None: # Mocking the response using responses library team_slug = "team-slug" workflow_id = "1" @@ -47,12 +48,12 @@ def test_get_workflow_with_team_slug(base_client: Client, base_single_workflow_o workflow, exceptions = get_workflow(base_client, workflow_id, team_slug) # Assertions - assert isinstance(workflow, Workflow) + assert isinstance(workflow, WorkflowCore) assert not exceptions @responses.activate -def test_get_workflows_with_invalid_response(base_client: Client) -> None: +def test_get_workflows_with_invalid_response(base_client: ClientCore) -> None: # Mocking the response using responses library # fmt: off NON_EXISTENT_ID = "1" @@ -74,7 +75,7 @@ def test_get_workflows_with_invalid_response(base_client: Client) -> None: @responses.activate -def test_get_workflows_with_error(base_client: Client) -> None: +def test_get_workflows_with_error(base_client: ClientCore) -> None: # Mocking the response using responses library # fmt: off NON_EXISTENT_ID = "1" diff --git a/darwin/future/tests/core/workflows/test_get_workflows.py b/darwin/future/tests/core/workflows/test_get_workflows.py index 22d448c2b..0c6fb751e 100644 --- a/darwin/future/tests/core/workflows/test_get_workflows.py +++ b/darwin/future/tests/core/workflows/test_get_workflows.py @@ -5,14 +5,15 @@ from pydantic import ValidationError from requests import HTTPError -from darwin.future.core.client import Client, JSONType -from darwin.future.core.workflows.get_workflows import get_workflows -from darwin.future.data_objects.workflow import Workflow +from darwin.future.core.client import ClientCore +from darwin.future.core.types.common import JSONType +from darwin.future.core.workflows import get_workflows +from darwin.future.data_objects.workflow import WorkflowCore from darwin.future.tests.core.fixtures import * @responses.activate -def test_get_workflows(base_client: Client, base_workflows_object: str) -> None: +def test_get_workflows(base_client: ClientCore, base_workflows_object: str) -> None: # Mocking the response using responses library response_data = base_workflows_object responses.add( @@ -28,11 +29,11 @@ def 
test_get_workflows(base_client: Client, base_workflows_object: str) -> None: # Assertions assert isinstance(workflows, List) assert len(workflows) == 3 - assert all(isinstance(workflow, Workflow) for workflow in workflows) + assert all(isinstance(workflow, WorkflowCore) for workflow in workflows) @responses.activate -def test_get_workflows_with_team_slug(base_client: Client, base_workflows_object: JSONType) -> None: +def test_get_workflows_with_team_slug(base_client: ClientCore, base_workflows_object: JSONType) -> None: # Mocking the response using responses library team_slug = "team-slug" response_data = base_workflows_object @@ -49,11 +50,11 @@ def test_get_workflows_with_team_slug(base_client: Client, base_workflows_object # Assertions assert isinstance(workflows, List) assert len(workflows) == len(response_data) - assert all(isinstance(workflow, Workflow) for workflow in workflows) + assert all(isinstance(workflow, WorkflowCore) for workflow in workflows) @responses.activate -def test_get_workflows_with_invalid_response(base_client: Client) -> None: +def test_get_workflows_with_invalid_response(base_client: ClientCore) -> None: # Mocking the response using responses library responses.add( responses.GET, @@ -68,7 +69,7 @@ def test_get_workflows_with_invalid_response(base_client: Client) -> None: @responses.activate -def test_get_workflows_with_error(base_client: Client) -> None: +def test_get_workflows_with_error(base_client: ClientCore) -> None: # Mocking the response using responses library responses.add( responses.GET, diff --git a/darwin/future/tests/core/workflows/test_list_workflows.py b/darwin/future/tests/core/workflows/test_list_workflows.py index 39f6f74bf..c959cea1d 100644 --- a/darwin/future/tests/core/workflows/test_list_workflows.py +++ b/darwin/future/tests/core/workflows/test_list_workflows.py @@ -5,14 +5,15 @@ from pydantic import ValidationError from requests import HTTPError -from darwin.future.core.client import Client, JSONType -from darwin.future.core.workflows.list_workflows import list_workflows -from darwin.future.data_objects.workflow import Workflow +from darwin.future.core.client import ClientCore +from darwin.future.core.types.common import JSONType +from darwin.future.core.workflows import list_workflows +from darwin.future.data_objects.workflow import WorkflowCore from darwin.future.tests.core.fixtures import * @responses.activate -def test_list_workflows(base_client: Client, base_workflows_object: str) -> None: +def test_list_workflows(base_client: ClientCore, base_workflows_object: str) -> None: # Mocking the response using responses library response_data = base_workflows_object responses.add( @@ -28,13 +29,13 @@ def test_list_workflows(base_client: Client, base_workflows_object: str) -> None # Assertions assert isinstance(workflows, List) assert len(workflows) == 3 - assert all(isinstance(workflow, Workflow) for workflow in workflows) + assert all(isinstance(workflow, WorkflowCore) for workflow in workflows) assert not exceptions @responses.activate -def test_list_workflows_with_team_slug(base_client: Client, base_workflows_object: JSONType) -> None: +def test_list_workflows_with_team_slug(base_client: ClientCore, base_workflows_object: JSONType) -> None: # Mocking the response using responses library team_slug = "team-slug" response_data = base_workflows_object @@ -51,13 +52,13 @@ def test_list_workflows_with_team_slug(base_client: Client, base_workflows_objec # Assertions assert isinstance(workflows, List) assert len(workflows) == len(response_data) - 
assert all(isinstance(workflow, Workflow) for workflow in workflows) + assert all(isinstance(workflow, WorkflowCore) for workflow in workflows) assert not exceptions @responses.activate -def test_list_workflows_with_invalid_response(base_client: Client) -> None: +def test_list_workflows_with_invalid_response(base_client: ClientCore) -> None: # Mocking the response using responses library responses.add( responses.GET, @@ -77,7 +78,7 @@ def test_list_workflows_with_invalid_response(base_client: Client) -> None: @responses.activate -def test_list_workflows_with_error(base_client: Client) -> None: +def test_list_workflows_with_error(base_client: ClientCore) -> None: # Mocking the response using responses library responses.add( responses.GET, diff --git a/darwin/future/tests/data_objects/fixtures.py b/darwin/future/tests/data_objects/fixtures.py index b70274ea9..8f0849e3a 100644 --- a/darwin/future/tests/data_objects/fixtures.py +++ b/darwin/future/tests/data_objects/fixtures.py @@ -4,7 +4,7 @@ import pytest -from darwin.future.data_objects.workflow import WFStage, Workflow +from darwin.future.data_objects.workflow import WFStageCore, WorkflowCore test_data_path: Path = Path(__file__).parent / "workflow" / "data" valid_stage_json = test_data_path / "stage.json" diff --git a/darwin/future/tests/data_objects/test_general_darwin_objects.py b/darwin/future/tests/data_objects/test_general_darwin_objects.py index 575c2769b..0a9382d39 100644 --- a/darwin/future/tests/data_objects/test_general_darwin_objects.py +++ b/darwin/future/tests/data_objects/test_general_darwin_objects.py @@ -3,14 +3,14 @@ import pytest from pydantic import BaseModel, ValidationError -from darwin.future.data_objects.dataset import Dataset -from darwin.future.data_objects.release import Release -from darwin.future.data_objects.team import Team +from darwin.future.data_objects.dataset import DatasetCore +from darwin.future.data_objects.release import ReleaseCore +from darwin.future.data_objects.team import TeamCore from darwin.future.tests.data_objects.fixtures import * def test_integrated_parsing_works_with_raw(basic_combined: dict) -> None: - team = Team.parse_obj(basic_combined) + team = TeamCore.parse_obj(basic_combined) assert team.slug == "test-team" assert team.datasets is not None assert team.datasets[0].name == "test-dataset" @@ -20,10 +20,10 @@ def test_integrated_parsing_works_with_raw(basic_combined: dict) -> None: def test_broken_obj_raises(broken_combined: dict) -> None: with pytest.raises(ValidationError) as e_info: - broken = Team.parse_obj(broken_combined) + broken = TeamCore.parse_obj(broken_combined) -@pytest.mark.parametrize("test_object", [Team, Dataset, Release]) +@pytest.mark.parametrize("test_object", [TeamCore, DatasetCore, ReleaseCore]) def test_empty_obj_raises(test_object: BaseModel) -> None: with pytest.raises(ValidationError) as e_info: broken = test_object.parse_obj({}) diff --git a/darwin/future/tests/data_objects/test_team.py b/darwin/future/tests/data_objects/test_team.py index b06464d34..1e17bed9e 100644 --- a/darwin/future/tests/data_objects/test_team.py +++ b/darwin/future/tests/data_objects/test_team.py @@ -4,13 +4,14 @@ import responses from pydantic import ValidationError -from darwin.future.core.client import Client -from darwin.future.data_objects.team import Team, TeamMember, get_team, get_team_members +from darwin.future.core.client import ClientCore +from darwin.future.core.team.get_team import get_team, get_team_members +from darwin.future.data_objects.team import TeamCore, 
TeamMemberCore from darwin.future.tests.core.fixtures import * from darwin.future.tests.fixtures import * -def test_get_team_returns_valid_team(base_client: Client, base_team_json: dict, base_team: Team) -> None: +def test_get_team_returns_valid_team(base_client: ClientCore, base_team_json: dict, base_team: TeamCore) -> None: slug = "test-slug" endpoint = base_client.config.api_endpoint + f"teams/{slug}" with responses.RequestsMock() as rsps: @@ -20,7 +21,7 @@ def test_get_team_returns_valid_team(base_client: Client, base_team_json: dict, assert team == base_team -def test_get_team_fails_on_incorrect_input(base_client: Client, base_team: Team) -> None: +def test_get_team_fails_on_incorrect_input(base_client: ClientCore, base_team: TeamCore) -> None: slug = "test-slug" endpoint = base_client.config.api_endpoint + f"teams/{slug}" with responses.RequestsMock() as rsps: @@ -30,8 +31,8 @@ def test_get_team_fails_on_incorrect_input(base_client: Client, base_team: Team) team = get_team(base_client, slug) -def test_get_team_members_returns_valid_list(base_client: Client, base_team_member_json: dict) -> None: - synthetic_list = [TeamMember.parse_obj(base_team_member_json), TeamMember.parse_obj(base_team_member_json)] +def test_get_team_members_returns_valid_list(base_client: ClientCore, base_team_member_json: dict) -> None: + synthetic_list = [TeamMemberCore.parse_obj(base_team_member_json), TeamMemberCore.parse_obj(base_team_member_json)] endpoint = base_client.config.api_endpoint + "memberships" with responses.RequestsMock() as rsps: rsps.add(responses.GET, endpoint, json=[base_team_member_json, base_team_member_json]) @@ -42,7 +43,7 @@ def test_get_team_members_returns_valid_list(base_client: Client, base_team_memb assert members == synthetic_list -def test_get_team_members_fails_on_incorrect_input(base_client: Client, base_team_member_json: dict) -> None: +def test_get_team_members_fails_on_incorrect_input(base_client: ClientCore, base_team_member_json: dict) -> None: endpoint = base_client.config.api_endpoint + "memberships" with responses.RequestsMock() as rsps: rsps.add(responses.GET, endpoint, json=[base_team_member_json, {}]) @@ -51,10 +52,10 @@ def test_get_team_members_fails_on_incorrect_input(base_client: Client, base_tea assert len(members) == 1 assert len(errors) == 1 assert isinstance(errors[0], ValidationError) - assert isinstance(members[0], TeamMember) + assert isinstance(members[0], TeamMemberCore) -def test_team_from_client(base_client: Client, base_team_json: dict, base_team: Team) -> None: +def test_team_from_client(base_client: ClientCore, base_team_json: dict, base_team: TeamCore) -> None: with responses.RequestsMock() as rsps: rsps.add( responses.GET, @@ -62,5 +63,5 @@ def test_team_from_client(base_client: Client, base_team_json: dict, base_team: json=base_team_json, ) - team = Team.from_client(base_client) + team = TeamCore.from_client(base_client) assert team == base_team diff --git a/darwin/future/tests/data_objects/workflow/test_wfdataset.py b/darwin/future/tests/data_objects/workflow/test_wfdataset.py index 7a10b5acb..0126853dd 100644 --- a/darwin/future/tests/data_objects/workflow/test_wfdataset.py +++ b/darwin/future/tests/data_objects/workflow/test_wfdataset.py @@ -3,7 +3,7 @@ import pytest from pydantic import ValidationError -from darwin.future.data_objects.workflow import WFDataset +from darwin.future.data_objects.workflow import WFDatasetCore from darwin.future.tests.data_objects.workflow.invalidvaluefortest import ( InvalidValueForTest, ) @@ -19,22 +19,22 @@ 
def test_file_exists() -> None:


 def test_WFDataset_validates_from_valid_json() -> None:
-    WFDataset.parse_file(validate_dataset_json)
+    WFDatasetCore.parse_file(validate_dataset_json)
     assert True


 def test_cast_to_int_returns_dataset_id() -> None:
-    dataset = WFDataset.parse_file(validate_dataset_json)
+    dataset = WFDatasetCore.parse_file(validate_dataset_json)
     assert dataset.id == 101


 def test_cast_to_str_returns_dataset_name() -> None:
-    dataset = WFDataset.parse_file(validate_dataset_json)
+    dataset = WFDatasetCore.parse_file(validate_dataset_json)
     assert dataset.name == "Test Dataset"


 def test_sad_paths() -> None:
-    dataset = WFDataset.parse_file(validate_dataset_json)
+    dataset = WFDatasetCore.parse_file(validate_dataset_json)
     fields = ["id", "name", "instructions"]

     # Test missing fields
@@ -42,16 +42,16 @@ def test_sad_paths() -> None:
         with pytest.raises(ValidationError) as excinfo:
             working_dataset = dataset.copy().dict()
             del working_dataset[key]
-            WFDataset.parse_obj(working_dataset)
+            WFDatasetCore.parse_obj(working_dataset)

         assert "value_error.missing" in (err_string := str(excinfo.value))
-        assert err_string.startswith(f"1 validation error for WFDataset\n{key}")
+        assert err_string.startswith(f"1 validation error for WFDatasetCore\n{key}")

     # Test invalid types
     for key in fields:
         with pytest.raises(ValidationError) as excinfo:
             working_dataset = dataset.copy().dict()
             working_dataset[key] = InvalidValueForTest()
-            WFDataset.parse_obj(working_dataset)
+            WFDatasetCore.parse_obj(working_dataset)

-    assert str(excinfo.value).startswith(f"1 validation error for WFDataset\n{key}")
+    assert str(excinfo.value).startswith(f"1 validation error for WFDatasetCore\n{key}")
diff --git a/darwin/future/tests/data_objects/workflow/test_wfedge.py b/darwin/future/tests/data_objects/workflow/test_wfedge.py
index d16872e5f..c3d0c7b8c 100644
--- a/darwin/future/tests/data_objects/workflow/test_wfedge.py
+++ b/darwin/future/tests/data_objects/workflow/test_wfedge.py
@@ -2,7 +2,7 @@
 import pytest

-from darwin.future.data_objects.workflow import WFEdge
+from darwin.future.data_objects.workflow import WFEdgeCore

 test_data_path: Path = Path(__file__).parent / "data"
 validate_json = test_data_path / "edge.json"
@@ -15,6 +15,6 @@ def test_file_exists() -> None:


 def test_WFEdge_validates_from_valid_json() -> None:
-    parsed_edge = WFEdge.parse_file(validate_json)
+    parsed_edge = WFEdgeCore.parse_file(validate_json)

-    assert isinstance(parsed_edge, WFEdge)
+    assert isinstance(parsed_edge, WFEdgeCore)
diff --git a/darwin/future/tests/data_objects/workflow/test_wfstage.py b/darwin/future/tests/data_objects/workflow/test_wfstage.py
index b103550e3..a03613aaf 100644
--- a/darwin/future/tests/data_objects/workflow/test_wfstage.py
+++ b/darwin/future/tests/data_objects/workflow/test_wfstage.py
@@ -4,7 +4,7 @@
 import pytest

-from darwin.future.data_objects.workflow import WFStage
+from darwin.future.data_objects.workflow import WFStageCore
 from darwin.future.tests.data_objects.fixtures import test_data_path

 validate_json = test_data_path / "stage.json"
@@ -17,12 +17,12 @@ def test_file_exists() -> None:


 def test_WFStage_validates_from_valid_json() -> None:
-    WFStage.parse_file(validate_json)
+    WFStageCore.parse_file(validate_json)
     assert True


 def test_casts_strings_to_uuids_as_needed() -> None:
-    parsed_stage = WFStage.parse_file(validate_json)
+    parsed_stage = WFStageCore.parse_file(validate_json)
     assert isinstance(parsed_stage.id, UUID)
     assert str(parsed_stage.id) == "e69d3ebe-6ab9-4159-b44f-2bf84d29bb20"
@@ -32,7 +32,7
@@ def test_raises_with_invalid_uuid() -> None: dict_from_json["id"] = "not-a-uuid" with pytest.raises(ValueError) as excinfo: - WFStage.parse_obj(dict_from_json) + WFStageCore.parse_obj(dict_from_json) assert "not a valid uuid" in str(excinfo.value) - assert str(excinfo.value).startswith("1 validation error for WFStage\nid") + assert str(excinfo.value).startswith("1 validation error for WFStageCore\nid") diff --git a/darwin/future/tests/data_objects/workflow/test_wfstage_config.py b/darwin/future/tests/data_objects/workflow/test_wfstage_config.py index efb726f3c..7a5ad2ef3 100644 --- a/darwin/future/tests/data_objects/workflow/test_wfstage_config.py +++ b/darwin/future/tests/data_objects/workflow/test_wfstage_config.py @@ -2,7 +2,7 @@ import pytest -from darwin.future.data_objects.workflow import WFStageConfig +from darwin.future.data_objects.workflow import WFStageConfigCore test_data_path: Path = Path(__file__).parent / "data" validate_json = test_data_path / "stage_config.json" @@ -15,6 +15,6 @@ def test_file_exists() -> None: def test_WFStageConfig_validates_from_valid_json() -> None: - parsed_stage_config = WFStageConfig.parse_file(validate_json) + parsed_stage_config = WFStageConfigCore.parse_file(validate_json) - assert isinstance(parsed_stage_config, WFStageConfig) + assert isinstance(parsed_stage_config, WFStageConfigCore) diff --git a/darwin/future/tests/data_objects/workflow/test_wfuser.py b/darwin/future/tests/data_objects/workflow/test_wfuser.py index 95df22638..8619a73e7 100644 --- a/darwin/future/tests/data_objects/workflow/test_wfuser.py +++ b/darwin/future/tests/data_objects/workflow/test_wfuser.py @@ -2,7 +2,7 @@ import pytest -from darwin.future.data_objects.workflow import WFUser +from darwin.future.data_objects.workflow import WFUserCore test_data_path: Path = Path(__file__).parent / "data" validate_json = test_data_path / "user.json" @@ -15,8 +15,8 @@ def test_file_exists() -> None: def test_WFUser_validates_from_valid_json() -> None: - parsed_user = WFUser.parse_file(validate_json) + parsed_user = WFUserCore.parse_file(validate_json) - assert isinstance(parsed_user, WFUser) + assert isinstance(parsed_user, WFUserCore) assert parsed_user.user_id == 100 assert str(parsed_user.stage_id) == "0fa1ae43-fb46-44d7-bf85-b78e81d0d02f" diff --git a/darwin/future/tests/data_objects/workflow/test_workflow.py b/darwin/future/tests/data_objects/workflow/test_workflow.py index ac6fa27e4..23221d6df 100644 --- a/darwin/future/tests/data_objects/workflow/test_workflow.py +++ b/darwin/future/tests/data_objects/workflow/test_workflow.py @@ -2,7 +2,7 @@ from pathlib import Path from uuid import UUID -from darwin.future.data_objects.workflow import WFDataset, WFStage, Workflow +from darwin.future.data_objects.workflow import WFDatasetCore, WFStageCore, WorkflowCore test_data_path: Path = Path(__file__).parent / "data" validate_json = test_data_path / "workflow.json" @@ -15,16 +15,16 @@ def test_file_exists() -> None: def test_Workflow_validates_from_valid_json() -> None: - parsed_set = Workflow.parse_file(validate_json) + parsed_set = WorkflowCore.parse_file(validate_json) - assert isinstance(parsed_set, Workflow) + assert isinstance(parsed_set, WorkflowCore) assert isinstance(parsed_set.id, UUID) assert isinstance(parsed_set.name, str) assert isinstance(parsed_set.team_id, int) assert isinstance(parsed_set.stages, list) - assert all(isinstance(i, WFStage) for i in parsed_set.stages) - assert isinstance(parsed_set.dataset, WFDataset) + assert all(isinstance(i, WFStageCore) for i in 
parsed_set.stages) + assert isinstance(parsed_set.dataset, WFDatasetCore) assert isinstance(parsed_set.inserted_at, datetime) assert isinstance(parsed_set.updated_at, datetime) diff --git a/darwin/future/tests/meta/fixtures.py b/darwin/future/tests/meta/fixtures.py index c0e3b1af6..0a78cc9e0 100644 --- a/darwin/future/tests/meta/fixtures.py +++ b/darwin/future/tests/meta/fixtures.py @@ -1,10 +1,10 @@ from pytest import fixture, raises from darwin.future.core.client import DarwinConfig -from darwin.future.meta.client import MetaClient +from darwin.future.meta.client import Client from darwin.future.tests.core.fixtures import * @fixture -def base_meta_client(base_config: DarwinConfig) -> MetaClient: - return MetaClient(base_config) \ No newline at end of file +def base_meta_client(base_config: DarwinConfig) -> Client: + return Client(base_config) diff --git a/darwin/future/tests/meta/objects/fixtures.py b/darwin/future/tests/meta/objects/fixtures.py index 6a52f3a1c..fd84327a0 100644 --- a/darwin/future/tests/meta/objects/fixtures.py +++ b/darwin/future/tests/meta/objects/fixtures.py @@ -3,13 +3,13 @@ from pytest import fixture, raises -from darwin.future.core.client import Client -from darwin.future.data_objects.team import Team -from darwin.future.data_objects.workflow import WFStage, Workflow +from darwin.future.core.client import ClientCore +from darwin.future.data_objects.team import TeamCore +from darwin.future.data_objects.workflow import WFStageCore, WorkflowCore from darwin.future.meta.objects import stage -from darwin.future.meta.objects.stage import StageMeta -from darwin.future.meta.objects.team import TeamMeta -from darwin.future.meta.objects.workflow import WorkflowMeta +from darwin.future.meta.objects.stage import Stage +from darwin.future.meta.objects.team import Team +from darwin.future.meta.objects.workflow import Workflow from darwin.future.tests.core.fixtures import * @@ -19,20 +19,20 @@ def base_UUID() -> UUID: @fixture -def base_meta_team(base_client: Client, base_team: Team) -> TeamMeta: - return TeamMeta(base_client, base_team) +def base_meta_team(base_client: ClientCore, base_team: TeamCore) -> Team: + return Team(base_client, base_team) @fixture -def base_meta_workflow(base_client: Client, base_workflow: Workflow) -> WorkflowMeta: - return WorkflowMeta(base_client, base_workflow) +def base_meta_workflow(base_client: ClientCore, base_workflow: WorkflowCore) -> Workflow: + return Workflow(base_client, base_workflow) @fixture -def base_meta_stage(base_client: Client, base_stage: WFStage, base_UUID: UUID) -> StageMeta: - return StageMeta(base_client, base_stage, base_UUID) +def base_meta_stage(base_client: ClientCore, base_stage: WFStageCore, base_UUID: UUID) -> Stage: + return Stage(base_client, base_stage) @fixture -def base_meta_stage_list(base_meta_stage: StageMeta, base_UUID: UUID) -> List[StageMeta]: +def base_meta_stage_list(base_meta_stage: Stage, base_UUID: UUID) -> List[Stage]: return [base_meta_stage] diff --git a/darwin/future/tests/meta/objects/test_datasetmeta.py b/darwin/future/tests/meta/objects/test_datasetmeta.py index 5e02da744..8f5726d62 100644 --- a/darwin/future/tests/meta/objects/test_datasetmeta.py +++ b/darwin/future/tests/meta/objects/test_datasetmeta.py @@ -5,20 +5,20 @@ from responses import RequestsMock from darwin.future.core.client import DarwinConfig -from darwin.future.meta.client import MetaClient -from darwin.future.meta.objects.dataset import DatasetMeta +from darwin.future.meta.client import Client +from 
darwin.future.meta.objects.dataset import Dataset from darwin.future.tests.core.fixtures import * @fixture def _delete_by_slug_mock(): # type: ignore - with patch.object(DatasetMeta, "_delete_by_slug") as mock: + with patch.object(Dataset, "_delete_by_slug") as mock: yield mock @fixture def _delete_by_id_mock(): # type: ignore - with patch.object(DatasetMeta, "_delete_by_id") as mock: + with patch.object(Dataset, "_delete_by_id") as mock: yield mock @@ -31,16 +31,15 @@ def _delete_by_id_mock(): # type: ignore # `create_dataset` tests def test_create_dataset_returns_exceptions_thrown(base_config: DarwinConfig) -> None: - valid_client = MetaClient(base_config) + valid_client = Client(base_config) valid_slug = "test_dataset" base_url = base_config.base_url + "api/datasets" with RequestsMock() as rsps: rsps.add(rsps.POST, base_url, status=500) - dataset_meta = DatasetMeta(valid_client) - exceptions, dataset_created = dataset_meta.create_dataset(valid_slug) + exceptions, dataset_created = Dataset.create_dataset(valid_client, valid_slug) assert exceptions is not None assert "500 Server Error" in str(exceptions[0]) @@ -48,7 +47,7 @@ def test_create_dataset_returns_exceptions_thrown(base_config: DarwinConfig) -> def test_create_dataset_returns_dataset_created_if_dataset_created(base_config: DarwinConfig) -> None: - valid_client = MetaClient(base_config) + valid_client = Client(base_config) valid_slug = "test_dataset" base_url = base_config.base_url + "api/datasets" @@ -60,9 +59,8 @@ def test_create_dataset_returns_dataset_created_if_dataset_created(base_config: json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, status=201, ) - dataset_meta = DatasetMeta(valid_client) - exceptions, dataset_created = dataset_meta.create_dataset(valid_slug) + exceptions, dataset_created = Dataset.create_dataset(valid_client, valid_slug) assert exceptions is None assert dataset_created is not None @@ -81,10 +79,9 @@ def test_delete_dataset_returns_exceptions_thrown( ) -> None: _delete_by_slug_mock.side_effect = Exception("test exception") - client = MetaClient(base_config) - dataset_meta = DatasetMeta(client) + valid_client = Client(base_config) - exceptions, dataset_deleted = dataset_meta.delete_dataset("test_dataset") + exceptions, dataset_deleted = Dataset.delete_dataset(valid_client, "test_dataset") assert exceptions is not None assert str(exceptions[0]) == "test exception" @@ -97,10 +94,9 @@ def test_delete_dataset_returns_exceptions_thrown( def test_delete_dataset_calls_delete_by_slug_as_appropriate( base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock ) -> None: - client = MetaClient(base_config) - dataset_meta = DatasetMeta(client) + valid_client = Client(base_config) - exceptions, _ = dataset_meta.delete_dataset("test_dataset") + exceptions, _ = Dataset.delete_dataset(valid_client, "test_dataset") assert exceptions is None assert _delete_by_slug_mock.call_count == 1 @@ -110,10 +106,9 @@ def test_delete_dataset_calls_delete_by_slug_as_appropriate( def test_delete_dataset_calls_delete_by_id_as_appropriate( base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock ) -> None: - client = MetaClient(base_config) - dataset_meta = DatasetMeta(client) + valid_client = Client(base_config) - exceptions, _ = dataset_meta.delete_dataset(1) + exceptions, _ = Dataset.delete_dataset(valid_client, 1) assert exceptions is None assert _delete_by_slug_mock.call_count == 0 @@ -122,20 +117,20 @@ def test_delete_dataset_calls_delete_by_id_as_appropriate( # Test 
`_delete_by_slug` def test_delete_by_slug_raises_exception_if_not_passed_str_and_client(base_config: DarwinConfig) -> None: - valid_client = MetaClient(base_config) + valid_client = Client(base_config) valid_slug = "test_dataset" invalid_client = "client" invalid_slug = 1 with raises(AssertionError): - DatasetMeta._delete_by_slug(valid_client, invalid_slug) # type: ignore + Dataset._delete_by_slug(valid_client, invalid_slug) # type: ignore with raises(AssertionError): - DatasetMeta._delete_by_slug(invalid_client, valid_slug) # type: ignore + Dataset._delete_by_slug(invalid_client, valid_slug) # type: ignore def test_delete_by_slug__returns_dataset_deleted_if_dataset_found(base_config: DarwinConfig) -> None: - valid_client = MetaClient(base_config) + valid_client = Client(base_config) valid_slug = "test_dataset" base_url = base_config.base_url + "api/datasets" @@ -153,27 +148,27 @@ def test_delete_by_slug__returns_dataset_deleted_if_dataset_found(base_config: D json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, status=200, ) - dataset_deleted = DatasetMeta._delete_by_slug(valid_client, valid_slug) + dataset_deleted = Dataset._delete_by_slug(valid_client, valid_slug) assert dataset_deleted == 1 # Test `_delete_by_id` def test_delete_by_id_raises_exception_if_not_passed_int_and_client(base_config: DarwinConfig) -> None: - valid_client = MetaClient(base_config) + valid_client = Client(base_config) valid_id = 1 invalid_client = "client" invalid_id = "1" with raises(AssertionError): - DatasetMeta._delete_by_id(valid_client, invalid_id) # type: ignore + Dataset._delete_by_id(valid_client, invalid_id) # type: ignore with raises(AssertionError): - DatasetMeta._delete_by_id(invalid_client, valid_id) # type: ignore + Dataset._delete_by_id(invalid_client, valid_id) # type: ignore def test_delete_by_id_returns_dataset_deleted_if_dataset_found(base_config: DarwinConfig) -> None: - valid_client = MetaClient(base_config) + valid_client = Client(base_config) valid_id = 1 base_url = base_config.base_url + "api/datasets" @@ -185,7 +180,7 @@ def test_delete_by_id_returns_dataset_deleted_if_dataset_found(base_config: Darw json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, status=200, ) - dataset_deleted = DatasetMeta._delete_by_id(valid_client, valid_id) + dataset_deleted = Dataset._delete_by_id(valid_client, valid_id) assert dataset_deleted == 1 @@ -196,10 +191,10 @@ def test_delete_by_id_returns_dataset_deleted_if_dataset_found(base_config: Darw ) def test_validate_slugh_raises_exception_if_passed_invalid_inputs(invalid_slug: str) -> None: with raises(AssertionError): - DatasetMeta._validate_slug(invalid_slug) + Dataset._validate_slug(invalid_slug) def test_validate_slug_returns_none_if_passed_valid_slug() -> None: valid_slug = "test-dataset" - assert DatasetMeta._validate_slug(valid_slug) is None + assert Dataset._validate_slug(valid_slug) is None diff --git a/darwin/future/tests/meta/objects/test_stagemeta.py b/darwin/future/tests/meta/objects/test_stagemeta.py index 407db61b1..99f4dfef5 100644 --- a/darwin/future/tests/meta/objects/test_stagemeta.py +++ b/darwin/future/tests/meta/objects/test_stagemeta.py @@ -7,9 +7,9 @@ from sklearn import base from darwin.future.core.client import DarwinConfig -from darwin.future.data_objects.workflow import WFStage, WFType -from darwin.future.meta.client import MetaClient -from darwin.future.meta.objects.stage import StageMeta +from darwin.future.data_objects.workflow import WFStageCore, WFTypeCore +from darwin.future.meta.client import Client 
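# Illustrative sketch, not part of the patch: the meta-object pattern these
# renames converge on. A meta class such as Stage wraps the client, a parsed
# core element, and any routing meta_params; list filters then guard on
# `_element` before reading core fields, as StageQuery._execute_filter does
# earlier in this patch. The helper function itself is an assumption.
from typing import List

from darwin.future.data_objects.workflow import WFTypeCore
from darwin.future.meta.objects.stage import Stage


def annotate_stages(stages: List[Stage]) -> List[Stage]:
    # Keep only stages whose backing core element exists and whose type is
    # the annotate stage type.
    return [s for s in stages if s._element is not None and s._element.type == WFTypeCore.ANNOTATE]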
+from darwin.future.meta.objects.stage import Stage from darwin.future.tests.core.fixtures import * from darwin.future.tests.core.items.fixtures import * from darwin.future.tests.meta.fixtures import * @@ -19,39 +19,53 @@ def uuid_str() -> str: return "00000000-0000-0000-0000-000000000000" + @fixture -def base_WFStage(uuid_str: str) -> WFStage: - return WFStage(id=UUID(uuid_str), name="test-stage", type=WFType.ANNOTATE, assignable_users=[],edges=[]) +def base_WFStage(uuid_str: str) -> WFStageCore: + return WFStageCore(id=UUID(uuid_str), name="test-stage", type=WFTypeCore.ANNOTATE, assignable_users=[], edges=[]) + @fixture -def stage_meta(base_meta_client: MetaClient, base_WFStage: WFStage, workflow_id: UUID) -> StageMeta: - return StageMeta(base_meta_client, base_WFStage, {"team_slug": "default-team", "dataset_id": 1337, "workflow_id": workflow_id}) +def stage_meta(base_meta_client: Client, base_WFStage: WFStageCore, workflow_id: UUID) -> Stage: + return Stage( + base_meta_client, base_WFStage, {"team_slug": "default-team", "dataset_id": 1337, "workflow_id": workflow_id} + ) + -def test_item_ids(base_meta_client: MetaClient, stage_meta: StageMeta, UUIDs_str: List[str], UUIDs: List[UUID]) -> None: +def test_item_ids(base_meta_client: Client, stage_meta: Stage, UUIDs_str: List[str], UUIDs: List[UUID]) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.GET, - base_meta_client.config.api_endpoint + f"v2/teams/default-team/items/ids?workflow_stage_ids={str(stage_meta.id)}&dataset_ids=1337", + base_meta_client.config.api_endpoint + + f"v2/teams/default-team/items/ids?workflow_stage_ids={str(stage_meta.id)}&dataset_ids=1337", json={"item_ids": UUIDs_str}, status=200, ) item_ids = stage_meta.item_ids assert item_ids == UUIDs -def test_move_attached_files_to_stage(base_meta_client: MetaClient, stage_meta: StageMeta, UUIDs_str: List[str], UUIDs: List[UUID]) -> None: + +def test_move_attached_files_to_stage( + base_meta_client: Client, stage_meta: Stage, UUIDs_str: List[str], UUIDs: List[UUID] +) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.GET, - base_meta_client.config.api_endpoint + f"v2/teams/default-team/items/ids?workflow_stage_ids={str(stage_meta.id)}&dataset_ids=1337", + base_meta_client.config.api_endpoint + + f"v2/teams/default-team/items/ids?workflow_stage_ids={str(stage_meta.id)}&dataset_ids=1337", json={"item_ids": UUIDs_str}, status=200, ) rsps.add( rsps.POST, base_meta_client.config.api_endpoint + "v2/teams/default-team/items/stage", - json={"success": UUIDs_str}, + json={"success": UUIDs_str}, status=200, ) stage_meta.move_attached_files_to_stage(stage_meta.id) assert rsps.assert_call_count(base_meta_client.config.api_endpoint + "v2/teams/default-team/items/stage", 1) - assert rsps.assert_call_count(base_meta_client.config.api_endpoint + f"v2/teams/default-team/items/ids?workflow_stage_ids={str(stage_meta.id)}&dataset_ids=1337", 1) \ No newline at end of file + assert rsps.assert_call_count( + base_meta_client.config.api_endpoint + + f"v2/teams/default-team/items/ids?workflow_stage_ids={str(stage_meta.id)}&dataset_ids=1337", + 1, + ) diff --git a/darwin/future/tests/meta/objects/test_teammeta.py b/darwin/future/tests/meta/objects/test_teammeta.py index f0346a620..06370b4d7 100644 --- a/darwin/future/tests/meta/objects/test_teammeta.py +++ b/darwin/future/tests/meta/objects/test_teammeta.py @@ -3,19 +3,19 @@ import responses from pytest import fixture, raises -from darwin.future.core.client import Client, DarwinConfig -from darwin.future.data_objects.team 
import Team, TeamMember -from darwin.future.meta.objects.team import TeamMeta +from darwin.future.core.client import ClientCore +from darwin.future.data_objects.team import TeamMemberCore +from darwin.future.meta.objects.team import Team from darwin.future.tests.core.fixtures import * from darwin.future.tests.meta.objects.fixtures import * def test_team_meta_collects_members( - base_meta_team: TeamMeta, base_client: Client, base_team_member: TeamMember, base_team_member_json: dict + base_meta_team: Team, base_client: ClientCore, base_team_member: TeamMemberCore, base_team_member_json: dict ) -> None: with responses.RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "memberships" rsps.add(responses.GET, endpoint, json=[base_team_member_json]) - members = base_meta_team.members.collect() + members = base_meta_team.members._collect() assert len(members) == 1 - assert members[0]._item == base_team_member + assert members[0]._element == base_team_member diff --git a/darwin/future/tests/meta/queries/test_dataset.py b/darwin/future/tests/meta/queries/test_dataset.py index 6b9eecaf0..3d3e163d9 100644 --- a/darwin/future/tests/meta/queries/test_dataset.py +++ b/darwin/future/tests/meta/queries/test_dataset.py @@ -1,89 +1,84 @@ import responses from pytest import fixture, mark -from darwin.future.core.client import Client -from darwin.future.data_objects.dataset import Dataset -from darwin.future.meta.objects.dataset import DatasetMeta +from darwin.future.core.client import ClientCore +from darwin.future.data_objects.dataset import DatasetCore +from darwin.future.meta.objects.dataset import Dataset from darwin.future.meta.queries.dataset import DatasetQuery from darwin.future.tests.core.fixtures import * -def test_dataset_collects_basic(base_client: Client, base_datasets_json: dict) -> None: +def test_dataset_collects_basic(base_client: ClientCore, base_datasets_json: dict) -> None: query = DatasetQuery(base_client) with responses.RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, json=base_datasets_json) - datasets = query.collect() + datasets = query._collect() assert len(datasets) == 2 - assert all([isinstance(dataset, DatasetMeta) for dataset in datasets]) + assert all([isinstance(dataset, Dataset) for dataset in datasets]) -def test_datasetquery_only_passes_back_correctly_formed_objects(base_client: Client, base_dataset_json: dict) -> None: +def test_datasetquery_only_passes_back_correctly_formed_objects( + base_client: ClientCore, base_dataset_json: dict +) -> None: query = DatasetQuery(base_client) with responses.RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, json=[base_dataset_json, {}]) - datasets = query.collect() + datasets = query._collect() assert len(datasets) == 1 - assert isinstance(datasets[0], DatasetMeta) + assert isinstance(datasets[0], Dataset) -def test_dataset_filters_name(base_client: Client, base_datasets_json: dict) -> None: +def test_dataset_filters_name(base_client: ClientCore, base_datasets_json: dict) -> None: with responses.RequestsMock() as rsps: query = DatasetQuery(base_client).where({"name": "name", "param": "test dataset 1"}) endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, json=base_datasets_json) - datasets = query.collect() + datasets = query._collect() assert len(datasets) == 1 - assert datasets[0]._item is not None - assert datasets[0]._item.slug == "test-dataset-1" + 
assert datasets[0]._element.slug == "test-dataset-1" -def test_dataset_filters_id(base_client: Client, base_datasets_json: dict) -> None: +def test_dataset_filters_id(base_client: ClientCore, base_datasets_json: dict) -> None: with responses.RequestsMock() as rsps: query = DatasetQuery(base_client).where({"name": "id", "param": 1}) endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, json=base_datasets_json) - datasets = query.collect() + datasets = query._collect() assert len(datasets) == 1 - assert datasets[0]._item is not None - assert datasets[0]._item.slug == "test-dataset-1" + assert datasets[0]._element.slug == "test-dataset-1" -def test_dataset_filters_slug(base_client: Client, base_datasets_json: dict) -> None: +def test_dataset_filters_slug(base_client: ClientCore, base_datasets_json: dict) -> None: with responses.RequestsMock() as rsps: query = DatasetQuery(base_client).where({"name": "slug", "param": "test-dataset-1"}) endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, json=base_datasets_json) - datasets = query.collect() + datasets = query._collect() assert len(datasets) == 1 - assert datasets[0]._item is not None - assert datasets[0]._item.slug == "test-dataset-1" + assert datasets[0]._element.slug == "test-dataset-1" -def test_dataset_filters_releases(base_client: Client, base_datasets_json_with_releases: dict) -> None: +def test_dataset_filters_releases(base_client: ClientCore, base_datasets_json_with_releases: dict) -> None: with responses.RequestsMock() as rsps: query = DatasetQuery(base_client).where({"name": "releases", "param": "release1"}) endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, json=base_datasets_json_with_releases) - datasets_odd_ids = query.collect() + datasets_odd_ids = query._collect() assert len(datasets_odd_ids) == 2 - assert datasets_odd_ids[0]._item is not None - assert datasets_odd_ids[1]._item is not None - assert datasets_odd_ids[0]._item.slug == "test-dataset-1" - assert datasets_odd_ids[1]._item.slug == "test-dataset-3" + assert datasets_odd_ids[0]._element.slug == "test-dataset-1" + assert datasets_odd_ids[1]._element.slug == "test-dataset-3" query2 = DatasetQuery(base_client).where({"name": "releases", "param": "release2"}) - datasets_even_ids = query2.collect() + datasets_even_ids = query2._collect() assert len(datasets_even_ids) == 2 - assert datasets_even_ids[0]._item is not None - assert datasets_even_ids[1]._item is not None - assert datasets_even_ids[0]._item.slug == "test-dataset-2" - assert datasets_even_ids[1]._item.slug == "test-dataset-4" + assert datasets_even_ids[0]._element.slug == "test-dataset-2" + assert datasets_even_ids[1]._element.slug == "test-dataset-4" diff --git a/darwin/future/tests/meta/queries/test_stage.py b/darwin/future/tests/meta/queries/test_stage.py index 9dfdc20e6..a9cc61267 100644 --- a/darwin/future/tests/meta/queries/test_stage.py +++ b/darwin/future/tests/meta/queries/test_stage.py @@ -3,28 +3,28 @@ import pytest import responses -from darwin.future.core.client import Client -from darwin.future.data_objects.workflow import WFType, Workflow -from darwin.future.meta.objects.stage import StageMeta -from darwin.future.meta.objects.workflow import WorkflowMeta +from darwin.future.core.client import ClientCore +from darwin.future.data_objects.workflow import WFTypeCore, WorkflowCore +from darwin.future.meta.objects.stage import Stage +from darwin.future.meta.objects.workflow import Workflow from 
darwin.future.meta.queries.stage import StageQuery from darwin.future.tests.core.fixtures import * @pytest.fixture -def filled_query(base_client: Client, base_workflow_meta: WorkflowMeta) -> StageQuery: +def filled_query(base_client: ClientCore, base_workflow_meta: Workflow) -> StageQuery: return StageQuery(base_client, meta_params={"workflow_id": str(base_workflow_meta.id)}) @pytest.fixture -def base_workflow_meta(base_client: Client, base_single_workflow_object: dict) -> WorkflowMeta: - return WorkflowMeta(base_client, Workflow.parse_obj(base_single_workflow_object)) +def base_workflow_meta(base_client: ClientCore, base_single_workflow_object: dict) -> Workflow: + return Workflow(base_client, WorkflowCore.parse_obj(base_single_workflow_object)) @pytest.fixture def multi_stage_workflow_object(base_single_workflow_object: dict) -> dict: stage = base_single_workflow_object["stages"][0] - types = [t for t in WFType.__members__.values()] * 3 + types = [t for t in WFTypeCore.__members__.values()] * 3 stages = [] for i, t in enumerate(types): temp = stage.copy() @@ -36,47 +36,45 @@ def multi_stage_workflow_object(base_single_workflow_object: dict) -> dict: def test_WFTypes_accept_unknonwn() -> None: - assert WFType("unknown") == WFType.UNKNOWN - assert WFType("test") == WFType.UNKNOWN + assert WFTypeCore("unknown") == WFTypeCore.UNKNOWN + assert WFTypeCore("test") == WFTypeCore.UNKNOWN def test_stage_collects_basic( - filled_query: StageQuery, base_single_workflow_object: dict, base_workflow_meta: WorkflowMeta + filled_query: StageQuery, base_single_workflow_object: dict, base_workflow_meta: Workflow ) -> None: UUID = base_workflow_meta.id with responses.RequestsMock() as rsps: endpoint = filled_query.client.config.api_endpoint + f"v2/teams/default-team/workflows/{UUID}" rsps.add(responses.GET, endpoint, json=base_single_workflow_object) - stages = filled_query.collect() + stages = filled_query._collect() assert len(stages) == len(base_workflow_meta.stages) - assert isinstance(stages[0], StageMeta) + assert isinstance(stages[0], Stage) def test_stage_filters_basic( - filled_query: StageQuery, multi_stage_workflow_object: dict, base_workflow_meta: WorkflowMeta + filled_query: StageQuery, multi_stage_workflow_object: dict, base_workflow_meta: Workflow ) -> None: UUID = base_workflow_meta.id with responses.RequestsMock() as rsps: endpoint = filled_query.client.config.api_endpoint + f"v2/teams/default-team/workflows/{UUID}" rsps.add(responses.GET, endpoint, json=multi_stage_workflow_object) - stages = filled_query.where({"name": "name", "param": "stage1"}).collect() + stages = filled_query.where({"name": "name", "param": "stage1"})._collect() assert len(stages) == 1 - assert isinstance(stages[0], StageMeta) - assert stages[0]._item is not None - assert stages[0]._item.name == "stage1" + assert isinstance(stages[0], Stage) + assert stages[0]._element.name == "stage1" -@pytest.mark.parametrize("wf_type", [t for t in WFType.__members__.values()]) +@pytest.mark.parametrize("wf_type", [t for t in WFTypeCore.__members__.values()]) def test_stage_filters_WFType( - wf_type: WFType, filled_query: StageQuery, multi_stage_workflow_object: dict, base_workflow_meta: WorkflowMeta + wf_type: WFTypeCore, filled_query: StageQuery, multi_stage_workflow_object: dict, base_workflow_meta: Workflow ) -> None: UUID = base_workflow_meta.id with responses.RequestsMock() as rsps: endpoint = filled_query.client.config.api_endpoint + f"v2/teams/default-team/workflows/{UUID}" rsps.add(responses.GET, endpoint, 
json=multi_stage_workflow_object) - stages = filled_query.where({"name": "type", "param": wf_type.value}).collect() + stages = filled_query.where({"name": "type", "param": wf_type.value})._collect() assert len(stages) == 3 - assert isinstance(stages[0], StageMeta) + assert isinstance(stages[0], Stage) for stage in stages: - assert stage._item is not None - assert stage._item.type == wf_type + assert stage._element.type == wf_type diff --git a/darwin/future/tests/meta/queries/test_team_member.py b/darwin/future/tests/meta/queries/test_team_member.py index 7a9d82c75..19b19a2e2 100644 --- a/darwin/future/tests/meta/queries/test_team_member.py +++ b/darwin/future/tests/meta/queries/test_team_member.py @@ -3,60 +3,57 @@ import pytest import responses -from darwin.future.core.client import Client -from darwin.future.data_objects.team import TeamMember +from darwin.future.core.client import ClientCore from darwin.future.data_objects.team_member_role import TeamMemberRole -from darwin.future.meta.objects.team_member import TeamMemberMeta +from darwin.future.meta.objects.team_member import TeamMember from darwin.future.meta.queries.team_member import TeamMemberQuery from darwin.future.tests.core.fixtures import * -def test_team_member_collects_basic(base_client: Client, base_team_members_json: List[dict]) -> None: +def test_team_member_collects_basic(base_client: ClientCore, base_team_members_json: List[dict]) -> None: query = TeamMemberQuery(base_client) with responses.RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "memberships" rsps.add(responses.GET, endpoint, json=base_team_members_json) - members = query.collect() + members = query._collect() assert len(members) == len(TeamMemberRole) - assert isinstance(members[0], TeamMemberMeta) + assert isinstance(members[0], TeamMember) -def test_team_member_only_passes_back_correct(base_client: Client, base_team_member_json: dict) -> None: +def test_team_member_only_passes_back_correct(base_client: ClientCore, base_team_member_json: dict) -> None: query = TeamMemberQuery(base_client) with responses.RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "memberships" rsps.add(responses.GET, endpoint, json=[base_team_member_json, {}]) - members = query.collect() + members = query._collect() assert len(members) == 1 - assert isinstance(members[0], TeamMemberMeta) + assert isinstance(members[0], TeamMember) @pytest.mark.parametrize("role", [role for role in TeamMemberRole]) def test_team_member_filters_role( - role: TeamMemberRole, base_client: Client, base_team_members_json: List[dict] + role: TeamMemberRole, base_client: ClientCore, base_team_members_json: List[dict] ) -> None: with responses.RequestsMock() as rsps: # Test equal query = TeamMemberQuery(base_client).where({"name": "role", "param": role.value}) endpoint = base_client.config.api_endpoint + "memberships" rsps.add(responses.GET, endpoint, json=base_team_members_json) - members = query.collect() + members = query._collect() assert len(members) == 1 - assert members[0]._item is not None - assert members[0]._item.role == role + assert members[0]._element.role == role # Test not equal rsps.reset() query = TeamMemberQuery(base_client).where({"name": "role", "param": role.value, "modifier": "!="}) rsps.add(responses.GET, endpoint, json=base_team_members_json) - members = query.collect() + members = query._collect() assert len(members) == len(TeamMemberRole) - 1 for member in members: - assert member._item is not None - assert member._item.role != role + assert 
member._element.role != role -def test_team_member_filters_general(base_client: Client, base_team_members_json: List[dict]) -> None: +def test_team_member_filters_general(base_client: ClientCore, base_team_members_json: List[dict]) -> None: for idx in range(len(base_team_members_json)): base_team_members_json[idx]["id"] = idx + 1 @@ -64,10 +61,9 @@ def test_team_member_filters_general(base_client: Client, base_team_members_json query = TeamMemberQuery(base_client).where({"name": "id", "param": 1}) endpoint = base_client.config.api_endpoint + "memberships" rsps.add(responses.GET, endpoint, json=base_team_members_json) - members = query.collect() + members = query._collect() assert len(members) == 1 - assert members[0]._item is not None - assert members[0]._item.id == 1 + assert members[0]._element.id == 1 # Test chained rsps.reset() @@ -78,7 +74,7 @@ def test_team_member_filters_general(base_client: Client, base_team_members_json TeamMemberQuery(base_client) .where({"name": "id", "param": 1, "modifier": ">"}) .where({"name": "id", "param": len(base_team_members_json), "modifier": "<"}) - .collect() + ._collect() ) assert len(members) == len(base_team_members_json) - 2 diff --git a/darwin/future/tests/meta/queries/test_workflow.py b/darwin/future/tests/meta/queries/test_workflow.py index 71499cf98..71f77c802 100644 --- a/darwin/future/tests/meta/queries/test_workflow.py +++ b/darwin/future/tests/meta/queries/test_workflow.py @@ -3,10 +3,10 @@ import responses -from darwin.future.core.client import Client +from darwin.future.core.client import ClientCore from darwin.future.core.types.query import Modifier -from darwin.future.data_objects.workflow import Workflow -from darwin.future.meta.objects.workflow import WorkflowMeta +from darwin.future.data_objects.workflow import WorkflowCore +from darwin.future.meta.objects.workflow import Workflow from darwin.future.meta.queries.workflow import WorkflowQuery from darwin.future.tests.core.fixtures import * @@ -20,19 +20,19 @@ def workflows_query_endpoint(team: str) -> str: @responses.activate -def test_workflowquery_collects_basic(base_client: Client, base_filterable_workflows: dict) -> None: +def test_workflowquery_collects_basic(base_client: ClientCore, base_filterable_workflows: dict) -> None: endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []) - workflows = query.collect() + workflows = query._collect() assert len(workflows) == 3 - assert all([isinstance(workflow, WorkflowMeta) for workflow in workflows]) + assert all([isinstance(workflow, Workflow) for workflow in workflows]) @responses.activate -def test_workflowquery_filters_uuid(base_client: Client, base_filterable_workflows: dict) -> None: +def test_workflowquery_filters_uuid(base_client: ClientCore, base_filterable_workflows: dict) -> None: endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) responses.add(responses.GET, endpoint, json=base_filterable_workflows) @@ -42,14 +42,14 @@ def test_workflowquery_filters_uuid(base_client: Client, base_filterable_workflo "param": "6dca86a3-48fb-40cc-8594-88310f5f1fdf", } ) - workflows = query.collect() + workflows = query._collect() assert len(workflows) == 1 assert str(workflows[0].id) == WORKFLOW_1 @responses.activate -def test_workflowquery_filters_inserted_at(base_client: Client, base_filterable_workflows: dict) -> None: +def 
test_workflowquery_filters_inserted_at(base_client: ClientCore, base_filterable_workflows: dict) -> None: endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) responses.add(responses.GET, endpoint, json=base_filterable_workflows) @@ -71,7 +71,7 @@ def test_workflowquery_filters_inserted_at(base_client: Client, base_filterable_ } ) ) - workflows = query.collect() + workflows = query._collect() assert len(workflows) == 2 ids = [str(workflow.id) for workflow in workflows] @@ -80,7 +80,7 @@ def test_workflowquery_filters_inserted_at(base_client: Client, base_filterable_ @responses.activate -def test_workflowquery_filters_updated_at(base_client: Client, base_filterable_workflows: dict) -> None: +def test_workflowquery_filters_updated_at(base_client: ClientCore, base_filterable_workflows: dict) -> None: endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) responses.add(responses.GET, endpoint, json=base_filterable_workflows) @@ -102,7 +102,7 @@ def test_workflowquery_filters_updated_at(base_client: Client, base_filterable_w } ) ) - workflows = query.collect() + workflows = query._collect() assert len(workflows) == 2 ids = [str(workflow.id) for workflow in workflows] @@ -111,7 +111,7 @@ def test_workflowquery_filters_updated_at(base_client: Client, base_filterable_w @responses.activate -def test_workflowquery_filters_dataset_id(base_client: Client, base_filterable_workflows: dict) -> None: +def test_workflowquery_filters_dataset_id(base_client: ClientCore, base_filterable_workflows: dict) -> None: endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) responses.add(responses.GET, endpoint, json=base_filterable_workflows) @@ -121,14 +121,16 @@ def test_workflowquery_filters_dataset_id(base_client: Client, base_filterable_w "param": "1", } ) - workflows = query.collect() + workflows = query._collect() assert len(workflows) == 1 assert str(workflows[0].id) == WORKFLOW_1 @responses.activate -def test_workflowquery_filters_dataset_id_multiple_ids(base_client: Client, base_filterable_workflows: dict) -> None: +def test_workflowquery_filters_dataset_id_multiple_ids( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) responses.add(responses.GET, endpoint, json=base_filterable_workflows) @@ -138,7 +140,7 @@ def test_workflowquery_filters_dataset_id_multiple_ids(base_client: Client, base "param": "1,2", } ) - workflows = query.collect() + workflows = query._collect() assert len(workflows) == 2 assert str(workflows[0].id) == WORKFLOW_1 @@ -146,7 +148,7 @@ def test_workflowquery_filters_dataset_id_multiple_ids(base_client: Client, base @responses.activate -def test_workflowquery_filters_dataset_name(base_client: Client, base_filterable_workflows: dict) -> None: +def test_workflowquery_filters_dataset_name(base_client: ClientCore, base_filterable_workflows: dict) -> None: endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) responses.add(responses.GET, endpoint, json=base_filterable_workflows) @@ -156,7 +158,7 @@ def test_workflowquery_filters_dataset_name(base_client: Client, base_filterable "param": "test-dataset-1", } ) - workflows = query.collect() + workflows = query._collect() assert len(workflows) == 1 assert str(workflows[0].id) == WORKFLOW_1 @@ -164,7 +166,7 @@ def 
test_workflowquery_filters_dataset_name(base_client: Client, base_filterable @responses.activate def test_workflowquery_filters_dataset_name_mutliple_names( - base_client: Client, base_filterable_workflows: dict + base_client: ClientCore, base_filterable_workflows: dict ) -> None: endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) responses.add(responses.GET, endpoint, json=base_filterable_workflows) @@ -175,7 +177,7 @@ def test_workflowquery_filters_dataset_name_mutliple_names( "param": "test-dataset-1,test-dataset-2", } ) - workflows = query.collect() + workflows = query._collect() assert len(workflows) == 2 assert str(workflows[0].id) == WORKFLOW_1 @@ -183,7 +185,7 @@ def test_workflowquery_filters_dataset_name_mutliple_names( @responses.activate -def test_workflowquery_filters_stages(base_client: Client, base_filterable_workflows: dict) -> None: +def test_workflowquery_filters_stages(base_client: ClientCore, base_filterable_workflows: dict) -> None: endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) responses.add(responses.GET, endpoint, json=base_filterable_workflows) @@ -193,14 +195,14 @@ def test_workflowquery_filters_stages(base_client: Client, base_filterable_workf "param": "5445adcb-193d-4f76-adb0-0c6d5f5e4c04", } ) - workflows = query.collect() + workflows = query._collect() assert len(workflows) == 1 assert str(workflows[0].name) == "test-workflow-3" @responses.activate -def test_workflowquery_filters_stages_multiple(base_client: Client, base_filterable_workflows: dict) -> None: +def test_workflowquery_filters_stages_multiple(base_client: ClientCore, base_filterable_workflows: dict) -> None: endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) responses.add(responses.GET, endpoint, json=base_filterable_workflows) @@ -210,7 +212,7 @@ def test_workflowquery_filters_stages_multiple(base_client: Client, base_filtera "param": "5445adcb-193d-4f76-adb0-0c6d5f5e4c04,53d2c997-6bb0-4766-803c-3c8d1fb21072", } ) - workflows = query.collect() + workflows = query._collect() assert len(workflows) == 2 workflow_names = [workflow.name for workflow in workflows] diff --git a/darwin/future/tests/meta/test_client.py b/darwin/future/tests/meta/test_client.py index 05693394c..fb3b74b16 100644 --- a/darwin/future/tests/meta/test_client.py +++ b/darwin/future/tests/meta/test_client.py @@ -4,9 +4,9 @@ import responses from darwin.future.core.client import DarwinConfig -from darwin.future.data_objects.team import Team -from darwin.future.meta.client import MetaClient -from darwin.future.meta.objects.team import TeamMeta +from darwin.future.data_objects.team import TeamCore +from darwin.future.meta.client import Client +from darwin.future.meta.objects.team import Team from darwin.future.tests.core.fixtures import * from darwin.future.tests.meta.fixtures import * @@ -15,15 +15,15 @@ def test_creates_from_api_key() -> None: with responses.RequestsMock() as rsps: base_api_endpoint = DarwinConfig._default_api_endpoint() rsps.add(responses.GET, base_api_endpoint + "users/token_info", json={"selected_team": {"slug": "test-team"}}) - client = MetaClient.from_api_key(api_key="test") + client = Client.from_api_key(api_key="test") assert client.config.default_team == "test-team" -def test_team_property(base_meta_client: MetaClient, base_team: Team, base_team_json: dict) -> None: +def test_team_property(base_meta_client: Client, base_team: TeamCore, 
base_team_json: dict) -> None: client = base_meta_client endpoint = client.config.api_endpoint + f"teams/{client.config.default_team}" with responses.RequestsMock() as rsps: rsps.add(responses.GET, endpoint, json=base_team_json) team = client.team - assert isinstance(team, TeamMeta) - assert team._item == base_team + assert isinstance(team, Team) + assert team._element == base_team From 9ffa4205afe6778566788b02b67b9523b9e9bc84 Mon Sep 17 00:00:00 2001 From: Owen Date: Sun, 1 Oct 2023 20:50:47 +0100 Subject: [PATCH 188/195] HOTFIX: Documentation fix --- darwin/datatypes.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/darwin/datatypes.py b/darwin/datatypes.py index 3ead5cb64..ad177986e 100644 --- a/darwin/datatypes.py +++ b/darwin/datatypes.py @@ -959,6 +959,7 @@ def make_graph( AnnotationClass(class_name, "graph"), {"nodes": nodes, "edges": edges}, subs or [], slot_names=slot_names or [] ) + def make_mask( class_name: str, subs: Optional[List[SubAnnotation]] = None, slot_names: Optional[List[str]] = None ) -> Annotation: @@ -979,6 +980,7 @@ def make_mask( """ return Annotation(AnnotationClass(class_name, "mask"), {}, subs or [], slot_names=slot_names or []) + def make_raster_layer( class_name: str, mask_annotation_ids_mapping: Dict[str, str], @@ -998,12 +1000,12 @@ def make_raster_layer( mask_annotation_ids_mapping : Dict[str, str] Mapping of mask annotations ids to unique small integers used in the dense_rle. Should be in following format: - .. code-block:: javascript + .. code-block:: javascript - { - "91bb3c24-883a-433b-ae95-a6ee7845bea5": 1, - "5a0ceba1-2e26-425e-8579-e6013ca415c5": 2 - } + { + "91bb3c24-883a-433b-ae95-a6ee7845bea5": 1, + "5a0ceba1-2e26-425e-8579-e6013ca415c5": 2 + } total_pixels : int Total number of pixels in a corresponding image. @@ -1011,9 +1013,9 @@ def make_raster_layer( dense_rle : int Run length encoding of all masks in the raster layer. Should be in following format: - .. code-block:: javascript + .. code-block:: javascript - [0, 5, 1, 15, 2, 10] + [0, 5, 1, 15, 2, 10] subs : Optional[List[SubAnnotation]], default: None List of ``SubAnnotation``\\s for this ``Annotation``. @@ -1024,7 +1026,14 @@ def make_raster_layer( A raster_layer ``Annotation``. """ return Annotation( - AnnotationClass(class_name, "raster_layer"), {"mask_annotation_ids_mapping": mask_annotation_ids_mapping, "total_pixels": total_pixels, "dense_rle": dense_rle}, subs or [], slot_names=slot_names or [] + AnnotationClass(class_name, "raster_layer"), + { + "mask_annotation_ids_mapping": mask_annotation_ids_mapping, + "total_pixels": total_pixels, + "dense_rle": dense_rle, + }, + subs or [], + slot_names=slot_names or [], ) From 1840d4f09f8f209009319425c99d152e6c60adf2 Mon Sep 17 00:00:00 2001 From: Owen Jones Date: Mon, 2 Oct 2023 14:02:14 +0100 Subject: [PATCH 189/195] [IO-1824][internal] Documentation fix (#673) * Fix for unrelated ticket * Fix to tolerate issue in place --------- Co-authored-by: Owen --- .github/workflows/JOB_generate_documentation.yml | 7 +++---- .gitignore | 4 ++-- README.md | 2 +- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/workflows/JOB_generate_documentation.yml b/.github/workflows/JOB_generate_documentation.yml index a9d2bf786..7fcf0fde4 100644 --- a/.github/workflows/JOB_generate_documentation.yml +++ b/.github/workflows/JOB_generate_documentation.yml @@ -45,10 +45,9 @@ jobs: env: PYTHONPATH: "." 
run: | - rm -rf docs/* &&\ - sphinx-apidoc -f -o source darwin darwin/future &&\ - sphinx-build -b html source/ docs/ -W - + rm -rf docs/* + sphinx-apidoc -f -o source darwin darwin/future + sphinx-build -b html source/ docs/ - name: Setup access to AWS id: aws_assume_role uses: aws-actions/configure-aws-credentials@v2 diff --git a/.gitignore b/.gitignore index 112bea8ce..3dd50932a 100644 --- a/.gitignore +++ b/.gitignore @@ -126,8 +126,8 @@ celerybeat.pid *.sage.py # Environments -.env -.venv +.env* +.venv* env/ venv/ ENV/ diff --git a/README.md b/README.md index a9171f262..9f652f323 100644 --- a/README.md +++ b/README.md @@ -167,7 +167,7 @@ Dataset example-team/test:0.1 downloaded at /directory/choosen/at/authentication The framework is designed to be usable as a standalone python library. Usage can be inferred from looking at the operations performed in `darwin/cli_functions.py`. A minimal example to download a dataset is provided below and a more extensive one can be found in -[darwin_demo.py]([./darwin_demo.py](https://github.com/v7labs/darwin-py/blob/master/darwin_demo.py). +[darwin_demo.py](https://github.com/v7labs/darwin-py/blob/master/darwin_demo.py). ```python from darwin.client import Client From 445981d7290c7a6e67c72b3d93df19f96d65cef7 Mon Sep 17 00:00:00 2001 From: Nathan Perkins Date: Tue, 3 Oct 2023 09:54:48 +0100 Subject: [PATCH 190/195] [IO-1561] Mypy cleanup (#652) * mypy cleanup * drop mypy settings that don't work * np typing change * remove overrides * Literal fix --- darwin/client.py | 3 +- darwin/exporter/formats/nifti.py | 18 ++++---- .../tests/meta/objects/test_datasetmeta.py | 5 ++- darwin/torch/dataset.py | 4 +- darwin/torch/transforms.py | 7 ++-- darwin/torch/utils.py | 5 ++- tests/darwin/torch/dataset_test.py | 42 ++++++++++++------- tests/darwin/torch/utils_test.py | 9 ++-- tests/fixtures.py | 5 ++- 9 files changed, 58 insertions(+), 40 deletions(-) diff --git a/darwin/client.py b/darwin/client.py index 5436f6fd5..296c09b2e 100644 --- a/darwin/client.py +++ b/darwin/client.py @@ -911,7 +911,7 @@ def fetch_binary(self, url: str) -> Response: Response ``request``'s Response object. 
""" - response: Response = cast(Response, self._get_raw_from_full_url(url, stream=True)) + response: Response = self._get_raw_from_full_url(url, stream=True) return response @classmethod @@ -1299,5 +1299,4 @@ def api_v2(self) -> BackendV2: team = self.config.get_default_team() if not team: raise ValueError("No team was found.") - return BackendV2(self, team.slug) diff --git a/darwin/exporter/formats/nifti.py b/darwin/exporter/formats/nifti.py index 31cbb9ac9..115f7afa1 100644 --- a/darwin/exporter/formats/nifti.py +++ b/darwin/exporter/formats/nifti.py @@ -3,7 +3,7 @@ from asyncore import loop from dataclasses import dataclass from pathlib import Path -from typing import Dict, Iterable, List, Optional, Union +from typing import Dict, Iterable, List, Optional, Tuple, Union from rich.console import Console @@ -67,7 +67,7 @@ def export(annotation_files: Iterable[dt.AnnotationFile], output_dir: Path) -> N write_output_volume_to_disk(output_volumes, image_id=image_id, output_dir=output_dir) -def build_output_volumes(video_annotation: dt.AnnotationFile): +def build_output_volumes(video_annotation: dt.AnnotationFile) -> Dict: """ This is a function to create the output volumes based on the whole annotation file @@ -85,6 +85,7 @@ def build_output_volumes(video_annotation: dt.AnnotationFile): class_map = {} class_count = 1 for annotation in video_annotation.annotations: + assert isinstance(annotation, dt.VideoAnnotation) frames = annotation.frames for frame_idx in frames.keys(): class_name = frames[frame_idx].annotation_class.name @@ -95,6 +96,7 @@ def build_output_volumes(video_annotation: dt.AnnotationFile): output_volumes = {} for slot in video_annotation.slots: slot_metadata = slot.metadata + assert slot_metadata is not None series_instance_uid = slot_metadata.get("SeriesInstanceUID", "SeriesIntanceUIDNotProvided") # Builds output volumes per class volume_dims, pixdims, affine, original_affine = process_metadata(slot.metadata) @@ -182,7 +184,7 @@ def check_for_error_and_return_imageid(video_annotation: dt.AnnotationFile, outp def populate_output_volumes( annotation: Union[dt.Annotation, dt.VideoAnnotation], - output_dir: str, + output_dir: Union[str, Path], slot_map: Dict, output_volumes: Dict, image_id: str, @@ -270,9 +272,9 @@ def populate_output_volumes( ) -def write_output_volume_to_disk(output_volumes: Dict, image_id: str, output_dir: str): +def write_output_volume_to_disk(output_volumes: Dict, image_id: str, output_dir: Union[str, Path]) -> None: # volumes are the values of this nested dict - def unnest_dict_to_list(d): + def unnest_dict_to_list(d: Dict) -> List: result = [] for value in d.values(): if isinstance(value, dict): @@ -316,7 +318,7 @@ def get_view_idx(frame_idx, groups): return view_idx -def get_view_idx_from_slot_name(slot_name, orientation): +def get_view_idx_from_slot_name(slot_name: str, orientation: Union[str, None]) -> int: if orientation is None: orientation_dict = {"0.1": 0, "0.2": 1, "0.3": 2} return orientation_dict.get(slot_name, 0) @@ -325,7 +327,7 @@ def get_view_idx_from_slot_name(slot_name, orientation): return orientation_dict.get(orientation, 0) -def process_metadata(metadata): +def process_metadata(metadata: Dict) -> Tuple: volume_dims = metadata.get("shape") pixdim = metadata.get("pixdim") affine = process_affine(metadata.get("affine")) @@ -360,7 +362,7 @@ def process_affine(affine): return affine -def create_error_message_json(error_message: str, output_dir: str, image_id: str): +def create_error_message_json(error_message: str, output_dir: Union[str, 
Path], image_id: str) -> bool: output_path = Path(output_dir) / f"{image_id}_error.json" if not output_path.parent.exists(): output_path.parent.mkdir(parents=True) diff --git a/darwin/future/tests/meta/objects/test_datasetmeta.py b/darwin/future/tests/meta/objects/test_datasetmeta.py index 8f5726d62..7d3088b38 100644 --- a/darwin/future/tests/meta/objects/test_datasetmeta.py +++ b/darwin/future/tests/meta/objects/test_datasetmeta.py @@ -1,4 +1,5 @@ import string +from typing import Generator from unittest.mock import Mock, patch from pytest import fixture, mark, raises @@ -11,13 +12,13 @@ @fixture -def _delete_by_slug_mock(): # type: ignore +def _delete_by_slug_mock() -> Generator: with patch.object(Dataset, "_delete_by_slug") as mock: yield mock @fixture -def _delete_by_id_mock(): # type: ignore +def _delete_by_id_mock() -> Generator: with patch.object(Dataset, "_delete_by_id") as mock: yield mock diff --git a/darwin/torch/dataset.py b/darwin/torch/dataset.py index ebc3b4d17..0ad2d1be3 100644 --- a/darwin/torch/dataset.py +++ b/darwin/torch/dataset.py @@ -8,8 +8,8 @@ from darwin.cli_functions import _error, _load_client from darwin.client import Client -from darwin.dataset import LocalDataset from darwin.dataset.identifier import DatasetIdentifier +from darwin.dataset.local_dataset import LocalDataset from darwin.torch.transforms import ( Compose, ConvertPolygonsToInstanceMasks, @@ -99,7 +99,7 @@ class ClassificationDataset(LocalDataset): be composed via torchvision. """ - def __init__(self, transform: Optional[Union[Callable, List]] = None, **kwargs): + def __init__(self, transform: Optional[Union[Callable, List]] = None, **kwargs) -> None: super().__init__(annotation_type="tag", **kwargs) if transform is not None and isinstance(transform, list): diff --git a/darwin/torch/transforms.py b/darwin/torch/transforms.py index 65331a551..5541f123b 100644 --- a/darwin/torch/transforms.py +++ b/darwin/torch/transforms.py @@ -1,6 +1,6 @@ import random from pathlib import Path -from typing import Any, Dict, Optional, Tuple, Union +from typing import Any, Dict, Literal, Optional, Tuple, Union import numpy as np import torch @@ -8,6 +8,8 @@ import torchvision.transforms.functional as F from PIL import Image as PILImage +from darwin.torch.utils import convert_segmentation_to_mask, flatten_masks_by_category + # Optional dependency try: import albumentations as A @@ -25,10 +27,9 @@ AType = Type[None] Compose = Type[None] +TargetKey = Literal["boxes", "labels", "mask", "masks", "image_id", "area", "iscrowd"] -from darwin.torch.utils import convert_segmentation_to_mask, flatten_masks_by_category -TargetKey = Union["boxes", "labels", "mask", "masks", "image_id", "area", "iscrowd"] TargetType = Dict[TargetKey, torch.Tensor] diff --git a/darwin/torch/utils.py b/darwin/torch/utils.py index 3080c6ac6..8b2dbf27d 100644 --- a/darwin/torch/utils.py +++ b/darwin/torch/utils.py @@ -1,10 +1,11 @@ import os import sys from pathlib import Path -from typing import Iterable, List, Optional, Tuple +from typing import Iterable, List, Optional, Tuple, Union import numpy as np import torch +from numpy.typing import ArrayLike from upolygon import draw_polygon from darwin.cli_functions import _error, _load_client @@ -72,7 +73,7 @@ def convert_segmentation_to_mask(segmentations: List[Segment], height: int, widt return torch.stack(masks) -def polygon_area(x: np.ndarray, y: np.ndarray) -> float: +def polygon_area(x: ArrayLike, y: ArrayLike) -> float: """ Returns the area of the input polygon, represented by two numpy arrays for 
x and y coordinates. diff --git a/tests/darwin/torch/dataset_test.py b/tests/darwin/torch/dataset_test.py index 70d22c34f..50484c00c 100644 --- a/tests/darwin/torch/dataset_test.py +++ b/tests/darwin/torch/dataset_test.py @@ -4,8 +4,10 @@ from unittest.mock import patch import numpy as np +import pytest import torch +from darwin.config import Config from darwin.torch.dataset import ( ClassificationDataset, InstanceSegmentationDataset, @@ -26,14 +28,18 @@ def generic_dataset_test(ds, n, size): class TestClassificationDataset: - def test_should_correctly_create_a_single_label_dataset(self, team_slug: str, team_extracted_dataset_path: Path): + def test_should_correctly_create_a_single_label_dataset( + self, team_slug: str, team_extracted_dataset_path: Path + ) -> None: root = team_extracted_dataset_path / team_slug / "sl" ds = ClassificationDataset(dataset_path=root, release_name="latest") generic_dataset_test(ds, n=20, size=(50, 50)) assert not ds.is_multi_label - def test_should_correctly_create_a_multi_label_dataset(self, team_slug: str, team_extracted_dataset_path: Path): + def test_should_correctly_create_a_multi_label_dataset( + self, team_slug: str, team_extracted_dataset_path: Path + ) -> None: root = team_extracted_dataset_path / team_slug / "ml" ds = ClassificationDataset(dataset_path=root, release_name="latest") @@ -42,7 +48,9 @@ def test_should_correctly_create_a_multi_label_dataset(self, team_slug: str, tea class TestInstanceSegmentationDataset: - def test_should_correctly_create_a_instance_seg_dataset(self, team_slug: str, team_extracted_dataset_path: Path): + def test_should_correctly_create_a_instance_seg_dataset( + self, team_slug: str, team_extracted_dataset_path: Path + ) -> None: root = team_extracted_dataset_path / team_slug / "coco" ds = InstanceSegmentationDataset(dataset_path=root, release_name="latest") @@ -51,7 +59,9 @@ def test_should_correctly_create_a_instance_seg_dataset(self, team_slug: str, te class TestSemanticSegmentationDataset: - def test_should_correctly_create_a_semantic_seg_dataset(self, team_slug: str, team_extracted_dataset_path: Path): + def test_should_correctly_create_a_semantic_seg_dataset( + self, team_slug: str, team_extracted_dataset_path: Path + ) -> None: root = team_extracted_dataset_path / team_slug / "coco" ds = SemanticSegmentationDataset(dataset_path=root, release_name="latest") @@ -62,7 +72,7 @@ def test_should_correctly_create_a_semantic_seg_dataset(self, team_slug: str, te class TestObjectDetectionDataset: def test_should_correctly_create_a_object_detection_dataset( self, team_slug: str, team_extracted_dataset_path: Path - ): + ) -> None: root = team_extracted_dataset_path / team_slug / "coco" ds = ObjectDetectionDataset(dataset_path=root, release_name="latest") @@ -85,19 +95,19 @@ def v1_or_v2_slug(request): class TestGetDataset: - def test_exits_when_dataset_not_supported(self, v1_or_v2_slug: str, local_config_file: Config): + def test_exits_when_dataset_not_supported(self, v1_or_v2_slug: str, local_config_file: Config) -> None: with patch.object(sys, "exit") as exception: get_dataset(f"{v1_or_v2_slug}/test", "unknown") exception.assert_called_once_with(1) - def test_exits_when_dataset_does_not_exist_locally(self, v1_or_v2_slug: str, local_config_file: Config): + def test_exits_when_dataset_does_not_exist_locally(self, v1_or_v2_slug: str, local_config_file: Config) -> None: with patch.object(sys, "exit") as exception: get_dataset(f"{v1_or_v2_slug}/test", "classification") exception.assert_called_once_with(1) def 
test_loads_classification_dataset( self, v1_or_v2_slug: str, local_config_file: Config, team_extracted_dataset_path: Path - ): + ) -> None: dataset = get_dataset(f"{v1_or_v2_slug}/sl", "classification") assert isinstance(dataset, ClassificationDataset) assert len(dataset) == 20 @@ -108,7 +118,7 @@ def test_loads_classification_dataset( def test_loads_multi_label_classification_dataset( self, v1_or_v2_slug: str, local_config_file: Config, team_extracted_dataset_path: Path - ): + ) -> None: dataset = get_dataset(f"{v1_or_v2_slug}/ml", "classification") assert isinstance(dataset, ClassificationDataset) assert len(dataset) == 20 @@ -120,7 +130,7 @@ def test_loads_multi_label_classification_dataset( def test_loads_object_detection_dataset_from_bounding_box_annotations( self, v1_or_v2_slug: str, local_config_file: Config, team_extracted_dataset_path: Path - ): + ) -> None: dataset = get_dataset(f"{v1_or_v2_slug}/bb", "object-detection") assert isinstance(dataset, ObjectDetectionDataset) assert len(dataset) == 1 @@ -140,7 +150,7 @@ def test_loads_object_detection_dataset_from_bounding_box_annotations( def test_loads_object_detection_dataset_from_polygon_annotations( self, v1_or_v2_slug: str, local_config_file: Config, team_extracted_dataset_path: Path - ): + ) -> None: dataset = get_dataset(f"{v1_or_v2_slug}/coco", "object-detection") assert isinstance(dataset, ObjectDetectionDataset) assert len(dataset) == 20 @@ -159,7 +169,7 @@ def test_loads_object_detection_dataset_from_polygon_annotations( def test_loads_object_detection_dataset_from_complex_polygon_annotations( self, v1_or_v2_slug: str, local_config_file: Config, team_extracted_dataset_path: Path - ): + ) -> None: dataset = get_dataset(f"{v1_or_v2_slug}/complex_polygons", "object-detection") assert isinstance(dataset, ObjectDetectionDataset) assert len(dataset) == 1 @@ -178,7 +188,7 @@ def test_loads_object_detection_dataset_from_complex_polygon_annotations( def test_loads_instance_segmentation_dataset_from_bounding_box_annotations( self, v1_or_v2_slug: str, local_config_file: Config, team_extracted_dataset_path: Path - ): + ) -> None: # You can load an instance segmentation dataset from an export that only has bounding boxes. # But it will ignore all the annotations, so you'll end up with 0 annotations. 
dataset = get_dataset(f"{v1_or_v2_slug}/bb", "instance-segmentation") @@ -201,7 +211,7 @@ def test_loads_instance_segmentation_dataset_from_bounding_box_annotations( def test_loads_instance_segmentation_dataset_from_polygon_annotations( self, v1_or_v2_slug: str, local_config_file: Config, team_extracted_dataset_path: Path - ): + ) -> None: dataset = get_dataset(f"{v1_or_v2_slug}/coco", "instance-segmentation") assert isinstance(dataset, InstanceSegmentationDataset) assert len(dataset) == 20 @@ -222,7 +232,7 @@ def test_loads_instance_segmentation_dataset_from_polygon_annotations( def test_loads_instance_segmentation_dataset_from_complex_polygon_annotations( self, v1_or_v2_slug: str, local_config_file: Config, team_extracted_dataset_path: Path - ): + ) -> None: dataset = get_dataset(f"{v1_or_v2_slug}/complex_polygons", "instance-segmentation") assert isinstance(dataset, InstanceSegmentationDataset) assert len(dataset) == 1 @@ -243,7 +253,7 @@ def test_loads_instance_segmentation_dataset_from_complex_polygon_annotations( def test_loads_semantic_segmentation_dataset_from_polygon_annotations( self, v1_or_v2_slug: str, local_config_file: Config, team_extracted_dataset_path: Path - ): + ) -> None: dataset = get_dataset(f"{v1_or_v2_slug}/coco", "semantic-segmentation") assert isinstance(dataset, SemanticSegmentationDataset) assert len(dataset) == 20 diff --git a/tests/darwin/torch/utils_test.py b/tests/darwin/torch/utils_test.py index 4fca1354e..1be480569 100644 --- a/tests/darwin/torch/utils_test.py +++ b/tests/darwin/torch/utils_test.py @@ -1,6 +1,7 @@ from typing import List, Tuple import numpy as np +import pytest import torch from darwin.torch.utils import clamp_bbox_to_image_size, flatten_masks_by_category @@ -34,13 +35,13 @@ def multiple_overlap_masks() -> Tuple[torch.Tensor, List[int]]: class TestFlattenMasks: - def test_should_raise_with_incorrect_shaped_inputs(self, basic_masks_with_cats) -> None: + def test_should_raise_with_incorrect_shaped_inputs(self, basic_masks_with_cats: Tuple) -> None: masks, _ = basic_masks_with_cats cats = [0] with pytest.raises(AssertionError) as error: flattened = flatten_masks_by_category(masks, cats) - def test_should_correctly_set_overlap(self, basic_masks_with_cats) -> None: + def test_should_correctly_set_overlap(self, basic_masks_with_cats: Tuple) -> None: masks, cats = basic_masks_with_cats flattened: torch.Tensor = flatten_masks_by_category(masks, cats) assert flattened[0, 0] == 2 @@ -51,12 +52,12 @@ def test_should_correctly_set_overlap(self, basic_masks_with_cats) -> None: assert torch.equal(unique, expected_unique) assert torch.equal(counts, expected_counts) - def test_should_handle_fully_masked_image(self, multiple_overlap_masks) -> None: + def test_should_handle_fully_masked_image(self, multiple_overlap_masks: Tuple) -> None: masks, cats = multiple_overlap_masks flattened: torch.Tensor = flatten_masks_by_category(masks, cats) assert 0 not in np.unique(flattened) - def test_should_handle_multiple_overlaps(self, multiple_overlap_masks) -> None: + def test_should_handle_multiple_overlaps(self, multiple_overlap_masks: Tuple) -> None: masks, cats = multiple_overlap_masks flattened: torch.Tensor = flatten_masks_by_category(masks, cats) unique, counts = flattened.unique(return_counts=True) diff --git a/tests/fixtures.py b/tests/fixtures.py index 835526e59..f59fead65 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -1,5 +1,6 @@ import shutil from pathlib import Path +from typing import Generator from zipfile import ZipFile import pytest @@ 
-93,7 +94,9 @@ def file_read_write_test(darwin_path: Path, annotations_path: Path, split_path: @pytest.fixture -def local_config_file(team_slug: str, team_slug_darwin_json_v2: str, darwin_datasets_path: Path): +def local_config_file( + team_slug: str, team_slug_darwin_json_v2: str, darwin_datasets_path: Path +) -> Generator[Config, None, None]: darwin_path = Path.home() / ".darwin" backup_darwin_path = Path.home() / ".darwin_backup" config_path = darwin_path / "config.yaml" From 2daeae9a8e05aad895b4f43bd658d9f66d8fcea6 Mon Sep 17 00:00:00 2001 From: Owen Date: Tue, 3 Oct 2023 10:53:40 +0100 Subject: [PATCH 191/195] HOUSEKEEPING: Bump version to 0.8.43 --- darwin/version/__init__.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/darwin/version/__init__.py b/darwin/version/__init__.py index b3df0471a..fd3281df3 100644 --- a/darwin/version/__init__.py +++ b/darwin/version/__init__.py @@ -1 +1 @@ -__version__ = "0.8.42" +__version__ = "0.8.43" diff --git a/pyproject.toml b/pyproject.toml index 6bff27f3e..d3a74c10e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "darwin-py" -version = "0.8.42" +version = "0.8.43" description = "Library and command line interface for darwin.v7labs.com" homepage = "https://docs.v7labs.com/reference/getting-started-2" documentation = "https://darwin-py-sdk.v7labs.com/index.html" From 8915a7fca244aaf5069243397c670467ec19da23 Mon Sep 17 00:00:00 2001 From: Nathan Perkins Date: Tue, 3 Oct 2023 14:01:00 +0100 Subject: [PATCH 192/195] [IO-1829] Tests refactor and timing changes (#676) * tests refactor and timing changes * tests for tests --- e2e_tests/helpers.py | 58 +++++++++++++++-- e2e_tests/test_darwin.py | 64 +++++++++---------- .../test_run_cli_command.py | 17 ++--- 3 files changed, 92 insertions(+), 47 deletions(-) diff --git a/e2e_tests/helpers.py b/e2e_tests/helpers.py index 66747a6ba..d2316d660 100644 --- a/e2e_tests/helpers.py +++ b/e2e_tests/helpers.py @@ -3,16 +3,33 @@ import uuid from pathlib import Path from subprocess import run +from time import sleep from typing import Generator, Optional, Tuple import pytest +from attr import dataclass +from cv2 import exp from darwin.exceptions import DarwinException from e2e_tests.objects import E2EDataset from e2e_tests.setup_tests import create_random_image -def run_cli_command(command: str, working_directory: Optional[str] = None, yes: bool = False) -> Tuple[int, str, str]: +@dataclass +class CLIResult: + """Wrapper for the result of a CLI command after decoding the stdout and stderr.""" + + return_code: int + stdout: str + stderr: str + + +SERVER_WAIT_TIME = 5 + + +def run_cli_command( + command: str, working_directory: Optional[str] = None, yes: bool = False, server_wait: int = SERVER_WAIT_TIME +) -> CLIResult: """ Run a CLI command and return the return code, stdout, and stderr. 
@@ -49,9 +66,42 @@ def run_cli_command(command: str, working_directory: Optional[str] = None, yes: capture_output=True, shell=True, ) - + sleep(server_wait) # wait for server to catch up try: - return result.returncode, result.stdout.decode("utf-8"), result.stderr.decode("utf-8") + return CLIResult(result.returncode, result.stdout.decode("utf-8"), result.stderr.decode("utf-8")) except UnicodeDecodeError: - return result.returncode, result.stdout.decode("cp437"), result.stderr.decode("cp437") + return CLIResult(result.returncode, result.stdout.decode("cp437"), result.stderr.decode("cp437")) + + +def format_cli_output(result: CLIResult) -> str: + return f"stdout:\n{result.stdout}\nstderr:\n{result.stderr}\n" + +def assert_cli( + result: CLIResult, + expected_return_code: int = 0, + in_stdout: Optional[str] = None, + in_stderr: Optional[str] = None, + expected_stdout: Optional[str] = None, + expected_stderr: Optional[str] = None, + inverse: bool = False, +) -> None: + assert result.return_code == expected_return_code, format_cli_output(result) + if not inverse: + if in_stdout: + assert in_stdout in result.stdout, format_cli_output(result) + if in_stderr: + assert in_stderr in result.stderr, format_cli_output(result) + if expected_stdout: + assert result.stdout == expected_stdout, format_cli_output(result) + if expected_stderr: + assert result.stderr == expected_stderr, format_cli_output(result) + else: + if in_stdout: + assert in_stdout not in result.stdout, format_cli_output(result) + if in_stderr: + assert in_stderr not in result.stderr, format_cli_output(result) + if expected_stdout: + assert result.stdout != expected_stdout, format_cli_output(result) + if expected_stderr: + assert result.stderr != expected_stderr, format_cli_output(result) diff --git a/e2e_tests/test_darwin.py b/e2e_tests/test_darwin.py index 8025b322c..c1727b417 100644 --- a/e2e_tests/test_darwin.py +++ b/e2e_tests/test_darwin.py @@ -9,7 +9,7 @@ import pytest -from e2e_tests.helpers import run_cli_command +from e2e_tests.helpers import SERVER_WAIT_TIME, assert_cli, run_cli_command from e2e_tests.objects import ConfigValues, E2EDataset, E2EItem from e2e_tests.setup_tests import api_call, create_random_image @@ -19,16 +19,15 @@ def new_dataset() -> E2EDataset: """Create a new dataset via darwin cli and return the dataset object, complete with teardown""" uuid_str = str(uuid.uuid4()) new_dataset_name = "test_dataset_" + uuid_str - exit_level, std_out, _ = run_cli_command(f"darwin dataset create {new_dataset_name}") - assert exit_level == 0 - id_raw = re.findall(r"datasets[/\\+](\d+)", std_out) + result = run_cli_command(f"darwin dataset create {new_dataset_name}") + assert_cli(result, 0) + id_raw = re.findall(r"datasets[/\\+](\d+)", result.stdout) assert id_raw is not None and len(id_raw) == 1 id = int(id_raw[0]) teardown_dataset = E2EDataset(id, new_dataset_name, None) # Add the teardown dataset to the pytest object to ensure it gets deleted when pytest is done pytest.datasets.append(teardown_dataset) # type: ignore - sleep(2) # wait for dataset to be created on server return teardown_dataset @@ -47,11 +46,11 @@ def local_dataset_with_images(local_dataset: E2EDataset) -> E2EDataset: local_dataset.add_item( E2EItem( name=path.name, - id=uuid.uuid4(), # random uuid as only need item for annotation later + id=uuid.uuid4(), # random uuid as only need item for annotation later path=str(path), file_name=path.name, slot_name="", - annotations=[] + annotations=[], ) ) return local_dataset @@ -63,6 +62,7 @@ def 
basic_annotation(name: str) -> dict: annotation["item"]["name"] = name return annotation + @pytest.fixture def local_dataset_with_annotations(local_dataset_with_images: E2EDataset) -> E2EDataset: assert local_dataset_with_images.directory is not None @@ -75,6 +75,7 @@ def local_dataset_with_annotations(local_dataset_with_images: E2EDataset) -> E2E json.dump(annotation, f) return local_dataset_with_images + def test_darwin_create(local_dataset: E2EDataset) -> None: """ Test creating a dataset via the darwin cli, heavy lifting performed @@ -83,6 +84,7 @@ def test_darwin_create(local_dataset: E2EDataset) -> None: assert local_dataset.id is not None assert local_dataset.name is not None + def test_darwin_push(local_dataset_with_images: E2EDataset) -> None: """ Test pushing a dataset via the darwin cli, dataset created via fixture with images added to object @@ -90,11 +92,11 @@ def test_darwin_push(local_dataset_with_images: E2EDataset) -> None: assert local_dataset_with_images.id is not None assert local_dataset_with_images.name is not None assert local_dataset_with_images.directory is not None - sleep(2) - exit_level, std_out, std_err = run_cli_command( + result = run_cli_command( f"darwin dataset push {local_dataset_with_images.name} {local_dataset_with_images.directory}" ) - assert exit_level == 0 + assert_cli(result, 0) + def test_darwin_import(local_dataset_with_annotations: E2EDataset) -> None: """ @@ -106,13 +108,12 @@ def test_darwin_import(local_dataset_with_annotations: E2EDataset) -> None: result = run_cli_command( f"darwin dataset push {local_dataset_with_annotations.name} {local_dataset_with_annotations.directory}" ) - assert result[0] == 0 - sleep(2) - exit_level, std_out, std_err = run_cli_command( + assert_cli(result, 0) + result = run_cli_command( f"darwin dataset import {local_dataset_with_annotations.name} darwin {Path(local_dataset_with_annotations.directory) / 'annotations'}", - yes=True + yes=True, ) - assert exit_level == 0 + assert_cli(result, 0) def test_darwin_export(local_dataset_with_annotations: E2EDataset, config_values: ConfigValues) -> None: @@ -122,41 +123,34 @@ def test_darwin_export(local_dataset_with_annotations: E2EDataset, config_values assert local_dataset_with_annotations.id is not None assert local_dataset_with_annotations.name is not None assert local_dataset_with_annotations.directory is not None - exit_level, std_out, std_err = run_cli_command( + result = run_cli_command( f"darwin dataset push {local_dataset_with_annotations.name} {local_dataset_with_annotations.directory}" ) - assert exit_level == 0 - sleep(2) - exit_level, std_out, std_err = run_cli_command( + assert_cli(result, 0) + result = run_cli_command( f"darwin dataset import {local_dataset_with_annotations.name} darwin {Path(local_dataset_with_annotations.directory) / 'annotations'}", - yes=True + yes=True, ) - assert exit_level == 0 - + assert_cli(result, 0) + # Get class ids as export either needs a workflow and complete annotations or the class ids url = f"{config_values.server}/api/teams/{config_values.team_slug}/annotation_classes?include_tags=true" - response = api_call('get', url, None, config_values.api_key) + response = api_call("get", url, None, config_values.api_key) if not response.ok: raise Exception(f"Failed to get annotation classes: {response.text}") - classes = response.json()['annotation_classes'] + classes = response.json()["annotation_classes"] class_ids = [c["id"] for c in classes] class_str = " ".join([str(c) for c in class_ids]) # Test darwin export - sleep(2) - 
exit_level, std_out, std_err = run_cli_command(
+    result = run_cli_command(
         f"darwin dataset export {local_dataset_with_annotations.name} test_darwin_export --class-ids {class_str}"
     )
-    assert exit_level == 0
-    assert "successfully exported" in std_out, std_out
-    sleep(5)
-    exit_level, std_out, std_err = run_cli_command(
-        f"darwin dataset releases {local_dataset_with_annotations.name}"
-    )
-    assert exit_level == 0
+    assert_cli(result, 0, in_stdout="successfully exported")
+    result = run_cli_command(f"darwin dataset releases {local_dataset_with_annotations.name}")
+    assert_cli(result, 0, in_stdout="No available releases, export one first", inverse=True)
     # Check that a release is there via inverse, the CLI will truncate outputs and pass/fail is not clear
     # if we check for release name
-    assert "No available releases, export one first" not in std_out
+
 
 if __name__ == "__main__":
     pytest.main(["-vv", "-s", __file__])
-
diff --git a/tests/e2e_test_internals/test_run_cli_command.py b/tests/e2e_test_internals/test_run_cli_command.py
index f439f962c..c8ce45201 100644
--- a/tests/e2e_test_internals/test_run_cli_command.py
+++ b/tests/e2e_test_internals/test_run_cli_command.py
@@ -9,19 +9,19 @@
 
 
 def test_does_not_allow_directory_traversal() -> None:
     with pytest.raises(DarwinException) as excinfo:
-        run_cli_command("darwin --help; ls ..")
+        run_cli_command("darwin --help; ls ..", server_wait=0)
 
     assert excinfo.value == "Cannot pass directory traversal to 'run_cli_command'."
 
     with pytest.raises(DarwinException) as excinfo:
-        run_cli_command("darwin --help", working_directory="/usr/bin/../")
+        run_cli_command("darwin --help", working_directory="/usr/bin/../", server_wait=0)
 
     assert excinfo.value == "Cannot pass directory traversal to 'run_cli_command'."
 
 
 @mock.patch("e2e_tests.helpers.run")
 def test_passes_working_directory_to_run_cli_command(mock_subprocess_run: mock.Mock) -> None:
     mock_subprocess_run.reset_mock()
-    run_cli_command("darwin --help", "/usr/bin")
+    run_cli_command("darwin --help", "/usr/bin", server_wait=0)
 
     mock_subprocess_run.assert_called_once()
     assert mock_subprocess_run.call_args[0][0] == "darwin --help"
@@ -35,19 +35,19 @@ def test_passes_back_returncode_stdout_and_stderr(mock_subprocess_run: mock.Mock
 
     mock_subprocess_run.return_value = mocked_output
 
-    return_code, std_out, std_err = run_cli_command("darwin --help", "/usr/bin")
+    result = run_cli_command("darwin --help", "/usr/bin", server_wait=0)
 
     mock_subprocess_run.assert_called_once()
-    assert return_code == 137
-    assert std_out == "stdout"
-    assert std_err == "stderr"
+    assert result.return_code == 137
+    assert result.stdout == "stdout"
+    assert result.stderr == "stderr"
 
 
 @mock.patch("e2e_tests.helpers.run")
 def test_does_not_pass_working_directory_to_run_cli_command(mock_subprocess_run: mock.Mock) -> None:
     mock_subprocess_run.reset_mock()
-    run_cli_command("darwin --help")
+    run_cli_command("darwin --help", server_wait=0)
 
     mock_subprocess_run.assert_called_once()
     assert mock_subprocess_run.call_args[0][0] == "darwin --help"

From b6d7c64cdec37a925ddf488a52d15ca0140aa3df Mon Sep 17 00:00:00 2001
From: Owen Jones
Date: Tue, 3 Oct 2023 17:16:46 +0100
Subject: [PATCH 193/195] [IO-1661][external] Error messages were not clear to
 customers (#674)

* Error messages improved.
* Update to error message

* Correct typo

---------

Co-authored-by: Owen
---
 darwin/importer/formats/coco.py | 25 +++++++++++++++++--------
 1 file changed, 17 insertions(+), 8 deletions(-)

diff --git a/darwin/importer/formats/coco.py b/darwin/importer/formats/coco.py
index 55d3ae9f0..b3d54546f 100644
--- a/darwin/importer/formats/coco.py
+++ b/darwin/importer/formats/coco.py
@@ -1,5 +1,6 @@
+from logging import getLogger
 from pathlib import Path
-from typing import Any, Dict, Iterator, List, Optional
+from typing import Dict, Iterator, List, Optional
 
 import deprecation
 import orjson as json
@@ -19,6 +20,9 @@
 """
 
 
+logger = getLogger(__name__)
+
+
 def parse_path(path: Path) -> Optional[List[dt.AnnotationFile]]:
     """
     Parses the given ``coco`` file and returns a ``List[dt.AnnotationFile]`` with the parsed
@@ -41,7 +45,7 @@ def parse_path(path: Path) -> Optional[List[dt.AnnotationFile]]:
     return list(parse_json(path, data))
 
 
-def parse_json(path: Path, data: Dict[str, Any]) -> Iterator[dt.AnnotationFile]:
+def parse_json(path: Path, data: Dict[str, dt.UnknownType]) -> Iterator[dt.AnnotationFile]:
     """
     Parses the given ``json`` structure into an ``Iterator[dt.AnnotationFile]``.
 
@@ -62,7 +66,7 @@ def parse_json(path: Path, data: Dict[str, Any]) -> Iterator[dt.AnnotationFile]:
     category_lookup_table = {category["id"]: category for category in data["categories"]}
     tag_categories = data.get("tag_categories") or []
     tag_category_lookup_table = {category["id"]: category for category in tag_categories}
-    image_annotations: Dict[str, Any] = {}
+    image_annotations: Dict[str, dt.UnknownType] = {}
 
     for image in data["images"]:
         image_id = image["id"]
@@ -91,15 +95,17 @@ def parse_json(path: Path, data: Dict[str, Any]) -> Iterator[dt.AnnotationFile]:
     yield dt.AnnotationFile(path, filename, annotation_classes, annotations, remote_path=remote_path)
 
 
-def parse_annotation(annotation: Dict[str, Any], category_lookup_table: Dict[str, Any]) -> Optional[dt.Annotation]:
+def parse_annotation(
+    annotation: Dict[str, dt.UnknownType], category_lookup_table: Dict[str, dt.UnknownType]
+) -> Optional[dt.Annotation]:
     """
     Parses the given ``json`` dictionary into a darwin ``Annotation`` if possible.
 
     Parameters
     ----------
-    annotation : Dict[str, Any]
+    annotation : Dict[str, dt.UnknownType]
         The ``json`` dictionary to parse.
-    category_lookup_table : Dict[str, Any]
+    category_lookup_table : Dict[str, dt.UnknownType]
         Dictionary with all the categories from the ``coco`` file.
 
     Returns
@@ -112,7 +118,10 @@ def parse_annotation(annotation: Dict[str, Any], category_lookup_table: Dict[str
     iscrowd = annotation.get("iscrowd") == 1
 
     if iscrowd:
-        print("Warning, unsupported RLE, skipping")
+        logger.warning(
+            f"Skipping annotation {annotation.get('id')} because it is a crowd "
+            "annotation, and Darwin does not support import of crowd annotations."
+        )
         return None
 
     if len(segmentation) == 0 and len(annotation["bbox"]) == 4:
@@ -122,7 +131,7 @@ def parse_annotation(annotation: Dict[str, Any], category_lookup_table: Dict[str
         x, y, w, h = map(int, annotation["bbox"][0])
         return dt.make_bounding_box(category["name"], x, y, w, h)
     elif isinstance(segmentation, dict):
-        print("warning, converting complex coco rle mask to polygon, could take some time")
+        logger.warning("Converting complex COCO RLE mask to polygon; this could take some time")
         if isinstance(segmentation["counts"], list):
             mask = rle_decode(segmentation["counts"], segmentation["size"][::-1])
         else:

From 7586e1ead7daf8444ccefb0751d4e28567319943 Mon Sep 17 00:00:00 2001
From: Owen
Date: Tue, 10 Oct 2023 09:47:19 +0100
Subject: [PATCH 194/195] HOUSEKEEPING: Bump version to v0.8.44

---
 darwin/version/__init__.py | 2 +-
 pyproject.toml             | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/darwin/version/__init__.py b/darwin/version/__init__.py
index fd3281df3..eb467a33f 100644
--- a/darwin/version/__init__.py
+++ b/darwin/version/__init__.py
@@ -1 +1 @@
-__version__ = "0.8.43"
+__version__ = "0.8.44"
diff --git a/pyproject.toml b/pyproject.toml
index d3a74c10e..b6541009e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "darwin-py"
-version = "0.8.43"
+version = "0.8.44"
 description = "Library and command line interface for darwin.v7labs.com"
 homepage = "https://docs.v7labs.com/reference/getting-started-2"
 documentation = "https://darwin-py-sdk.v7labs.com/index.html"

From 763b37720c68795208309dd0603daeaffc779fa2 Mon Sep 17 00:00:00 2001
From: Owen
Date: Tue, 10 Oct 2023 11:56:38 +0100
Subject: [PATCH 195/195] Fixed conflicts

---
 .github/workflows/JOB_e2e.yml |   2 +-
 e2e_tests/helpers.py          |   2 +-
 e2e_tests/test_darwin.py      |  17 ++-
 poetry.lock                   | 274 +++++++++++++++++-----------------
 pyproject.toml                |  16 +-
 5 files changed, 165 insertions(+), 146 deletions(-)

diff --git a/.github/workflows/JOB_e2e.yml b/.github/workflows/JOB_e2e.yml
index ad3bae632..4bdace797 100644
--- a/.github/workflows/JOB_e2e.yml
+++ b/.github/workflows/JOB_e2e.yml
@@ -36,7 +36,7 @@ jobs:
           pip install --editable ".[test,ml,medical,dev,ocv]"
           pip install pytest
       - name: Run Tests
-        run: python -m pytest e2e_tests -W ignore::DeprecationWarning
+        run: python -m pytest e2e_tests --reruns 3 --reruns-delay 10 -W ignore::DeprecationWarning
         env:
           E2E_API_KEY: ${{ secrets.E2E_API_KEY }}
           E2E_ENVIRONMENT: ${{ secrets.E2E_ENVIRONMENT }}
diff --git a/e2e_tests/helpers.py b/e2e_tests/helpers.py
index d2316d660..af143ff4b 100644
--- a/e2e_tests/helpers.py
+++ b/e2e_tests/helpers.py
@@ -24,7 +24,7 @@ class CLIResult:
     stderr: str
 
 
-SERVER_WAIT_TIME = 5
+SERVER_WAIT_TIME = 10
 
 
 def run_cli_command(
diff --git a/e2e_tests/test_darwin.py b/e2e_tests/test_darwin.py
index c1727b417..a01d48e44 100644
--- a/e2e_tests/test_darwin.py
+++ b/e2e_tests/test_darwin.py
@@ -9,7 +9,7 @@
 
 import pytest
 
-from e2e_tests.helpers import SERVER_WAIT_TIME, assert_cli, run_cli_command
+from e2e_tests.helpers import assert_cli, run_cli_command
 from e2e_tests.objects import ConfigValues, E2EDataset, E2EItem
 from e2e_tests.setup_tests import api_call, create_random_image
 
@@ -152,5 +152,20 @@ def test_darwin_export(local_dataset_with_annotations: E2EDataset, config_values
     # if we check for release name
 
 
+def test_delete(local_dataset: E2EDataset) -> None:
+    """
+    Test deleting a dataset via the darwin cli, dataset created via fixture
+    """
+    assert local_dataset.id is not None
+    
assert local_dataset.name is not None + result = run_cli_command(f"darwin dataset remove {local_dataset.name}", yes=True) + assert_cli(result, 0) + # Check that the dataset is gone, if so, remove from pytest object so it doesn't get deleted again + # and cause a failure on teardown + result = run_cli_command(f"darwin dataset files {local_dataset.name}") + assert_cli(result, 1, in_stdout="Error: No dataset with") + pytest.datasets.remove(local_dataset) # type: ignore + + if __name__ == "__main__": pytest.main(["-vv", "-s", __file__]) diff --git a/poetry.lock b/poetry.lock index 9e42b8efc..4e27775fc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -88,86 +88,101 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = 
"charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = 
"charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = 
"charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, ] [[package]] @@ -678,18 +693,18 @@ wheel = "*" [[package]] name = "opencv-python-headless" -version = "4.8.0.76" +version = "4.8.1.78" description = "Wrapper package for OpenCV python bindings." 
optional = true python-versions = ">=3.6" files = [ - {file = "opencv-python-headless-4.8.0.76.tar.gz", hash = "sha256:bc15726187dae26d8a08777faf6bc71d38f20c785c102677f58ba0e935003afb"}, - {file = "opencv_python_headless-4.8.0.76-cp37-abi3-macosx_10_16_x86_64.whl", hash = "sha256:f85d2e3b9d952db35d31f9db8882d073c903921b72b8db1cfed8bbc75e8d3e63"}, - {file = "opencv_python_headless-4.8.0.76-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:8ee3bf1c9086493c340c6a87899f1c7778d729de92bce8560b8c31ab8a9cdf79"}, - {file = "opencv_python_headless-4.8.0.76-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c675b8dec6298ba6a1eec2ce24077a393b4236a043f68dfacb06bf594354ce06"}, - {file = "opencv_python_headless-4.8.0.76-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:220d2e292fa45ef0582aab730460bbc15cfe61f2089208167a372ccf76f01e21"}, - {file = "opencv_python_headless-4.8.0.76-cp37-abi3-win32.whl", hash = "sha256:df0608de207ae9b094ad9eaf1a475cf6e9a069fb12cd289d4a18cefdab2f8aa8"}, - {file = "opencv_python_headless-4.8.0.76-cp37-abi3-win_amd64.whl", hash = "sha256:9c094faf6ec7bd360244647b26ebdf8f54edec1d9292cb9179fff9badcca7be8"}, + {file = "opencv-python-headless-4.8.1.78.tar.gz", hash = "sha256:bc7197b42352f6f865c302a49140b889ec7cd957dd697e2d7fc016ad0d3f28f1"}, + {file = "opencv_python_headless-4.8.1.78-cp37-abi3-macosx_10_16_x86_64.whl", hash = "sha256:f3a33f644249f9ce1c913eac580e4b3ef4ce7cab0a71900274708959c2feb5e3"}, + {file = "opencv_python_headless-4.8.1.78-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:2c7d45721df9801c4dcd34683a15caa0e30f38b185263fec04a6eb274bc720f0"}, + {file = "opencv_python_headless-4.8.1.78-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b6bd6e1132b6f5dcb3a5bfe30fc4d341a7bfb26134da349a06c9255288ded94"}, + {file = "opencv_python_headless-4.8.1.78-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58e70d2f0915fe23e02c6e405588276c9397844a47d38b9c87fac5f7f9ba2dcc"}, + {file = "opencv_python_headless-4.8.1.78-cp37-abi3-win32.whl", hash = "sha256:382f8c7a6a14f80091284eecedd52cee4812231ee0eff1118592197b538d9252"}, + {file = "opencv_python_headless-4.8.1.78-cp37-abi3-win_amd64.whl", hash = "sha256:0a0f1e9f836f7d5bad1dd164694944c8761711cbdf4b36ebbd4815a8ef731079"}, ] [package.dependencies] @@ -773,13 +788,13 @@ files = [ [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -885,13 +900,13 @@ files = [ [[package]] name = "platformdirs" -version = "3.10.0" +version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = true python-versions = ">=3.7" files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, ] [package.dependencies] @@ -1044,6 +1059,22 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-rerunfailures" +version = "12.0" +description = "pytest plugin to re-run tests to eliminate flaky failures" +optional = true +python-versions = ">=3.7" +files = [ + {file = "pytest-rerunfailures-12.0.tar.gz", hash = "sha256:784f462fa87fe9bdf781d0027d856b47a4bfe6c12af108f6bd887057a917b48e"}, + {file = "pytest_rerunfailures-12.0-py3-none-any.whl", hash = "sha256:9a1afd04e21b8177faf08a9bbbf44de7a0fe3fc29f8ddbe83b9684bd5f8f92a9"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=1", markers = "python_version < \"3.8\""} +packaging = ">=17.1" +pytest = ">=6.2" + [[package]] name = "python-dotenv" version = "1.0.0" @@ -1174,13 +1205,13 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy [[package]] name = "rich" -version = "13.5.3" +version = "13.6.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.5.3-py3-none-any.whl", hash = "sha256:9257b468badc3d347e146a4faa268ff229039d4c2d176ab0cffb4c4fbc73d5d9"}, - {file = "rich-13.5.3.tar.gz", hash = "sha256:87b43e0543149efa1253f485cd845bb7ee54df16c9617b8a893650ab84b4acb6"}, + {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, + {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, ] [package.dependencies] @@ -1191,32 +1222,6 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9 [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] -[[package]] -name = "ruff" -version = "0.0.287" -description = "An extremely fast Python linter, written in Rust." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.0.287-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:1e0f9ee4c3191444eefeda97d7084721d9b8e29017f67997a20c153457f2eafd"}, - {file = "ruff-0.0.287-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e9843e5704d4fb44e1a8161b0d31c1a38819723f0942639dfeb53d553be9bfb5"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca1ed11d759a29695aed2bfc7f914b39bcadfe2ef08d98ff69c873f639ad3a8"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cf4d5ad3073af10f186ea22ce24bc5a8afa46151f6896f35c586e40148ba20b"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d9d58bcb29afd72d2afe67120afcc7d240efc69a235853813ad556443dc922"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:06ac5df7dd3ba8bf83bba1490a72f97f1b9b21c7cbcba8406a09de1a83f36083"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bfb478e1146a60aa740ab9ebe448b1f9e3c0dfb54be3cc58713310eef059c30"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00d579a011949108c4b4fa04c4f1ee066dab536a9ba94114e8e580c96be2aeb4"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a810a79b8029cc92d06c36ea1f10be5298d2323d9024e1d21aedbf0a1a13e5"}, - {file = "ruff-0.0.287-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:150007028ad4976ce9a7704f635ead6d0e767f73354ce0137e3e44f3a6c0963b"}, - {file = "ruff-0.0.287-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a24a280db71b0fa2e0de0312b4aecb8e6d08081d1b0b3c641846a9af8e35b4a7"}, - {file = "ruff-0.0.287-py3-none-musllinux_1_2_i686.whl", hash = "sha256:2918cb7885fa1611d542de1530bea3fbd63762da793751cc8c8d6e4ba234c3d8"}, - {file = "ruff-0.0.287-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:33d7b251afb60bec02a64572b0fd56594b1923ee77585bee1e7e1daf675e7ae7"}, - {file = "ruff-0.0.287-py3-none-win32.whl", hash = "sha256:022f8bed2dcb5e5429339b7c326155e968a06c42825912481e10be15dafb424b"}, - {file = "ruff-0.0.287-py3-none-win_amd64.whl", hash = "sha256:26bd0041d135a883bd6ab3e0b29c42470781fb504cf514e4c17e970e33411d90"}, - {file = "ruff-0.0.287-py3-none-win_arm64.whl", hash = "sha256:44bceb3310ac04f0e59d4851e6227f7b1404f753997c7859192e41dbee9f5c8d"}, - {file = "ruff-0.0.287.tar.gz", hash = "sha256:02dc4f5bf53ef136e459d467f3ce3e04844d509bc46c025a05b018feb37bbc39"}, -] - [[package]] name = "scikit-learn" version = "1.3.1" @@ -1507,17 +1512,17 @@ files = [ [[package]] name = "types-requests" -version = "2.31.0.6" +version = "2.31.0.8" description = "Typing stubs for requests" optional = false python-versions = ">=3.7" files = [ - {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"}, - {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"}, + {file = "types-requests-2.31.0.8.tar.gz", hash = "sha256:e1b325c687b3494a2f528ab06e411d7092cc546cc9245c000bacc2fca5ae96d4"}, + {file = "types_requests-2.31.0.8-py3-none-any.whl", hash = "sha256:39894cbca3fb3d032ed8bdd02275b4273471aa5668564617cc1734b0a65ffdf8"}, ] [package.dependencies] -types-urllib3 = "*" +urllib3 = ">=2" [[package]] name = "types-toml" @@ -1530,17 +1535,6 @@ files = [ {file = 
"types_toml-0.10.8.7-py3-none-any.whl", hash = "sha256:61951da6ad410794c97bec035d59376ce1cbf4453dc9b6f90477e81e4442d631"}, ] -[[package]] -name = "types-urllib3" -version = "1.26.25.14" -description = "Typing stubs for urllib3" -optional = false -python-versions = "*" -files = [ - {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, - {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, -] - [[package]] name = "typing-extensions" version = "4.7.1" @@ -1575,13 +1569,13 @@ numpy = "*" [[package]] name = "urllib3" -version = "2.0.5" +version = "2.0.6" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.5-py3-none-any.whl", hash = "sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e"}, - {file = "urllib3-2.0.5.tar.gz", hash = "sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594"}, + {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, + {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, ] [package.extras] @@ -1620,13 +1614,13 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -dev = ["black", "debugpy", "isort", "mypy", "pytest", "responses", "ruff"] +dev = ["black", "debugpy", "isort", "mypy", "pytest", "pytest-rerunfailures", "responses"] medical = ["connected-components-3d", "nibabel"] ml = ["scikit-learn", "torch", "torchvision"] ocv = ["opencv-python-headless"] -test = ["pytest", "responses", "ruff"] +test = ["pytest", "responses"] [metadata] lock-version = "2.0" python-versions = ">=3.7.0,<3.11" -content-hash = "522ae94837261aafc88519fb08890070a9e5abae14b4766ff816d9d7337c2f50" +content-hash = "9865d21e7a68ded971e8b79d8d7c3b3ec669bb97eadce284fda60b892540ad2d" diff --git a/pyproject.toml b/pyproject.toml index b6541009e..d0d834d87 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -103,11 +103,21 @@ types-pyyaml = "^6.0.12.9" python-dotenv = { version = "^1.0.0", python = ">3.8" } opencv-python-headless = { version = "^4.8.0.76", optional = true } pyyaml = "^6.0.1" -ruff = "^0.0.287" +pytest-rerunfailures = { version = "^12.0", optional = true } [tool.poetry.extras] -dev = ["black", "isort", "mypy", "debugpy", "responses", "pytest", "ruff"] -test = ["responses", "pytest", "ruff"] +dev = [ + "black", + "isort", + "flake8", + "mypy", + "debugpy", + "responses", + "pytest", + "flake8-pyproject", + "pytest-rerunfailures", +] +test = ["responses", "pytest", "flake8-pyproject"] ml = ["torch", "torchvision", "scikit-learn", "albumentations"] medical = ["nibabel", "connected-components-3d"] ocv = ["opencv-python-headless"]