diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a0dc570c7..9d0e090ec 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,26 +1,21 @@ name: CI on: - push: - branches-ignore: - - "github-pages/*" - - "gh-pages/*" - - "main" - - "master" + pull_request: + types: [opened, synchronize, reopened] schedule: - # “At 00:00 on every 7th day-of-month from 1 through 31.” (https://crontab.guru) + # At 00:00 on every 7th day-of-month from 1 through 31. (https://crontab.guru) - cron: "0 0 1/7 * *" jobs: - CI: uses: ./.github/workflows/merge-gate.yml secrets: inherit permissions: contents: read - Metrics: needs: [ CI ] uses: ./.github/workflows/report.yml + secrets: inherit permissions: contents: read \ No newline at end of file diff --git a/.github/workflows/pr-merge.yml b/.github/workflows/pr-merge.yml index e5503575b..9b123ed35 100644 --- a/.github/workflows/pr-merge.yml +++ b/.github/workflows/pr-merge.yml @@ -25,5 +25,6 @@ jobs: metrics: needs: [ ci-job ] uses: ./.github/workflows/report.yml + secrets: inherit permissions: contents: read diff --git a/.github/workflows/report.yml b/.github/workflows/report.yml index fc749f57d..32e80780c 100644 --- a/.github/workflows/report.yml +++ b/.github/workflows/report.yml @@ -35,6 +35,9 @@ jobs: - name: Generate Report run: poetry run -- nox -s project:report -- --format json | tee metrics.json + - name: Upload to sonar + run: poetry run -- nox -s sonar:check -- ${{ secrets.SONAR_TOKEN }} + - name: Upload Artifacts uses: actions/upload-artifact@v4.6.2 with: diff --git a/.gitignore b/.gitignore index 0ed5574db..777a73fd9 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ odbcconfig/odbcinst.ini .html-documentation .coverage +.sonar _build/ diff --git a/doc/changes/unreleased.md b/doc/changes/unreleased.md index 79e701b84..1fe82a3e2 100644 --- a/doc/changes/unreleased.md +++ b/doc/changes/unreleased.md @@ -1 +1,29 @@ # Unreleased + +## Summary +This version of the PTB adds the nox task `sonar:check`, see #451. This allows us to +use SonarQube Cloud to analyze, visualize, and track linting, security, and coverage. In +order to properly set it up, you'll need to follow the instructions below for each **public** project. +At this time, PTB does not support setting up SonarQube for a **private** project. + +1. In `noxconfig.py`, specify the relative path to the project's source code in `Config.source` + ```python + source: Path = Path("exasol/toolbox") + ``` +2. Add the 'SONAR_TOKEN' to the 'Organization secrets' in GitHub (this must be done by a GitHub organization owner). +3. Activate the SonarQube Cloud App +4. Create a project on SonarCloud +5. Add the following information to the project's `pyproject.toml` file + ```toml + [tool.sonar] + projectKey = "com.exasol:<project-name>" + hostUrl = "https://sonarcloud.io" + organization = "exasol" + ``` +6. Post-merge, update the branch protections to include SonarQube analysis + +## ✨ Features +* #451: Added a nox task to execute pysonar and added Sonar to the CI + +## ⚒️ Refactorings +* #451: Reduced scope of nox tasks `lint:code` (pylint) and `lint:security` (bandit) to analyze only the package code \ No newline at end of file diff --git a/doc/user_guide/getting_started.rst b/doc/user_guide/getting_started.rst index f00f15c22..ede93a3a0 100644 --- a/doc/user_guide/getting_started.rst +++ b/doc/user_guide/getting_started.rst @@ -179,8 +179,8 @@ forward, and you just can use the example *noxfile.py* below. .. _toolbox tasks: -7. 
Setup for deploying documentation (optional) -+++++++++++++++++++++++++++++++++++++++++++++++ +7. Set up for deploying documentation (optional) +++++++++++++++++++++++++++++++++++++++++++++++++ Within the `gh-pages.yml`, we use the GitHub `upload-pages-artifact` and `deploy-pages` actions. In order to properly deploy your pages, you'll need to reconfigure the GitHub Pages settings for the repo: @@ -199,8 +199,32 @@ We also need to configure settings for github-pages environment: 5. In the 'Deployment branches and tags', click 'Add deployment branch or tag rule' 6. Select 'Ref type' to be 'Tag' and set the 'Name pattern' to `[0-9]*.[0-9]*.[0-9]*` (or whatever matches that repo's tags) +8. Set up for Sonar ++++++++++++++++++++ +PTB supports using SonarQube Cloud to analyze, visualize, and track linting, security, and +coverage. In order to properly set it up, you'll need to follow the instructions below +for each **public** project. At this time, PTB does not support setting up +SonarQube for a **private** project. -8. Go 🥜 +1. In `noxconfig.py`, specify the relative path to the project's source code in `Config.source` + .. code-block:: python + + source: Path = Path("exasol/toolbox") +2. Add the 'SONAR_TOKEN' to the 'Organization secrets' in GitHub (this must be done by a GitHub organization owner). +3. Activate the SonarQube Cloud App +4. Create a project on SonarCloud +5. Add the following information to the project's `pyproject.toml` file + .. code-block:: toml + + [tool.sonar] + projectKey = "com.exasol:<project-name>" + hostUrl = "https://sonarcloud.io" + organization = "exasol" +6. Post-merge, update the branch protections to include SonarQube analysis + + + +9. Go 🥜 +++++++++++++ You are ready to use the toolbox. With *nox -l* you can list all available tasks. 
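For orientation, step 1 of the Sonar setup above amounts to a one-field addition in the downstream project's `noxconfig.py`. The following is a minimal sketch, assuming an illustrative package path `exasol/my_project`; the remaining fields mirror the project template, and real projects may define further fields such as `path_filters`:

```python
# Minimal downstream noxconfig.py sketch (illustrative names).
from dataclasses import dataclass
from pathlib import Path


@dataclass(frozen=True)
class Config:
    root: Path = Path(__file__).parent
    doc: Path = Path(__file__).parent / "doc"
    # Relative path to the package code: sonar:check, lint:code (pylint),
    # and lint:security (bandit) restrict their analysis to this directory.
    source: Path = Path("exasol/my_project")
    version_file: Path = (
        Path(__file__).parent / "exasol" / "my_project" / "version.py"
    )


PROJECT_CONFIG = Config()
```

With `Config.source` set and `SONAR_TOKEN` available as an organization secret, the `report.yml` step `poetry run -- nox -s sonar:check -- ${{ secrets.SONAR_TOKEN }}` first exports `ci-coverage.xml` (restricted to `Config.source`) and then hands it, together with `.lint.json` and `.security.json`, to `pysonar`, as implemented in `exasol/toolbox/nox/_artifacts.py` below.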
diff --git a/exasol/toolbox/nox/_artifacts.py b/exasol/toolbox/nox/_artifacts.py index 7bcfceb57..3f21ef2a5 100644 --- a/exasol/toolbox/nox/_artifacts.py +++ b/exasol/toolbox/nox/_artifacts.py @@ -1,5 +1,4 @@ import json -import pathlib import re import shutil import sqlite3 @@ -11,116 +10,129 @@ from nox import Session from exasol.toolbox.nox._shared import MINIMUM_PYTHON_VERSION -from noxconfig import PROJECT_CONFIG +from noxconfig import ( + PROJECT_CONFIG, + Config, +) + +COVERAGE_FILE = ".coverage" +COVERAGE_XML = "ci-coverage.xml" +LINT_JSON = ".lint.json" +LINT_TXT = ".lint.txt" +SECURITY_JSON = ".security.json" + +ALL_LINT_FILES = {COVERAGE_FILE, LINT_JSON, LINT_TXT, SECURITY_JSON} +COVERAGE_TABLES = {"coverage_schema", "meta", "file", "line_bits"} +LINT_JSON_ATTRIBUTES = { + "type", + "module", + "obj", + "line", + "column", + "endLine", + "endColumn", + "path", + "symbol", + "message", + "message-id", +} + +SECURITY_JSON_ATTRIBUTES = {"errors", "generated_at", "metrics", "results"} @nox.session(name="artifacts:validate", python=False) def check_artifacts(session: Session) -> None: """Validate that all project artifacts are available and consistent""" - if not_available := _missing_files( - {".lint.txt", ".security.json", ".coverage"}, PROJECT_CONFIG.root - ): - print(f"not available: {not_available}") + all_files = {f.name for f in PROJECT_CONFIG.root.iterdir() if f.is_file()} + if missing_files := (ALL_LINT_FILES - all_files): + print(f"files not available: {missing_files}", file=sys.stderr) sys.exit(1) - error = False - if msg := _validate_lint_txt(Path(PROJECT_CONFIG.root, ".lint.txt")): - print(f"error in [.lint.txt]: {msg}") - if msg := _validate_lint_json(Path(PROJECT_CONFIG.root, ".lint.json")): - print(f"error in [.lint.json]: {msg}") - if msg := _validate_security_json(Path(PROJECT_CONFIG.root, ".security.json")): - print(f"error in [.security.json]: {msg}") - error = True - if msg := _validate_coverage(Path(PROJECT_CONFIG.root, ".coverage")): - print(f"error in [.coverage]: {msg}") - error = True - if error: + all_is_valid_checks = [ + _is_valid_lint_txt(Path(PROJECT_CONFIG.root, LINT_TXT)), + _is_valid_lint_json(Path(PROJECT_CONFIG.root, LINT_JSON)), + _is_valid_security_json(Path(PROJECT_CONFIG.root, SECURITY_JSON)), + _is_valid_coverage(Path(PROJECT_CONFIG.root, COVERAGE_FILE)), + ] + if not all(all_is_valid_checks): sys.exit(1) -def _missing_files(expected_files: set, directory: Path) -> set: - files = {f.name for f in directory.iterdir() if f.is_file()} - return expected_files - files +def _handle_validation_error(file: Path, message: str) -> bool: + print(f"error in [{file.name}]: {message}", file=sys.stderr) + return False -def _validate_lint_txt(file: Path) -> str: - try: - content = file.read_text() - except FileNotFoundError as ex: - return f"Could not find file {file}, details: {ex}" +def _is_valid_lint_txt(file: Path) -> bool: + content = file.read_text() expr = re.compile(r"^Your code has been rated at (\d+.\d+)/.*", re.MULTILINE) matches = expr.search(content) if not matches: - return f"Could not find a rating" - return "" + _handle_validation_error(file, "Could not find a rating") + return False + return True -def _validate_lint_json(file: Path) -> str: +def _is_valid_lint_json(file: Path) -> bool: try: content = file.read_text() - except FileNotFoundError as ex: - return f"Could not find file {file}, details: {ex}" - try: issues = json.loads(content) except json.JSONDecodeError as ex: - return f"Invalid json file, details: {ex}" - expected = { - "type", 
- "module", - "obj", - "line", - "column", - "endLine", - "endColumn", - "path", - "symbol", - "message", - "message-id", - } + _handle_validation_error(file, f"Invalid json file, details: {ex}") + return False + for number, issue in enumerate(issues): actual = set(issue.keys()) - missing = expected - actual + missing = LINT_JSON_ATTRIBUTES - actual if len(missing) > 0: - return f"Invalid format, issue {number} is missing the following attributes {missing}" - return "" + _handle_validation_error( + file, + f"Invalid format, issue {number} is missing the following attributes {missing}", + ) + return False + return True -def _validate_security_json(file: Path) -> str: +def _is_valid_security_json(file: Path) -> bool: try: content = file.read_text() - except FileNotFoundError as ex: - return f"Could not find file {file}, details: {ex}" - try: actual = set(json.loads(content)) except json.JSONDecodeError as ex: - return f"Invalid json file, details: {ex}" - expected = {"errors", "generated_at", "metrics", "results"} - missing = expected - actual + return _handle_validation_error(file, f"Invalid json file, details: {ex}") + + missing = SECURITY_JSON_ATTRIBUTES - actual if len(missing) > 0: - return f"Invalid format, the file is missing the following attributes {missing}" - return "" + return _handle_validation_error( + file, + f"Invalid format, the file is missing the following attributes {missing}", + ) + return True -def _validate_coverage(path: Path) -> str: +def _is_valid_coverage(path: Path) -> bool: try: conn = sqlite3.connect(path) + cursor = conn.cursor() except sqlite3.Error as ex: - return f"database connection not possible, details: {ex}" - cursor = conn.cursor() + return _handle_validation_error( + path, f"database connection not possible, details: {ex}" + ) try: actual_tables = set( cursor.execute("select name from sqlite_schema where type == 'table'") ) except sqlite3.Error as ex: - return f"schema query not possible, details: {ex}" - expected = {"coverage_schema", "meta", "file", "line_bits"} - actual = {f[0] for f in actual_tables if (f[0] in expected)} - missing = expected - actual + return _handle_validation_error( + path, f"schema query not possible, details: {ex}" + ) + actual = {f[0] for f in actual_tables if (f[0] in COVERAGE_TABLES)} + missing = COVERAGE_TABLES - actual if len(missing) > 0: - return ( - f"Invalid database, the database is missing the following tables {missing}" + return _handle_validation_error( + path, + f"Invalid database, the database is missing the following tables {missing}", ) - return "" + return True @nox.session(name="artifacts:copy", python=False) @@ -129,16 +141,16 @@ def copy_artifacts(session: Session) -> None: Copy artifacts to the current directory """ - dir = Path(session.posargs[0]) + artifact_dir = Path(session.posargs[0]) suffix = _python_version_suffix() - _combine_coverage(session, dir, f"coverage{suffix}*/.coverage") + _combine_coverage(session, artifact_dir, f"coverage{suffix}*/{COVERAGE_FILE}") _copy_artifacts( - dir, - dir.parent, + artifact_dir, + artifact_dir.parent, [ - f"lint{suffix}/.lint.txt", - f"lint{suffix}/.lint.json", - f"security{suffix}/.security.json", + f"lint{suffix}/{LINT_TXT}", + f"lint{suffix}/{LINT_JSON}", + f"security{suffix}/{SECURITY_JSON}", ], ) @@ -149,14 +161,14 @@ def _python_version_suffix() -> str: return f"-python{pivot}" -def _combine_coverage(session: Session, dir: Path, pattern: str): +def _combine_coverage(session: Session, artifact_dir: Path, pattern: str): """ pattern: glob pattern, e.g. 
"*.coverage" """ - if args := [f for f in dir.glob(pattern) if f.exists()]: + if args := [f for f in artifact_dir.glob(pattern) if f.exists()]: session.run("coverage", "combine", "--keep", *sorted(args)) else: - print(f"Could not find any file {dir}/{pattern}", file=sys.stderr) + print(f"Could not find any file {artifact_dir}/{pattern}", file=sys.stderr) def _copy_artifacts(source: Path, dest: Path, files: Iterable[str]): @@ -167,3 +179,35 @@ def _copy_artifacts(source: Path, dest: Path, files: Iterable[str]): shutil.copy(path, dest) else: print(f"File not found {path}", file=sys.stderr) + + +def _prepare_coverage_xml(session: Session, source: Path) -> None: + command = ["coverage", "xml", "-o", COVERAGE_XML, "--include", f"{source}/*"] + session.run(*command) + + +def _upload_to_sonar(session: Session, sonar_token: str, config: Config) -> None: + command = [ + "pysonar", + "--sonar-token", + sonar_token, + "--sonar-python-coverage-report-paths", + COVERAGE_XML, + "--sonar-python-pylint-report-path", + LINT_JSON, + "--sonar-python-bandit-report-paths", + SECURITY_JSON, + "--sonar-python-version", + ",".join(config.python_versions), + "--sonar-sources", + config.source, + ] + session.run(*command) # type: ignore + + +@nox.session(name="sonar:check", python=False) +def upload_artifacts_to_sonar(session: Session) -> None: + """Upload artifacts to sonar for analysis""" + sonar_token = session.posargs[0] + _prepare_coverage_xml(session, PROJECT_CONFIG.source) + _upload_to_sonar(session, sonar_token, PROJECT_CONFIG) diff --git a/exasol/toolbox/nox/_format.py b/exasol/toolbox/nox/_format.py index cdba9c2fa..0996ac3b1 100644 --- a/exasol/toolbox/nox/_format.py +++ b/exasol/toolbox/nox/_format.py @@ -39,7 +39,7 @@ def _pyupgrade(session: Session, config: Config, files: Iterable[str]) -> None: @nox.session(name="project:fix", python=False) def fix(session: Session) -> None: """Runs all automated fixes on the code base""" - py_files = [f"{file}" for file in python_files(PROJECT_CONFIG.root)] + py_files = python_files(PROJECT_CONFIG.root) _version(session, Mode.Fix) _pyupgrade(session, config=PROJECT_CONFIG, files=py_files) _code_format(session, Mode.Fix, py_files) @@ -48,5 +48,5 @@ def fix(session: Session) -> None: @nox.session(name="project:format", python=False) def fmt_check(session: Session) -> None: """Checks the project for correct formatting""" - py_files = [f"{file}" for file in python_files(PROJECT_CONFIG.root)] + py_files = python_files(PROJECT_CONFIG.root) _code_format(session=session, mode=Mode.Check, files=py_files) diff --git a/exasol/toolbox/nox/_lint.py b/exasol/toolbox/nox/_lint.py index 2972a9c2e..890839a10 100644 --- a/exasol/toolbox/nox/_lint.py +++ b/exasol/toolbox/nox/_lint.py @@ -118,8 +118,8 @@ def report_illegal(illegal: dict[str, list[str]], console: rich.console.Console) @nox.session(name="lint:code", python=False) def lint(session: Session) -> None: - "Runs the static code analyzer on the project" - py_files = [f"{file}" for file in python_files(PROJECT_CONFIG.root)] + """Runs the static code analyzer on the project""" + py_files = python_files(PROJECT_CONFIG.root / PROJECT_CONFIG.source) _pylint(session, py_files) @@ -133,7 +133,7 @@ def type_check(session: Session) -> None: @nox.session(name="lint:security", python=False) def security_lint(session: Session) -> None: """Runs the security linter on the project""" - py_files = [f"{file}" for file in python_files(PROJECT_CONFIG.root)] + py_files = python_files(PROJECT_CONFIG.root / PROJECT_CONFIG.source) 
_security_lint(session, py_files) diff --git a/exasol/toolbox/nox/_shared.py b/exasol/toolbox/nox/_shared.py index f4473bde6..4d6fa6fe1 100644 --- a/exasol/toolbox/nox/_shared.py +++ b/exasol/toolbox/nox/_shared.py @@ -28,14 +28,14 @@ class Mode(Enum): Check = auto() -def python_files(project_root: Path) -> Iterable[Path]: +def python_files(project_root: Path) -> Iterable[str]: """ Returns iterable of python files after removing unwanted paths """ deny_list = DEFAULT_PATH_FILTERS.union(set(PROJECT_CONFIG.path_filters)) files = project_root.glob("**/*.py") - return [path for path in files if not set(path.parts).intersection(deny_list)] + return [f"{path}" for path in files if not set(path.parts).intersection(deny_list)] def _version(session: Session, mode: Mode) -> None: diff --git a/exasol/toolbox/nox/tasks.py b/exasol/toolbox/nox/tasks.py index aeda05694..b99127dac 100644 --- a/exasol/toolbox/nox/tasks.py +++ b/exasol/toolbox/nox/tasks.py @@ -33,7 +33,7 @@ def check(session: Session) -> None: """Runs all available checks on the project""" context = _context(session, coverage=True) - py_files = [f"{file}" for file in python_files(PROJECT_CONFIG.root)] + py_files = python_files(PROJECT_CONFIG.root) _version(session, Mode.Check) _code_format(session, Mode.Check, py_files) _pylint(session, py_files) @@ -76,7 +76,9 @@ def check(session: Session) -> None: from exasol.toolbox.nox._release import prepare_release from exasol.toolbox.nox._artifacts import ( - check_artifacts + check_artifacts, + copy_artifacts, + upload_artifacts_to_sonar ) from exasol.toolbox.nox._dependencies import ( diff --git a/exasol/toolbox/templates/github/workflows/ci.yml b/exasol/toolbox/templates/github/workflows/ci.yml index e1b61aee9..d7f0a0f3f 100644 --- a/exasol/toolbox/templates/github/workflows/ci.yml +++ b/exasol/toolbox/templates/github/workflows/ci.yml @@ -1,14 +1,10 @@ name: CI on: - push: - branches-ignore: - - "github-pages/*" - - "gh-pages/*" - - "main" - - "master" + pull_request: + types: [opened, synchronize, reopened] schedule: - # "At 00:00 on every 7th day-of-month from 1 through 31." (https://crontab.guru) + # At 00:00 on every 7th day-of-month from 1 through 31. 
(https://crontab.guru) - cron: "0 0 1/7 * *" jobs: @@ -22,5 +18,6 @@ jobs: Metrics: needs: [ CI ] uses: ./.github/workflows/report.yml + secrets: inherit permissions: contents: read diff --git a/exasol/toolbox/templates/github/workflows/pr-merge.yml b/exasol/toolbox/templates/github/workflows/pr-merge.yml index 3136b6b31..a95cee5c3 100644 --- a/exasol/toolbox/templates/github/workflows/pr-merge.yml +++ b/exasol/toolbox/templates/github/workflows/pr-merge.yml @@ -28,5 +28,6 @@ jobs: metrics: needs: [ ci-job ] uses: ./.github/workflows/report.yml + secrets: inherit permissions: contents: read diff --git a/exasol/toolbox/templates/github/workflows/report.yml b/exasol/toolbox/templates/github/workflows/report.yml index 8fd4f233b..490a8b470 100644 --- a/exasol/toolbox/templates/github/workflows/report.yml +++ b/exasol/toolbox/templates/github/workflows/report.yml @@ -32,6 +32,9 @@ jobs: - name: Validate Artifacts run: poetry run -- nox -s artifacts:validate + - name: Upload to sonar + run: poetry run -- nox -s sonar:check -- ${{ secrets.SONAR_TOKEN }} + - name: Generate Report run: poetry run -- nox -s project:report -- --format json | tee metrics.json diff --git a/noxconfig.py b/noxconfig.py index a5883f27f..f42ea72d5 100644 --- a/noxconfig.py +++ b/noxconfig.py @@ -43,6 +43,7 @@ class Config: root: Path = Path(__file__).parent doc: Path = Path(__file__).parent / "doc" + source: Path = Path("exasol/toolbox") importlinter: Path = Path(__file__).parent / ".import_linter_config" version_file: Path = Path(__file__).parent / "exasol" / "toolbox" / "version.py" path_filters: Iterable[str] = ( diff --git a/poetry.lock b/poetry.lock index 2225d2605..e40852b07 100644 --- a/poetry.lock +++ b/poetry.lock @@ -965,6 +965,21 @@ files = [ {file = "joblib-1.5.1.tar.gz", hash = "sha256:f4f86e351f39fe3d0d32a9f2c3d8af1ee4cec285aafcb27003dda5205576b444"}, ] +[[package]] +name = "jproperties" +version = "2.1.2" +description = "Java Property file parser and writer for Python" +optional = false +python-versions = ">=2.7" +groups = ["main"] +files = [ + {file = "jproperties-2.1.2-py2.py3-none-any.whl", hash = "sha256:4108e868353a9f4a12bb86a92df5462d0e18d00119169533972ce473029be79a"}, + {file = "jproperties-2.1.2.tar.gz", hash = "sha256:036fcd52c10a8a1c21e6fa2a1c292c93892e759b76490acc4809213a36ddc329"}, +] + +[package.dependencies] +six = ">=1.13,<2.0" + [[package]] name = "license-expression" version = "30.4.1" @@ -984,6 +999,155 @@ files = [ docs = ["Sphinx (>=5.0.2)", "doc8 (>=0.11.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-reredirects (>=0.1.2)", "sphinx-rtd-dark-mode (>=1.3.0)", "sphinx-rtd-theme (>=1.0.0)", "sphinxcontrib-apidoc (>=0.4.0)"] testing = ["black", "isort", "pytest (>=6,!=7.0.0)", "pytest-xdist (>=2)", "twine"] +[[package]] +name = "lxml" +version = "5.4.0" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c"}, + {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:696ea9e87442467819ac22394ca36cb3d01848dad1be6fac3fb612d3bd5a12cf"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef80aeac414f33c24b3815ecd560cee272786c3adfa5f31316d8b349bfade28"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b9c2754cef6963f3408ab381ea55f47dabc6f78f4b8ebb0f0b25cf1ac1f7609"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a62cc23d754bb449d63ff35334acc9f5c02e6dae830d78dab4dd12b78a524f4"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f82125bc7203c5ae8633a7d5d20bcfdff0ba33e436e4ab0abc026a53a8960b7"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b67319b4aef1a6c56576ff544b67a2a6fbd7eaee485b241cabf53115e8908b8f"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:a8ef956fce64c8551221f395ba21d0724fed6b9b6242ca4f2f7beb4ce2f41997"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:0a01ce7d8479dce84fc03324e3b0c9c90b1ece9a9bb6a1b6c9025e7e4520e78c"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91505d3ddebf268bb1588eb0f63821f738d20e1e7f05d3c647a5ca900288760b"}, + {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3bcdde35d82ff385f4ede021df801b5c4a5bcdfb61ea87caabcebfc4945dc1b"}, + {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aea7c06667b987787c7d1f5e1dfcd70419b711cdb47d6b4bb4ad4b76777a0563"}, + {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7fb111eef4d05909b82152721a59c1b14d0f365e2be4c742a473c5d7372f4f5"}, + {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43d549b876ce64aa18b2328faff70f5877f8c6dede415f80a2f799d31644d776"}, + {file = "lxml-5.4.0-cp310-cp310-win32.whl", hash = "sha256:75133890e40d229d6c5837b0312abbe5bac1c342452cf0e12523477cd3aa21e7"}, + {file = "lxml-5.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:de5b4e1088523e2b6f730d0509a9a813355b7f5659d70eb4f319c76beea2e250"}, + {file = "lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9"}, + {file = "lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e"}, + {file = 
"lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86"}, + {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056"}, + {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7"}, + {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd"}, + {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751"}, + {file = "lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4"}, + {file = "lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539"}, + {file = "lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4"}, + {file = "lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079"}, + {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20"}, + {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8"}, + {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f"}, + {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc"}, + {file = "lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f"}, + {file = "lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2"}, + {file = "lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0"}, + {file = "lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982"}, + {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61"}, + {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54"}, + {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b"}, + {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a"}, + {file = "lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82"}, + {file = "lxml-5.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f"}, + {file = "lxml-5.4.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7be701c24e7f843e6788353c055d806e8bd8466b52907bafe5d13ec6a6dbaecd"}, + {file = 
"lxml-5.4.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb54f7c6bafaa808f27166569b1511fc42701a7713858dddc08afdde9746849e"}, + {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97dac543661e84a284502e0cf8a67b5c711b0ad5fb661d1bd505c02f8cf716d7"}, + {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:c70e93fba207106cb16bf852e421c37bbded92acd5964390aad07cb50d60f5cf"}, + {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9c886b481aefdf818ad44846145f6eaf373a20d200b5ce1a5c8e1bc2d8745410"}, + {file = "lxml-5.4.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:fa0e294046de09acd6146be0ed6727d1f42ded4ce3ea1e9a19c11b6774eea27c"}, + {file = "lxml-5.4.0-cp36-cp36m-win32.whl", hash = "sha256:61c7bbf432f09ee44b1ccaa24896d21075e533cd01477966a5ff5a71d88b2f56"}, + {file = "lxml-5.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7ce1a171ec325192c6a636b64c94418e71a1964f56d002cc28122fceff0b6121"}, + {file = "lxml-5.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:795f61bcaf8770e1b37eec24edf9771b307df3af74d1d6f27d812e15a9ff3872"}, + {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29f451a4b614a7b5b6c2e043d7b64a15bd8304d7e767055e8ab68387a8cacf4e"}, + {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:891f7f991a68d20c75cb13c5c9142b2a3f9eb161f1f12a9489c82172d1f133c0"}, + {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4aa412a82e460571fad592d0f93ce9935a20090029ba08eca05c614f99b0cc92"}, + {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:ac7ba71f9561cd7d7b55e1ea5511543c0282e2b6450f122672a2694621d63b7e"}, + {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:c5d32f5284012deaccd37da1e2cd42f081feaa76981f0eaa474351b68df813c5"}, + {file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:ce31158630a6ac85bddd6b830cffd46085ff90498b397bd0a259f59d27a12188"}, + {file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:31e63621e073e04697c1b2d23fcb89991790eef370ec37ce4d5d469f40924ed6"}, + {file = "lxml-5.4.0-cp37-cp37m-win32.whl", hash = "sha256:be2ba4c3c5b7900246a8f866580700ef0d538f2ca32535e991027bdaba944063"}, + {file = "lxml-5.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:09846782b1ef650b321484ad429217f5154da4d6e786636c38e434fa32e94e49"}, + {file = "lxml-5.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eaf24066ad0b30917186420d51e2e3edf4b0e2ea68d8cd885b14dc8afdcf6556"}, + {file = "lxml-5.4.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b31a3a77501d86d8ade128abb01082724c0dfd9524f542f2f07d693c9f1175f"}, + {file = "lxml-5.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e108352e203c7afd0eb91d782582f00a0b16a948d204d4dec8565024fafeea5"}, + {file = "lxml-5.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11a96c3b3f7551c8a8109aa65e8594e551d5a84c76bf950da33d0fb6dfafab7"}, + {file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ca755eebf0d9e62d6cb013f1261e510317a41bf4650f22963474a663fdfe02aa"}, + {file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4cd915c0fb1bed47b5e6d6edd424ac25856252f09120e3e8ba5154b6b921860e"}, + {file = 
"lxml-5.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:226046e386556a45ebc787871d6d2467b32c37ce76c2680f5c608e25823ffc84"}, + {file = "lxml-5.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b108134b9667bcd71236c5a02aad5ddd073e372fb5d48ea74853e009fe38acb6"}, + {file = "lxml-5.4.0-cp38-cp38-win32.whl", hash = "sha256:1320091caa89805df7dcb9e908add28166113dcd062590668514dbd510798c88"}, + {file = "lxml-5.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:073eb6dcdf1f587d9b88c8c93528b57eccda40209cf9be549d469b942b41d70b"}, + {file = "lxml-5.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bda3ea44c39eb74e2488297bb39d47186ed01342f0022c8ff407c250ac3f498e"}, + {file = "lxml-5.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9ceaf423b50ecfc23ca00b7f50b64baba85fb3fb91c53e2c9d00bc86150c7e40"}, + {file = "lxml-5.4.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:664cdc733bc87449fe781dbb1f309090966c11cc0c0cd7b84af956a02a8a4729"}, + {file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67ed8a40665b84d161bae3181aa2763beea3747f748bca5874b4af4d75998f87"}, + {file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b4a3bd174cc9cdaa1afbc4620c049038b441d6ba07629d89a83b408e54c35cd"}, + {file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:b0989737a3ba6cf2a16efb857fb0dfa20bc5c542737fddb6d893fde48be45433"}, + {file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:dc0af80267edc68adf85f2a5d9be1cdf062f973db6790c1d065e45025fa26140"}, + {file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:639978bccb04c42677db43c79bdaa23785dc7f9b83bfd87570da8207872f1ce5"}, + {file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a99d86351f9c15e4a901fc56404b485b1462039db59288b203f8c629260a142"}, + {file = "lxml-5.4.0-cp39-cp39-win32.whl", hash = "sha256:3e6d5557989cdc3ebb5302bbdc42b439733a841891762ded9514e74f60319ad6"}, + {file = "lxml-5.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:a8c9b7f16b63e65bbba889acb436a1034a82d34fa09752d754f88d708eca80e1"}, + {file = "lxml-5.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1b717b00a71b901b4667226bba282dd462c42ccf618ade12f9ba3674e1fabc55"}, + {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27a9ded0f0b52098ff89dd4c418325b987feed2ea5cc86e8860b0f844285d740"}, + {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7ce10634113651d6f383aa712a194179dcd496bd8c41e191cec2099fa09de5"}, + {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53370c26500d22b45182f98847243efb518d268374a9570409d2e2276232fd37"}, + {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6364038c519dffdbe07e3cf42e6a7f8b90c275d4d1617a69bb59734c1a2d571"}, + {file = "lxml-5.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b12cb6527599808ada9eb2cd6e0e7d3d8f13fe7bbb01c6311255a15ded4c7ab4"}, + {file = "lxml-5.4.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5f11a1526ebd0dee85e7b1e39e39a0cc0d9d03fb527f56d8457f6df48a10dc0c"}, + {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b4afaf38bf79109bb060d9016fad014a9a48fb244e11b94f74ae366a64d252"}, + {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:de6f6bb8a7840c7bf216fb83eec4e2f79f7325eca8858167b68708b929ab2172"}, + {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5cca36a194a4eb4e2ed6be36923d3cffd03dcdf477515dea687185506583d4c9"}, + {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b7c86884ad23d61b025989d99bfdd92a7351de956e01c61307cb87035960bcb1"}, + {file = "lxml-5.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:53d9469ab5460402c19553b56c3648746774ecd0681b1b27ea74d5d8a3ef5590"}, + {file = "lxml-5.4.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:56dbdbab0551532bb26c19c914848d7251d73edb507c3079d6805fa8bba5b706"}, + {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14479c2ad1cb08b62bb941ba8e0e05938524ee3c3114644df905d2331c76cd57"}, + {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32697d2ea994e0db19c1df9e40275ffe84973e4232b5c274f47e7c1ec9763cdd"}, + {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:24f6df5f24fc3385f622c0c9d63fe34604893bc1a5bdbb2dbf5870f85f9a404a"}, + {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:151d6c40bc9db11e960619d2bf2ec5829f0aaffb10b41dcf6ad2ce0f3c0b2325"}, + {file = "lxml-5.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4025bf2884ac4370a3243c5aa8d66d3cb9e15d3ddd0af2d796eccc5f0244390e"}, + {file = "lxml-5.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9459e6892f59ecea2e2584ee1058f5d8f629446eab52ba2305ae13a32a059530"}, + {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47fb24cc0f052f0576ea382872b3fc7e1f7e3028e53299ea751839418ade92a6"}, + {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50441c9de951a153c698b9b99992e806b71c1f36d14b154592580ff4a9d0d877"}, + {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ab339536aa798b1e17750733663d272038bf28069761d5be57cb4a9b0137b4f8"}, + {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9776af1aad5a4b4a1317242ee2bea51da54b2a7b7b48674be736d463c999f37d"}, + {file = "lxml-5.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:63e7968ff83da2eb6fdda967483a7a023aa497d85ad8f05c3ad9b1f2e8c84987"}, + {file = "lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml_html_clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.11,<3.1.0)"] + [[package]] name = "maison" version = "1.4.3" @@ -1348,14 +1512,14 @@ uv = ["uv (>=0.1.6)"] [[package]] name = "packageurl-python" -version = "0.16.0" +version = "0.17.1" description = "A purl aka. 
Package URL parser and builder" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "packageurl_python-0.16.0-py3-none-any.whl", hash = "sha256:5c3872638b177b0f1cf01c3673017b7b27ebee485693ae12a8bed70fa7fa7c35"}, - {file = "packageurl_python-0.16.0.tar.gz", hash = "sha256:69e3bf8a3932fe9c2400f56aaeb9f86911ecee2f9398dbe1b58ec34340be365d"}, + {file = "packageurl_python-0.17.1-py3-none-any.whl", hash = "sha256:59b0862ae0b216994f847e05b4c6e870e0d16e1ddd706feefb19d79810f22cbd"}, + {file = "packageurl_python-0.17.1.tar.gz", hash = "sha256:5db592a990b60bc02446033c50fb1803a26c5124cd72c5a2cd1b8ea1ae741969"}, ] [package.extras] @@ -1736,6 +1900,18 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pyfakefs" +version = "5.8.0" +description = "pyfakefs implements a fake file system that mocks the Python file system modules." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "pyfakefs-5.8.0-py3-none-any.whl", hash = "sha256:4bd0fc8def7d0582139922447758632ff34a327b460a7e83feb6edbd841061dd"}, + {file = "pyfakefs-5.8.0.tar.gz", hash = "sha256:7e5457ee3cc67069d3cef6e278227ecfc80bfb61e925bc0a4d3b0af32d1c99ce"}, +] + [[package]] name = "pyflakes" version = "3.3.2" @@ -1824,6 +2000,25 @@ files = [ [package.dependencies] typing-extensions = "*" +[[package]] +name = "pysonar" +version = "1.0.2.1722" +description = "Sonar Scanner for the Python Ecosystem" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pysonar-1.0.2.1722-py3-none-any.whl", hash = "sha256:abacede0e5f8ca1468ac644889e2e87e8adeae07718906fe4ad546f5feacbc5d"}, + {file = "pysonar-1.0.2.1722.tar.gz", hash = "sha256:fc024f5172b97faca6c280284a67be1bfaf8d76f4f04e37f93c86b70042bac15"}, +] + +[package.dependencies] +jproperties = ">=2.1.2,<3.0.0" +pyfakefs = ">=5.7.4,<6.0.0" +requests = ">=2.32.3,<3.0.0" +responses = ">=0.25.6,<0.26.0" +tomli = ">=2.2.1,<3.0.0" + [[package]] name = "pytest" version = "7.4.4" @@ -1943,19 +2138,19 @@ files = [ [[package]] name = "requests" -version = "2.32.3" +version = "2.32.4" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" @@ -1963,6 +2158,26 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "responses" +version = "0.25.7" +description = "A utility library for mocking out the `requests` Python library." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "responses-0.25.7-py3-none-any.whl", hash = "sha256:92ca17416c90fe6b35921f52179bff29332076bb32694c0df02dcac2c6bc043c"}, + {file = "responses-0.25.7.tar.gz", hash = "sha256:8ebae11405d7a5df79ab6fd54277f6f2bc29b2d002d0dd2d5c632594d1ddcedb"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli ; python_version < \"3.11\"", "tomli-w", "types-PyYAML", "types-requests"] + [[package]] name = "rich" version = "13.9.4" @@ -2043,6 +2258,18 @@ files = [ [package.dependencies] Sphinx = "*" +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + [[package]] name = "snowballstemmer" version = "3.0.1" @@ -2382,14 +2609,14 @@ markers = {dev = "python_version < \"3.11\""} [[package]] name = "tomlkit" -version = "0.13.2" +version = "0.13.3" description = "Style preserving TOML library" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, - {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, + {file = "tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0"}, + {file = "tomlkit-0.13.3.tar.gz", hash = "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1"}, ] [[package]] @@ -2412,14 +2639,14 @@ typing-extensions = ">=3.7.4.3" [[package]] name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.14.0" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, + {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, + {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, ] [[package]] @@ -2502,15 +2729,15 @@ files = [ [[package]] name = "zipp" -version = "3.22.0" +version = "3.23.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" groups = ["main"] markers = "python_version == \"3.9\"" files = [ - {file = "zipp-3.22.0-py3-none-any.whl", hash = "sha256:fe208f65f2aca48b81f9e6fd8cf7b8b32c26375266b009b413d45306b6148343"}, - {file = "zipp-3.22.0.tar.gz", hash = "sha256:dd2f28c3ce4bc67507bfd3781d21b7bb2be31103b51a4553ad7d90b84e57ace5"}, + {file = "zipp-3.23.0-py3-none-any.whl", hash = 
"sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, + {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, ] [package.extras] @@ -2518,10 +2745,10 @@ check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \" cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib_resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = ">=3.9,<4.0" -content-hash = "5ad4971398eda95955839d4330f6073953ca6cb34b741801c483c163f54e08a0" +content-hash = "100029ee8a71d93edb9c27c41705a8b7246d8b8daa741c2be4dca45c5ec98b22" diff --git a/project-template/{{cookiecutter.repo_name}}/.gitignore b/project-template/{{cookiecutter.repo_name}}/.gitignore index ca472841c..fa805e42c 100644 --- a/project-template/{{cookiecutter.repo_name}}/.gitignore +++ b/project-template/{{cookiecutter.repo_name}}/.gitignore @@ -43,6 +43,7 @@ htmlcov/ .nox/ .coverage .coverage.* +.sonar .cache nosetests.xml coverage.xml diff --git a/project-template/{{cookiecutter.repo_name}}/noxconfig.py b/project-template/{{cookiecutter.repo_name}}/noxconfig.py index 4be08f301..365e20bec 100644 --- a/project-template/{{cookiecutter.repo_name}}/noxconfig.py +++ b/project-template/{{cookiecutter.repo_name}}/noxconfig.py @@ -9,6 +9,7 @@ class Config: root: Path = Path(__file__).parent doc: Path = Path(__file__).parent / "doc" + source: Path = Path("exasol/{{cookiecutter.package_name}}") version_file: Path = ( Path(__file__).parent / "exasol" diff --git a/pyproject.toml b/pyproject.toml index 45ba56202..ec2577bdd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,8 +4,8 @@ version = "1.4.0" requires-python = ">=3.9,<4.0" description = "Your one-stop solution for managing all standard tasks and core workflows of your Python project." 
authors = [ - {name = "Nicola Coretti", email = "nicola.coretti@exasol.com"}, - {name = "Ariel Schulz", email = "ariel.schulz@exasol.com"}, + { name = "Nicola Coretti", email = "nicola.coretti@exasol.com" }, + { name = "Ariel Schulz", email = "ariel.schulz@exasol.com" }, ] readme = "README.md" license = "MIT" @@ -39,19 +39,26 @@ Issues = "https://github.com/exasol/python-toolbox/issues" Changelog = "https://exasol.github.io/python-toolbox/changelog.html" [tool.poetry.dependencies] +bandit = { extras = ["toml"], version = "^1.7.9" } black = ">=24.1.0" coverage = ">=6.4.4,<8.0.0" furo = ">=2022.9.15" importlib-resources = ">=5.12.0" import-linter = "^2.0" isort = "^6.0.1" +jinja2 = "^3.1.6" +lxml = "^5.4.0" mypy = ">=0.971" myst-parser = ">=2.0.0,<4" nox = ">=2022.8.7" +pip-audit = "^2.7.3" +pip-licenses = "^5.0.0" pluggy = "^1.5.0" pre-commit = ">=4" -prysk = {extras = ["pytest-plugin"], version = ">0.17.0,<1"} +prysk = { extras = ["pytest-plugin"], version = ">0.17.0,<1" } +pydantic = "^2.11.5" pylint = ">=2.15.4" +pysonar = "^1.0.1.1548" pytest = ">=7.2.2,<9" pyupgrade = ">=2.38.2,<4.0.0" shibuya = ">=2024.5.14" @@ -59,12 +66,7 @@ sphinx = ">=5.3,<8" sphinx-copybutton = "^0.5.0" sphinx-inline-tabs = "^2023.4.21" sphinx-design = ">=0.5.0,<1" -typer = {extras = ["all"], version = ">=0.7.0"} -bandit = {extras = ["toml"], version = "^1.7.9"} -jinja2 = "^3.1.6" -pip-licenses = "^5.0.0" -pip-audit = "^2.7.3" -pydantic = "^2.11.5" +typer = { extras = ["all"], version = ">=0.7.0" } [tool.poetry.group.dev.dependencies] autoimport = "^1.4.0" @@ -104,6 +106,7 @@ max-module-lines = 800 [tool.mypy] plugins = ['pydantic.mypy'] +xml_report = 'mypy.xml' [[tool.mypy.overrides]] module = [ @@ -114,4 +117,9 @@ ignore_errors = true [tool.poetry.plugins."console_scripts"] tbx = 'exasol.toolbox.tools.tbx:CLI' -sphinx-multiversion = 'exasol.toolbox.sphinx.multiversion:main' \ No newline at end of file +sphinx-multiversion = 'exasol.toolbox.sphinx.multiversion:main' + +[tool.sonar] +projectKey = "com.exasol:python-toolbox" +hostUrl = "https://sonarcloud.io" +organization = "exasol" \ No newline at end of file diff --git a/test/unit/artifacts_test.py b/test/unit/artifacts_test.py deleted file mode 100644 index 5cf91679b..000000000 --- a/test/unit/artifacts_test.py +++ /dev/null @@ -1,89 +0,0 @@ -import contextlib -import re -from dataclasses import dataclass -from inspect import cleandoc -from pathlib import Path -from unittest.mock import ( - Mock, - call, - patch, -) - -import pytest - -from exasol.toolbox.nox._artifacts import copy_artifacts - - -@contextlib.contextmanager -def mock_session(path: Path, python_version: str, *files: str): - with patch("exasol.toolbox.nox._artifacts.PROJECT_CONFIG") as config: - config.python_versions = [python_version] - for rel in files: - file = path / rel - file.parent.mkdir(parents=True, exist_ok=True) - file.write_text(rel) - yield Mock(posargs=[str(path)]) - - -def test_missing_files(tmp_path, capsys): - with mock_session(tmp_path, "9.9") as session: - copy_artifacts(session) - captured = capsys.readouterr() - assert re.match( - cleandoc( - f""" - Could not find any file .*/coverage-python9.9\\*/.coverage - File not found .*/lint-python9.9/.lint.txt - File not found .*/lint-python9.9/.lint.json - File not found .*/security-python9.9/.security.json - """ - ), - captured.err, - ) - - -@dataclass -class endswith: - """ - Assert that the str representation of the argument ends with the - specified suffix. 
- """ - - suffix: str - - def __eq__(self, actual): - return str(actual).endswith(self.suffix) - - -def test_all_files(tmp_path, capsys): - with mock_session( - tmp_path / "artifacts", - "9.9", - "coverage-python9.9-fast/.coverage", - "coverage-python9.9-slow/.coverage", - "lint-python9.9/.lint.txt", - "lint-python9.9/.lint.json", - "security-python9.9/.security.json", - ) as session: - copy_artifacts(session) - - captured = capsys.readouterr() - assert session.run.call_args == call( - "coverage", - "combine", - "--keep", - endswith("coverage-python9.9-fast/.coverage"), - endswith("coverage-python9.9-slow/.coverage"), - ) - assert re.match( - cleandoc( - f""" - Copying file .*/lint-python9.9/.lint.txt - Copying file .*/lint-python9.9/.lint.json - Copying file .*/security-python9.9/.security.json - """ - ), - captured.err, - ) - for f in [".lint.txt", ".lint.json", ".security.json"]: - assert (tmp_path / f).exists() diff --git a/test/unit/lint_file_check_test.py b/test/unit/lint_file_check_test.py deleted file mode 100644 index a9e4aa494..000000000 --- a/test/unit/lint_file_check_test.py +++ /dev/null @@ -1,339 +0,0 @@ -import json -import sqlite3 -from pathlib import Path - -import pytest - -from exasol.toolbox.nox import _artifacts - - -@pytest.mark.parametrize( - "files,requested_files,expected", - [ - ( - {".lint.json", ".lint.txt", ".security.json", ".coverage"}, - {".lint.json", ".lint.txt", ".security.json", ".coverage"}, - set(), - ), - ( - {".lint.txt", ".security.json", ".coverage"}, - {".lint.json", ".lint.txt", ".security.json", ".coverage"}, - {".lint.json"}, - ), - ( - {".lint.json", ".security.json", ".coverage"}, - {".lint.json", ".lint.txt", ".security.json", ".coverage"}, - {".lint.txt"}, - ), - ( - {".lint.json", ".lint.txt", ".coverage"}, - {".lint.json", ".lint.txt", ".security.json", ".coverage"}, - {".security.json"}, - ), - ( - {".lint.json", ".lint.txt", ".security.json"}, - {".lint.json", ".lint.txt", ".security.json", ".coverage"}, - {".coverage"}, - ), - ( - {","}, - {".lint.json", ".lint.txt", ".security.json", ".coverage"}, - {".lint.json", ".lint.txt", ".security.json", ".coverage"}, - ), - ], -) -def test_check_lint_files(files, requested_files, expected, tmp_path): - path = Path(tmp_path) - for file in files: - Path(path, file).touch() - - actual = _artifacts._missing_files(requested_files, path) - assert actual == expected - - -@pytest.mark.parametrize( - "file,expected", - [ - ("Your code has been rated at 7.85/10 (previous run: 7.83/10, +0.02", ""), - ( - "test_text\nYour code has been rated at 7.85/10 (previous run: 7.83/10, +0.02\ntest_text", - "", - ), - ("", "Could not find a rating"), - ("test_text", "Could not find a rating"), - ], -) -def test_check_lint_txt(file, expected, tmp_path): - path = Path(tmp_path, ".lint.txt") - path.touch() - path.write_text(file) - actual = _artifacts._validate_lint_txt(path) - assert actual == expected - - -@pytest.mark.parametrize( - "attributes,expected", - [ - ( - [ - "type", - "module", - "obj", - "line", - "column", - "endLine", - "endColumn", - "path", - "symbol", - "message", - "message-id", - ], - "", - ), - ( - [ - "module", - "obj", - "line", - "column", - "endLine", - "endColumn", - "path", - "symbol", - "message", - "message-id", - ], - "Invalid format, issue 0 is missing the following attributes {'type'}", - ), - ( - [ - "type", - "obj", - "line", - "column", - "endLine", - "endColumn", - "path", - "symbol", - "message", - "message-id", - ], - "Invalid format, issue 0 is missing the following attributes 
{'module'}", - ), - ( - [ - "type", - "module", - "line", - "column", - "endLine", - "endColumn", - "path", - "symbol", - "message", - "message-id", - ], - "Invalid format, issue 0 is missing the following attributes {'obj'}", - ), - ( - [ - "type", - "module", - "obj", - "column", - "endLine", - "endColumn", - "path", - "symbol", - "message", - "message-id", - ], - "Invalid format, issue 0 is missing the following attributes {'line'}", - ), - ( - [ - "type", - "module", - "obj", - "line", - "endLine", - "endColumn", - "path", - "symbol", - "message", - "message-id", - ], - "Invalid format, issue 0 is missing the following attributes {'column'}", - ), - ( - [ - "type", - "module", - "obj", - "line", - "column", - "endColumn", - "path", - "symbol", - "message", - "message-id", - ], - "Invalid format, issue 0 is missing the following attributes {'endLine'}", - ), - ( - [ - "type", - "module", - "obj", - "line", - "column", - "endLine", - "path", - "symbol", - "message", - "message-id", - ], - "Invalid format, issue 0 is missing the following attributes {'endColumn'}", - ), - ( - [ - "type", - "module", - "obj", - "line", - "column", - "endLine", - "endColumn", - "symbol", - "message", - "message-id", - ], - "Invalid format, issue 0 is missing the following attributes {'path'}", - ), - ( - [ - "type", - "module", - "obj", - "line", - "column", - "endLine", - "endColumn", - "path", - "message", - "message-id", - ], - "Invalid format, issue 0 is missing the following attributes {'symbol'}", - ), - ( - [ - "type", - "module", - "obj", - "line", - "column", - "endLine", - "endColumn", - "path", - "symbol", - "message-id", - ], - "Invalid format, issue 0 is missing the following attributes {'message'}", - ), - ( - [ - "type", - "module", - "obj", - "line", - "column", - "endLine", - "endColumn", - "path", - "symbol", - "message", - ], - "Invalid format, issue 0 is missing the following attributes {'message-id'}", - ), - ], -) -def test_check_lint_json(attributes, expected, tmp_path): - path = Path(tmp_path, ".lint.json") - path.touch() - attributes_dict = {} - for attribute in attributes: - attributes_dict[attribute] = None - with path.open("w") as file: - json.dump([attributes_dict], file) - actual = _artifacts._validate_lint_json(path) - assert actual == expected - - -@pytest.mark.parametrize( - "attributes,expected", - [ - (["errors", "generated_at", "metrics", "results"], ""), - ( - ["generated_at", "metrics", "results"], - "Invalid format, the file is missing the following attributes {'errors'}", - ), - ( - ["errors", "metrics", "results"], - "Invalid format, the file is missing the following attributes {'generated_at'}", - ), - ( - ["errors", "generated_at", "results"], - "Invalid format, the file is missing the following attributes {'metrics'}", - ), - ( - ["errors", "generated_at", "metrics"], - "Invalid format, the file is missing the following attributes {'results'}", - ), - ], -) -def test_check_security_json(attributes, expected, tmp_path): - path = Path(tmp_path, ".security.json") - path.touch() - attributes_dict = {} - for attribute in attributes: - attributes_dict[attribute] = None - with path.open("w") as file: - json.dump(attributes_dict, file) - actual = _artifacts._validate_security_json(path) - assert actual == expected - - -@pytest.mark.parametrize( - "tables, expected", - [ - (["coverage_schema", "meta", "file", "line_bits"], ""), - ( - ["meta", "file", "line_bits"], - "Invalid database, the database is missing the following tables {'coverage_schema'}", - ), - ( - 
["coverage_schema", "file", "line_bits"], - "Invalid database, the database is missing the following tables {'meta'}", - ), - ( - ["coverage_schema", "meta", "line_bits"], - "Invalid database, the database is missing the following tables {'file'}", - ), - ( - [ - "coverage_schema", - "meta", - "file", - ], - "Invalid database, the database is missing the following tables {'line_bits'}", - ), - ], -) -def test_check_coverage(tables, expected, tmp_path): - path = Path(tmp_path, ".coverage") - connection = sqlite3.connect(path) - cursor = connection.cursor() - for table in tables: - cursor.execute(f"CREATE TABLE IF NOT EXISTS {table} (test INTEGER)") - actual = _artifacts._validate_coverage(path) - assert actual == expected diff --git a/test/unit/nox/_artifacts_test.py b/test/unit/nox/_artifacts_test.py new file mode 100644 index 000000000..31058d372 --- /dev/null +++ b/test/unit/nox/_artifacts_test.py @@ -0,0 +1,305 @@ +import contextlib +import json +import re +import sqlite3 +from dataclasses import dataclass +from inspect import cleandoc +from pathlib import Path +from unittest import mock +from unittest.mock import ( + Mock, + call, + patch, +) + +import pytest + +from exasol.toolbox.nox._artifacts import ( + ALL_LINT_FILES, + COVERAGE_FILE, + COVERAGE_TABLES, + LINT_JSON, + LINT_JSON_ATTRIBUTES, + LINT_TXT, + SECURITY_JSON, + SECURITY_JSON_ATTRIBUTES, + _is_valid_coverage, + _is_valid_lint_json, + _is_valid_lint_txt, + _is_valid_security_json, + check_artifacts, + copy_artifacts, +) + + +@contextlib.contextmanager +def mock_check_artifacts_session( + path: Path, +): + with patch("exasol.toolbox.nox._artifacts.PROJECT_CONFIG") as config: + config.root = path + yield Mock() + + +@contextlib.contextmanager +def mock_session(path: Path, python_version: str, *files: str): + with patch("exasol.toolbox.nox._artifacts.PROJECT_CONFIG") as config: + config.python_versions = [python_version] + for rel in files: + file = path / rel + file.parent.mkdir(parents=True, exist_ok=True) + file.write_text(rel) + yield Mock(posargs=[str(path)]) + + +@dataclass +class EndsWith: + """ + Assert that the str representation of the argument ends with the + specified suffix. 
+ """ + + suffix: str + + def __eq__(self, actual): + return str(actual).endswith(self.suffix) + + +class TestCheckArtifacts: + @staticmethod + def _create_artifact_files(path: Path, existing_files: set): + for file in existing_files: + Path(path, file).touch() + + @mock.patch("exasol.toolbox.nox._artifacts._is_valid_lint_txt", return_value=True) + @mock.patch("exasol.toolbox.nox._artifacts._is_valid_lint_json", return_value=True) + @mock.patch( + "exasol.toolbox.nox._artifacts._is_valid_security_json", return_value=True + ) + @mock.patch("exasol.toolbox.nox._artifacts._is_valid_coverage", return_value=True) + def test_passes_when_as_expected( + self, mock_coverage, mock_security, mock_lint_json, mock_lint_txt, tmp_path + ): + self._create_artifact_files(tmp_path, ALL_LINT_FILES) + with mock_check_artifacts_session(tmp_path) as session: + check_artifacts(session) + + @pytest.mark.parametrize( + "missing_files", + [ + (pytest.param({LINT_JSON}, id="lint_json_missing")), + (pytest.param(ALL_LINT_FILES, id="all_files_missing")), + ], + ) + def test_fails_when_file_missing(self, tmp_path, missing_files, capsys): + existing_files = ALL_LINT_FILES - missing_files + self._create_artifact_files(tmp_path, existing_files) + + with mock_check_artifacts_session(tmp_path) as session: + with pytest.raises(SystemExit): + check_artifacts(session) + assert f"files not available: {missing_files}" in capsys.readouterr().err + + def test_fails_when_check_fails(self, tmp_path, capsys): + self._create_artifact_files(tmp_path, ALL_LINT_FILES) + with mock_check_artifacts_session(tmp_path) as session: + with pytest.raises(SystemExit): + check_artifacts(session) + assert "error in [" in capsys.readouterr().err + + +class TestIsValidLintTxt: + @staticmethod + def _create_json_txt(path: Path, text: str) -> None: + path.touch() + path.write_text(text) + + def test_passes_when_as_expected(self, tmp_path): + path = Path(tmp_path, LINT_TXT) + text = "Your code has been rated at 7.85/10 (previous run: 7.83/10, +0.02" + self._create_json_txt(path, text) + + assert _is_valid_lint_txt(path) + + def test_fails_when_rating_not_found(self, tmp_path, capsys): + path = Path(tmp_path, LINT_TXT) + text = "dummy_text" + self._create_json_txt(path, text) + + result = _is_valid_lint_txt(path) + + assert not result + assert "Could not find a rating" in capsys.readouterr().err + + +class TestIsValidLintJson: + @staticmethod + def _create_expected_json_file(path: Path, attributes: set) -> None: + path.touch() + attributes_dict = {attribute: None for attribute in attributes} + with path.open("w") as file: + json.dump([attributes_dict], file) + + def test_passes_when_as_expected(self, tmp_path): + path = Path(tmp_path, LINT_JSON) + self._create_expected_json_file(path, attributes=LINT_JSON_ATTRIBUTES) + + result = _is_valid_lint_json(path) + assert result + + @staticmethod + def test_is_not_a_json(tmp_path, capsys): + path = Path(tmp_path, LINT_JSON) + path.touch() + path.write_text("dummy") + + result = _is_valid_lint_json(path) + + assert not result + assert "Invalid json file" in capsys.readouterr().err + + @pytest.mark.parametrize( + "missing_attributes", [pytest.param({"message-id"}, id="missing_message-id")] + ) + def test_missing_attributes(self, tmp_path, capsys, missing_attributes): + attributes = LINT_JSON_ATTRIBUTES - missing_attributes + path = Path(tmp_path, LINT_JSON) + self._create_expected_json_file(path, attributes=attributes) + + result = _is_valid_lint_json(path) + + assert not result + assert ( + f"missing the following 
attributes {missing_attributes}" + in capsys.readouterr().err + ) + + +class TestIsValidSecurityJson: + @staticmethod + def _create_expected_json_file(path: Path, attributes: set) -> None: + path.touch() + attributes_dict = {attribute: None for attribute in attributes} + with path.open("w") as file: + json.dump(attributes_dict, file) + + def test_passes_when_as_expected(self, tmp_path): + path = Path(tmp_path, SECURITY_JSON) + self._create_expected_json_file(path, attributes=SECURITY_JSON_ATTRIBUTES) + + assert _is_valid_security_json(path) + + @staticmethod + def test_is_not_a_json(tmp_path, capsys): + path = Path(tmp_path, LINT_JSON) + path.touch() + path.write_text("dummy") + + result = _is_valid_security_json(path) + + assert not result + assert "Invalid json file" in capsys.readouterr().err + + @pytest.mark.parametrize( + "missing_attributes", [pytest.param({"errors"}, id="missing_errors")] + ) + def test_missing_attributes(self, tmp_path, capsys, missing_attributes): + attributes = SECURITY_JSON_ATTRIBUTES - missing_attributes + path = Path(tmp_path, LINT_JSON) + self._create_expected_json_file(path, attributes=attributes) + + result = _is_valid_security_json(path) + + assert not result + assert ( + f"missing the following attributes {missing_attributes}" + in capsys.readouterr().err + ) + + +class TestIsValidCoverage: + @staticmethod + def _create_coverage_file(path: Path, tables: set) -> None: + connection = sqlite3.connect(path) + cursor = connection.cursor() + for table in tables: + cursor.execute(f"CREATE TABLE IF NOT EXISTS {table} (test INTEGER)") + + def test_passes_when_as_expected(self, tmp_path): + path = Path(tmp_path, COVERAGE_FILE) + self._create_coverage_file(path, COVERAGE_TABLES) + + result = _is_valid_coverage(path) + + assert result + + @pytest.mark.parametrize( + "missing_table", + [ + pytest.param({"coverage_schema"}, id="missing_coverage_schema"), + ], + ) + def test_database_missing_tables(self, tmp_path, capsys, missing_table): + tables = COVERAGE_TABLES - missing_table + path = Path(tmp_path, COVERAGE_FILE) + self._create_coverage_file(path, tables) + + result = _is_valid_coverage(path) + + assert not result + assert ( + f"missing the following tables {missing_table}" in capsys.readouterr().err + ) + + +class TestCopyArtifacts: + @staticmethod + def test_missing_files(tmp_path, capsys): + with mock_session(tmp_path, "9.9") as session: + copy_artifacts(session) + captured = capsys.readouterr() + assert re.match( + cleandoc( + """ + Could not find any file .*/coverage-python9.9\\*/.coverage + File not found .*/lint-python9.9/.lint.txt + File not found .*/lint-python9.9/.lint.json + File not found .*/security-python9.9/.security.json + """ + ), + captured.err, + ) + + @staticmethod + def test_all_files(tmp_path, capsys): + with mock_session( + tmp_path / "artifacts", + "9.9", + "coverage-python9.9-fast/.coverage", + "coverage-python9.9-slow/.coverage", + "lint-python9.9/.lint.txt", + "lint-python9.9/.lint.json", + "security-python9.9/.security.json", + ) as session: + copy_artifacts(session) + + captured = capsys.readouterr() + assert session.run.call_args == call( + "coverage", + "combine", + "--keep", + EndsWith("coverage-python9.9-fast/.coverage"), + EndsWith("coverage-python9.9-slow/.coverage"), + ) + assert re.match( + cleandoc( + """ + Copying file .*/lint-python9.9/.lint.txt + Copying file .*/lint-python9.9/.lint.json + Copying file .*/security-python9.9/.security.json + """ + ), + captured.err, + ) + for f in [".lint.txt", ".lint.json", 
".security.json"]: + assert (tmp_path / f).exists() diff --git a/test/unit/nox/_shared_test.py b/test/unit/nox/_shared_test.py index 0005b39d2..e56a2e2b9 100644 --- a/test/unit/nox/_shared_test.py +++ b/test/unit/nox/_shared_test.py @@ -51,4 +51,4 @@ def test_python_files( actual = python_files(tmp_directory) assert len(actual) == 1 - assert actual[0].parent.name == package_directory + assert "toolbox-dummy" in actual[0]