From 0987e61cd43b9bd9c3e612c40d634957f430a9be Mon Sep 17 00:00:00 2001 From: Jonathan Drake Date: Thu, 11 Nov 2021 12:37:26 -0800 Subject: [PATCH] QPT-37223 New nspoet CLI (#1) --- .circleci/config.yml | 365 +++++++ .pre-commit-config.yaml | 2 +- ns_poet/__init__.py | 2 +- ns_poet/cli.py | 57 ++ ns_poet/example.py | 6 - ns_poet/exceptions.py | 67 ++ ns_poet/package.py | 111 +++ ns_poet/package_targets/__init__.py | 4 + ns_poet/package_targets/base.py | 46 + ns_poet/package_targets/python_package.py | 185 ++++ ns_poet/package_targets/requirement.py | 16 + ns_poet/processor.py | 224 +++++ ns_poet/project.py | 182 ++++ ns_poet/requirements.py | 370 +++++++ ns_poet/util.py | 62 ++ poetry.lock | 1061 +++++++++++++++++++++ pyproject.toml | 12 +- tests/functional/test_package.py | 23 + tests/functional/test_packages.py | 28 + tests/functional/test_project.py | 27 + tests/unit/test_example.py | 13 - 21 files changed, 2840 insertions(+), 23 deletions(-) create mode 100644 ns_poet/cli.py delete mode 100644 ns_poet/example.py create mode 100644 ns_poet/exceptions.py create mode 100644 ns_poet/package.py create mode 100644 ns_poet/package_targets/__init__.py create mode 100644 ns_poet/package_targets/base.py create mode 100644 ns_poet/package_targets/python_package.py create mode 100644 ns_poet/package_targets/requirement.py create mode 100644 ns_poet/processor.py create mode 100644 ns_poet/project.py create mode 100644 ns_poet/requirements.py create mode 100644 ns_poet/util.py create mode 100644 poetry.lock create mode 100644 tests/functional/test_package.py create mode 100644 tests/functional/test_packages.py create mode 100644 tests/functional/test_project.py delete mode 100644 tests/unit/test_example.py diff --git a/.circleci/config.yml b/.circleci/config.yml index e69de29..9051e06 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -0,0 +1,365 @@ +commands: + bump-branch-version: + description: Bump package version via a git commit + steps: + - run: + command: 
.circleci/scripts/bump-version.sh "$GITHUB_PR_BASE_BRANCH" "$CIRCLE_BRANCH" + name: Bump package version + cancel-job: + description: Cancel the current job based on an environment variable + parameters: + method: + default: cancel + description: | + Method of cancellation; Either cancel the job or just halt the step so the job + still succeeds + enum: + - cancel + - halt + type: enum + steps: + - run: + command: | + echo "CANCEL_JOB=$CANCEL_JOB" + [[ -z "$CANCEL_JOB" ]] && exit 0 + set -e + if [[ "<< parameters.method >>" == "cancel" ]]; then + OUTPUT=$( + curl \ + --user "${CIRCLE_API_USER_TOKEN}:" \ + -X POST \ + --max-time 60 \ + --connect-timeout 60 \ + "https://circleci.com/api/v1.1/project/github/${CIRCLE_PROJECT_USERNAME}/${CIRCLE_PROJECT_REPONAME}/${CIRCLE_BUILD_NUM}/cancel") + echo "$OUTPUT" + STATUS="$(echo "$OUTPUT" | jq -r .status -)" + if [[ "$STATUS" == 'canceled' ]]; then + # This means the job was cancelled but for some reason the current script is + # still running. Wait a few seconds to let it catch up then fail the job to + # prevent downstream jobs from running unintentionally. 
+ sleep 10 + exit 1 + fi + echo "Failed to cancel job" + exit 1 + elif [[ "<< parameters.method >>" == "halt" ]]; then + # Halt the job and mark it as successful + circleci step halt + else + echo "Unknown cancel method: << parameters.method >>" + exit 1 + fi + name: Cancel the job + conditional-cancel: + description: Cancel the current job based on various conditions + parameters: + cancel_if_paths_changed: + default: "" + description: Cancel the job if these file paths did change (space-separated list) + type: string + cancel_if_tag_in_commit: + default: "" + description: Cancel the job if this tag is in the current commit message + type: string + cancel_if_tag_in_pr_title: + default: "" + description: Cancel the job if this tag is in the pull request title + type: string + cancel_only_if_all_paths_changed_match_pattern: + default: "" + description: Cancel the job if the only files changed match this pattern (grep regex) + type: string + gitref: + default: $CIRCLE_BRANCH + type: string + method: + default: cancel + description: | + Method of cancellation; Either cancel the job or just halt the step so the job + still succeeds + enum: + - cancel + - halt + type: enum + run_if_paths_changed: + default: "" + description: Cancel the job if these file paths did not change (space-separated list) + type: string + run_if_tag_in_commit: + default: "" + description: Cancel the job if this tag is not in the current commit message + type: string + run_only_if_all_paths_changed_match_pattern: + default: "" + description: Cancel the job if any files changed besides the ones matched by this pattern (grep regex) + type: string + steps: + - run: + command: | + COMMIT_RANGE="origin/master..origin/<< parameters.gitref >>" + echo "export COMMIT_RANGE=\"$COMMIT_RANGE\"" >> $BASH_ENV + echo $COMMIT_RANGE + name: Set git diff commit range + - when: + condition: << parameters.run_if_tag_in_commit >> + steps: + - run: + command: | + shopt -s nocasematch + MESSAGE="$(git log -1 
--pretty=%B)" + if [[ "$MESSAGE" != *'<< parameters.run_if_tag_in_commit >>'* ]]; then + echo 'export CANCEL_JOB=1' >> $BASH_ENV + fi + shopt -u nocasematch + name: | + Cancel the job if << parameters.run_if_tag_in_commit >> is not in the current + commit message (case insensitive) + - when: + condition: << parameters.cancel_if_tag_in_commit >> + steps: + - run: + command: | + shopt -s nocasematch + MESSAGE="$(git log -1 --pretty=%B)" + if [[ "$MESSAGE" == *'<< parameters.cancel_if_tag_in_commit >>'* ]]; then + echo 'export CANCEL_JOB=1' >> $BASH_ENV + fi + shopt -u nocasematch + name: | + Cancel the job if << parameters.cancel_if_tag_in_commit >> is in the current + commit message (case insensitive) + - when: + condition: << parameters.cancel_if_tag_in_pr_title >> + steps: + - run: + command: | + shopt -s nocasematch + PR_NUMBER="$(echo "$CIRCLE_PULL_REQUEST" | sed 's/.*\/pull\///')" + URL="https://api.github.com/repos/NarrativeScience/talos/pulls/$PR_NUMBER" + PR_TITLE="$(curl --user "$GITHUB_USERNAME:$GITHUB_PASSWORD" "$URL" | jq '.title')" + if [[ "$PR_TITLE" == *'<< parameters.cancel_if_tag_in_pr_title >>'* ]]; then + echo 'export CANCEL_JOB=1' >> $BASH_ENV + fi + shopt -u nocasematch + name: | + Cancel the job if << parameters.cancel_if_tag_in_pr_title >> is in the pull request + title (case insensitive) + - when: + condition: << parameters.run_if_paths_changed >> + steps: + - run: + command: | + FILES="$(git diff --name-only $COMMIT_RANGE << parameters.run_if_paths_changed >>)" + if [[ ${#FILES} -eq 0 ]]; then + echo 'export CANCEL_JOB=1' >> $BASH_ENV + fi + name: Cancel the job if << parameters.run_if_paths_changed >> did not change + - when: + condition: << parameters.run_only_if_all_paths_changed_match_pattern >> + steps: + - run: + command: | + GREP_PATTERN="<< parameters.run_only_if_all_paths_changed_match_pattern >>" + FILES="$(git --no-pager diff --name-only $COMMIT_RANGE)" + if [[ ${#FILES} -gt 0 ]]; then + FILTERED_FILES=$(echo "$FILES" | grep 
"$GREP_PATTERN") || true + if [[ "$FILES" != "$FILTERED_FILES" ]]; then + echo 'export CANCEL_JOB=1' >> $BASH_ENV + fi + fi + name: Cancel the job if any files beside << parameters.run_only_if_all_paths_changed_match_pattern >> changed + - when: + condition: << parameters.cancel_if_paths_changed >> + steps: + - run: + command: | + FILES="$(git diff --name-only $COMMIT_RANGE << parameters.cancel_if_paths_changed >>)" + if [[ ${#FILES} -gt 0 ]]; then + echo 'export CANCEL_JOB=1' >> $BASH_ENV + fi + name: Cancel the job if << parameters.cancel_if_paths_changed >> did change + - when: + condition: << parameters.cancel_only_if_all_paths_changed_match_pattern >> + steps: + - run: + command: | + GREP_PATTERN="<< parameters.cancel_only_if_all_paths_changed_match_pattern >>" + FILES="$(git --no-pager diff --name-only $COMMIT_RANGE)" + if [[ ${#FILES} -gt 0 ]]; then + FILTERED_FILES=$(echo "$FILES" | grep -E "$GREP_PATTERN") || true + if [[ "$FILES" == "$FILTERED_FILES" ]]; then + echo 'export CANCEL_JOB=1' >> $BASH_ENV + fi + fi + name: Cancel the job if the only files changed match << parameters.cancel_only_if_all_paths_changed_match_pattern >> + - cancel-job: + method: << parameters.method >> + github-release: + description: Create a github release via the github CLI + steps: + - github-cli/setup + - run: + command: | + version="$(poetry version -s)" + notes="$(git show -s --format=%B HEAD)" + gh release create "v${version}" --title "Version ${version}" --notes "$notes" + name: Create GitHub Release + poetry-configure: + description: Configure Poetry + steps: + - run: + command: | + pip install -U pip setuptools + pip install poetry + poetry config pypi-token.pypi "$POETRY_PYPI_TOKEN_PYPI" + name: Configure Poetry + restore-test-cache: + description: Restores the build-test cache for pyenv, poetry, and pre-commit + steps: + - restore_cache: + keys: + - v1-pyenv-{{ arch }}-ns-poet-{{ checksum "poetry.lock" }} + - restore_cache: + keys: + - v1-poetry-{{ arch }}-ns-poet-{{ 
checksum "poetry.lock" }} + - restore_cache: + keys: + - v1-precommit-{{ arch }}-ns-poet-{{ checksum ".pre-commit-config.yaml" }} + save-test-cache: + description: Stores the build-test cache for pyenv, poetry, and pre-commit + steps: + - save_cache: + key: v1-pyenv-{{ arch }}-ns-poet-{{ checksum "poetry.lock" }} + paths: + - ~/.pyenv + - save_cache: + key: v1-poetry-{{ arch }}-ns-poet-{{ checksum "poetry.lock" }} + paths: + - ~/.cache/pypoetry + - save_cache: + key: v1-precommit-{{ arch }}-ns-poet-{{ checksum ".pre-commit-config.yaml" }} + paths: + - ~/.cache/pre-commit + store-test-artifacts: + description: Store test artifacts in CCI workflow run + parameters: + artifacts_path: + description: The absolute path to the artifacts stored as an env var + type: string + artifacts_storage_dir: + default: << parameters.artifacts_path >> + description: The directory in /tmp where we want to store the artifacts + type: string + export_test_results: + default: false + description: Whether or not to upload the artifacts as Test Summary metadata + type: boolean + steps: + - store_artifacts: + destination: << parameters.artifacts_storage_dir >> + path: << parameters.artifacts_path >> + - when: + condition: << parameters.export_test_results >> + steps: + - store_test_results: + path: << parameters.artifacts_path >> +executors: + default: + docker: + - image: cimg/python:3.6 + resource_class: small + python-medium: + docker: + - image: cimg/python:3.6 + resource_class: medium +jobs: + build-test: + description: Test building with Poetry, Cruft, pre-commit, and run unit tests with pytest. 
+ executor: python-medium + steps: + - add_ssh_keys: + fingerprints: + - cb:4c:e2:f4:c2:be:b7:c0:1a:02:1b:13:15:e0:a4:1b + - ghpr/build-prospective-branch + - aws-cli/install + - circleci-cli/install + - restore-test-cache + - poetry-configure + - bump-branch-version + - run: + command: poetry install + name: Install dependencies + - run: + command: poetry build + name: Build artifact with Poetry + - run: + command: poetry run cruft check + name: Cruft check + - run: + command: | + poetry run pre-commit install + poetry run pre-commit run \ + --source "origin/${GITHUB_PR_BASE_BRANCH}" \ + --origin "origin/${CIRCLE_BRANCH}" \ + --show-diff-on-failure + name: Run commit hooks + - run: + command: | + mkdir -p test-results/unit + poetry run pytest --junit-xml test-results/unit/results.xml + name: Run tests + - store-test-artifacts: + artifacts_path: test-results + export_test_results: true + - save-test-cache + - ghpr/slack-pr-author: + color: '#fcaaa3' + get_slack_user_by: meseeks + message: ':ci-fail: Tests failed' + when: on_fail + poetry-publish: + description: Publish a release of the project + executor: default + steps: + - aws-cli/install + - checkout + - conditional-cancel: + cancel_if_tag_in_commit: '[skip publish]' + - poetry-configure + - run: poetry install + - run: poetry build + - run: poetry publish + send-slack-on-pr-success: + description: Send a Slack message to the PR author on PR workflow success. + executor: default + steps: + - ghpr/slack-pr-author: + color: '#4cb79c' + get_slack_user_by: meseeks + message: ':ci-success: PR tests have passed!' 
+orbs: + aws-cli: circleci/aws-cli@2.0.3 + circleci-cli: circleci/circleci-cli@0.1.9 + ghpr: narrativescience/ghpr@1.1.2 + github-cli: circleci/github-cli@1.0.3 +scripts: {} +version: 2.1 +workflows: + publish: + jobs: + - poetry-publish: + context: opensource + filters: + branches: + ignore: /.*/ + tags: + only: /v[0-9]+(\.[0-9]+)*/ + pull-request-tests: + jobs: + - build-test: + context: opensource + filters: + branches: + ignore: + - main + diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8451fdf..23fd78a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -74,7 +74,7 @@ repos: entry: mypy -p ns_poet language: python types: [ file, python ] - additional_dependencies: [ mypy==0.910 ] + additional_dependencies: [ mypy==0.910, types-toml, types-setuptools ] pass_filenames: false - id: flake8 diff --git a/ns_poet/__init__.py b/ns_poet/__init__.py index 6193360..287cc5c 100644 --- a/ns_poet/__init__.py +++ b/ns_poet/__init__.py @@ -1,3 +1,3 @@ """Autogenerate Poetry package manifests in a monorepo""" -__version__ = "0.1.0" +__version__ = "0.2.0" diff --git a/ns_poet/cli.py b/ns_poet/cli.py new file mode 100644 index 0000000..38e0340 --- /dev/null +++ b/ns_poet/cli.py @@ -0,0 +1,57 @@ +"""Contains the CLI""" + +import logging +from pathlib import Path +from typing import Optional + +import click + +from ns_poet.processor import PackageProcessor +from ns_poet.project import PROJECT_CONFIG +from ns_poet.requirements import update_import_map + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +@click.group() +def cli() -> None: + """Autogenerate Poetry package manifests in a monorepo""" + pass + + +@cli.group(name="import-map") +def import_map() -> None: + """Commands for managing imports""" + pass + + +@import_map.command() +def update() -> None: + """Update an import map from requirements.txt""" + update_import_map() + + +@cli.group() +def package() -> None: + """Commands for managing packages""" + 
pass + + +@package.command() +@click.option( + "-p", + "--package-path", + type=click.Path(exists=True, dir_okay=True, file_okay=False, path_type=Path), + help="Generate a package manifest for a single package path", +) +def generate(package_path: Optional[Path]) -> None: + """Generate Poetry package manifests""" + PROJECT_CONFIG.load_requirements() + processor = PackageProcessor() + processor.register_packages() + processor.ensure_no_circular_imports() + if package_path: + processor.generate_package_manifest(package_path) + else: + processor.generate_package_manifests() diff --git a/ns_poet/example.py b/ns_poet/example.py deleted file mode 100644 index fed491c..0000000 --- a/ns_poet/example.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Contains an example function""" - - -def foo() -> str: - """Returns bar""" - return "bar" diff --git a/ns_poet/exceptions.py b/ns_poet/exceptions.py new file mode 100644 index 0000000..ec4e755 --- /dev/null +++ b/ns_poet/exceptions.py @@ -0,0 +1,67 @@ +"""Contains exception classes""" + + +class CircularImportsFound(Exception): + """Raised when circular imports were found processing build targets""" + + pass + + +class NoBuildDependenciesFound(Exception): + """Raised when the dependencies argument could not be found in a BUILD file AST""" + + pass + + +class DistributionError(Exception): + """Raised when a Python distribution can't be parsed""" + + pass + + +class UnsupportedDistributionFormat(DistributionError): + """Raised when the distribution file is something other than a .whl or .tar.gz""" + + pass + + +class MissingDistributionMetadataFile(DistributionError): + """Raised when there is a missing metadata file in a .whl or .tar.gz""" + + pass + + +class NoProjectName(Exception): + """Raised when no project name could be parsed from distribution metadata""" + + pass + + +class InvalidTopLevelFile(Exception): + """Raised when a top_level.txt file could not be interpreted""" + + pass + + +class MultipleSourcePackagesFound(Exception): + 
"""Raised when more than one package is found in src/""" + + pass + + +class DuplicateTarget(Exception): + """Raised when attempting to register a target that has an existing key""" + + pass + + +class NoConsoleScriptFound(Exception): + """Raised when no console_scripts are found in setup.py for a binary target""" + + pass + + +class NoTargetFound(Exception): + """Raised when no target could be found in the registered graph of targets""" + + pass diff --git a/ns_poet/package.py b/ns_poet/package.py new file mode 100644 index 0000000..e29bd31 --- /dev/null +++ b/ns_poet/package.py @@ -0,0 +1,111 @@ +"""Contains PoetPackage class""" + +from pathlib import Path +from typing import Any, Dict, MutableMapping, Optional + +import toml + + +class PoetPackage: + """Class that represents a package managed by ns-poet + + This object manages a Python package's pyproject.toml (i.e. manifest) file. + + The schema for the configuration file in the git project root is:: + + [tool.nspoet] + generate_package_manifest = true + + """ + + def __init__(self, package_path: Path) -> None: + """Initializer + + Args: + package_path: Path to the package + + """ + self.package_path = package_path + self.config_file_path = package_path.joinpath("pyproject.toml") + self._config: Optional[MutableMapping[str, Any]] = None + + @classmethod + def from_path(cls, package_path: Path) -> "PoetPackage": + """Create a package object and load configuration from a given package path + + Args: + package_path: Path to the package + + Returns: + new PoetPackage instance + + """ + p = cls(package_path) + p.load_config() + return p + + def load_config(self) -> MutableMapping[str, Any]: + """Load configuration from disk + + Returns: + configuration dict + + """ + if self.config_file_path.is_file(): + with self.config_file_path.open() as f: + self._config = toml.load(f) + else: + self._config = {} + + return self._config + + def save_config(self) -> None: + """Save the configuration object to disk""" + if 
self.generate_package_manifest: + with self.config_file_path.open("w") as f: + toml.dump( # type: ignore + self._config, + f, + encoder=toml.encoder.TomlPreserveInlineDictEncoder(), # type: ignore + ) + + def to_string(self) -> str: + """Dump the configuration to a TOML string""" + return toml.dumps( # type: ignore + self._config, encoder=toml.encoder.TomlPreserveInlineDictEncoder() # type: ignore + ) + + @property + def package_config(self) -> Dict[str, Any]: + """Return the nspoet configuration subsection within the manifest""" + return self._config.get("tool", {}).get("nspoet", {}) + + @property + def generate_package_manifest(self) -> bool: + """Flag denoting whether to generate a package manifest file""" + return self.package_config.get("generate_package_manifest", True) + + def update( + self, name: str, dependencies: Dict[str, str], dev_dependencies: Dict[str, str] + ) -> None: + """Update the package configuration in place + + Args: + name: package name + dependencies: map of dependency name to version specifier + dev_dependencies: map of development dependency name to version specifier + + """ + self._config.setdefault("tool", {}) + self._config["tool"].setdefault("poetry", {}) + self._config["tool"]["poetry"]["name"] = name + self._config["tool"]["poetry"]["version"] = "1.0.0" + self._config["tool"]["poetry"]["description"] = "" + self._config["tool"]["poetry"]["authors"] = [] + self._config["tool"]["poetry"]["license"] = "Proprietary" + self._config["tool"]["poetry"]["dependencies"] = dependencies + self._config["tool"]["poetry"]["dev-dependencies"] = dev_dependencies + self._config["build-system"] = { + "requires": ["poetry-core>=1.0.0"], + "build-backend": "poetry.core.masonry.api", + } diff --git a/ns_poet/package_targets/__init__.py b/ns_poet/package_targets/__init__.py new file mode 100644 index 0000000..33be71f --- /dev/null +++ b/ns_poet/package_targets/__init__.py @@ -0,0 +1,4 @@ +"""Defines subpackage exports""" +from .base import BuildTarget 
+from .python_package import PythonPackage +from .requirement import PythonRequirement diff --git a/ns_poet/package_targets/base.py b/ns_poet/package_targets/base.py new file mode 100644 index 0000000..c0c2b09 --- /dev/null +++ b/ns_poet/package_targets/base.py @@ -0,0 +1,46 @@ +"""Contains base BuildTarget class""" +from abc import ABC +from functools import total_ordering +from typing import Any + + +@total_ordering +class BuildTarget(ABC): + """Represents a Python build target in Pants. + + This should be used as a mixin to provide common attributes and methods. + """ + + # This string will be used as an identifier for the build target + key: str = None + + def __str__(self) -> str: + """String representation""" + return str(self.key) + + def __eq__(self, other: Any) -> bool: + """Equality check""" + if hasattr(other, "key"): + return str(self.key) == str(other.key) + else: + return False + + def __hash__(self) -> int: + """Object hash""" + return hash((str(self.key),)) + + def __repr__(self) -> str: + """Object representation for debugging""" + return str(self.key) + + def __lt__(self, other: Any) -> bool: + """Less than comparator""" + if hasattr(other, "key"): + return str(self.key) < str(other.key) + else: + return False + + @property + def dependency_target(self) -> str: + """Returns the representation of this target in another target's dependencies""" + return str(self.key) diff --git a/ns_poet/package_targets/python_package.py b/ns_poet/package_targets/python_package.py new file mode 100644 index 0000000..8767608 --- /dev/null +++ b/ns_poet/package_targets/python_package.py @@ -0,0 +1,185 @@ +"""Contains base PythonPackage class""" +import logging +import os +from pathlib import Path +from typing import Dict, List, Set, Tuple, Union + +from toml.decoder import InlineTableDict + +from ns_poet.package import PoetPackage +from ns_poet.project import PROJECT_CONFIG +from .base import BuildTarget +from .requirement import PythonRequirement + +logger = 
logging.getLogger(__name__) + + +class DynamicInlineTableDict(dict, InlineTableDict): + """Subclass for inline tables in TOML""" + + +class PythonPackage(BuildTarget): + """Represents a Python package (e.g. lib or test) build target in Pants""" + + def __init__( + self, + package_path: Union[str, Path], + ) -> None: + """Initializer + + Args: + package_path: Path to the package directory, e.g. lib/python_core + + """ + self.package_path = Path(package_path) + + # This string will be used as an identifier for the build target + self.key = str(package_path) + + # Set of dependencies to be gathered later + self.dependencies: Set[BuildTarget] = set() + self.dev_dependencies: Set[BuildTarget] = set() + + # Load the package config + self.config = PoetPackage.from_path(self.package_path) + + @property + def package_name(self) -> str: + """Python package name + + Raises: + :py:exc:`ValueError`: if no package name could be parsed from the directory + structure + + Returns: + package name string + + """ + for p in self.package_path.joinpath("src").iterdir(): + if p.is_dir() and not p.name.endswith(".egg-info"): + return p.name + + raise ValueError("Could not determine package_name") + + def find_target_paths(self) -> List[Tuple["PythonPackage", str]]: + """Find a list of target paths for parsing dependencies + + This default implementation collects all the .py files. Subclasses may override + this method to collect different file types. 
+ + Returns: + list of tuples with items: + * the current target (to indicate the owner) + * the path to the file that will be parsed + + """ + target_paths = [] + for dirpath, dirnames, filenames in os.walk(self.package_path): + if os.path.basename(dirpath) in PROJECT_CONFIG.ignore_dirs: + # Empty the list of directories so os.walk does not recur + dirnames.clear() + else: + for filename in filenames: + if filename.endswith(".py"): + target_paths.append((self, os.path.join(dirpath, filename))) + + return target_paths + + def add_dependency( + self, targets: Dict[str, "PythonPackage"], module_path: str, package_name: str + ) -> None: + """Add a new dependency to the set + + A child class may choose to implement ``set_dependencies`` and skip this method. + + Args: + targets: map of package name to instance of BuildTarget + module_path: path of the module that was parsed for imports + package_name: dependency package name, as parsed from an import statement + + """ + logger.debug(f"Processing {module_path}:{package_name}") + if package_name in targets and package_name != self.package_name: + if "tests" in Path(module_path).parts: + self.dev_dependencies.add(targets[package_name]) + logger.debug( + f"Added dev dependency for existing target: {targets[package_name]}" + ) + else: + self.dependencies.add(targets[package_name]) + logger.debug( + f"Added dependency for existing target: {targets[package_name]}" + ) + return + + import_map = PROJECT_CONFIG.get_import_map() + if package_name in import_map: + if "tests" in Path(module_path).parts: + self.dev_dependencies.add(PythonRequirement(import_map[package_name])) + logger.debug( + f"Added third-party dev dependency: {import_map[package_name]}" + ) + else: + self.dependencies.add(PythonRequirement(import_map[package_name])) + logger.debug( + f"Added third-party dependency: {import_map[package_name]}" + ) + + def set_extra_dependencies(self, targets: Dict[str, "PythonPackage"]) -> None: + """Set extra dependencies on the 
target
+
+        This method allows targets to add dependencies based on their own custom logic.
+        By default this is a no-op.
+
+        A standard thing to do would be to call
+        ``self.add_dependency(targets, module_path, package_name)`` one or more times.
+
+        Args:
+            targets: map of package name to instance of BuildTarget
+
+        """
+        pass
+
+    def convert_dependencies(self, dependencies: Set[BuildTarget]) -> Dict[str, str]:
+        """Convert dependencies into a map suitable for dumping to TOML format
+
+        Args:
+            dependencies: set of dependencies
+
+        Raises:
+            :py:exc:`NotImplementedError`: if dependency is not a requirement or Python
+                package
+
+        Returns:
+            map of project name to version specifier
+
+        """
+        converted = {}
+        for dependency in dependencies:
+            if isinstance(dependency, PythonRequirement):
+                requirement = PROJECT_CONFIG.get_requirement(dependency.key)
+                version = str(requirement.specifier)  # type: ignore
+                converted[requirement.unsafe_name] = version if version else "*"
+            elif isinstance(dependency, PythonPackage):
+                converted[dependency.package_name] = DynamicInlineTableDict(
+                    path=os.path.relpath(dependency.package_path, self.package_path),
+                    develop=True,
+                )
+            else:
+                raise NotImplementedError(dependency)
+
+        # Sort by key
+        converted = {
+            k: v for k, v in sorted(converted.items(), key=lambda item: item[0].lower())
+        }
+
+        return converted
+
+    def generate_package_manifest(self) -> None:
+        """Generate a Poetry package manifest for the build target"""
+        dependencies = {"python": PROJECT_CONFIG.default_python_version}
+        dependencies.update(self.convert_dependencies(self.dependencies))
+        dev_dependencies = self.convert_dependencies(
+            self.dev_dependencies - self.dependencies
+        )
+        self.config.update(self.package_name, dependencies, dev_dependencies)
diff --git a/ns_poet/package_targets/requirement.py b/ns_poet/package_targets/requirement.py
new file mode 100644
index 0000000..fb5aa39
--- /dev/null
+++ b/ns_poet/package_targets/requirement.py
@@ -0,0 +1,16 @@
+"""Contains 
PythonRequirement class""" +from .base import BuildTarget + + +class PythonRequirement(BuildTarget): + """Represents a Python requirement build target""" + + def __init__(self, package_name: str) -> None: + """Initializer + + Args: + package_name: package name + + """ + self.package_name = package_name + self.key = f"{self.package_name}" diff --git a/ns_poet/processor.py b/ns_poet/processor.py new file mode 100644 index 0000000..b829c8f --- /dev/null +++ b/ns_poet/processor.py @@ -0,0 +1,224 @@ +"""Contains package processor class""" +import logging +from multiprocessing import Pool +import os +from pathlib import Path +import re +from typing import Dict, List, Optional, Tuple + +import networkx as nx + +from .exceptions import ( + CircularImportsFound, + DuplicateTarget, + MultipleSourcePackagesFound, +) +from .package_targets import PythonPackage +from .project import PROJECT_CONFIG +from .util import gather_dependencies_from_module, write_package_config_file + +logger = logging.getLogger(__name__) + +# Number of workers to use for various multiprocessing pools +PROCESSES = max(1, os.cpu_count() - 1) + + +class PackageProcessor: + """Class with methods for processing internal Python packages in order to: + + * Check for circular imports + * Generate Pants BUILD files + * Determine package dependencies + """ + + def __init__(self) -> None: + """Initializer""" + # Map of target key to instance of a Python package build target + self._targets: Dict[str, PythonPackage] = {} + # Graph of targets + self._target_graph: nx.DiGraph = None + + def get_target(self, target_key: str) -> PythonPackage: + """Get a registered target by key + + Args: + target_key: Key under which the target was registered + + Returns: + target/package object + + """ + return self._targets[target_key] + + def register_packages(self) -> None: + """Register targets and their dependencies. + + This should be called before performing an action with the packages. 
+ """ + self._register_task_targets_code() + # self._register_task_targets_py2sfn_projects() + # self._register_task_targets_tests() + # self._register_extra_targets() + self._gather_dependencies() + self._build_target_graph() + + def _gather_dependencies(self) -> None: + """Gather dependencies for each target""" + logger.info("Gathering dependencies") + + # Collect a list of (build target, Python module) pairs from *all* the targets + target_paths: List[Tuple[PythonPackage, str]] = [] + for target in self._targets.values(): + target_paths.extend(target.find_target_paths()) + + # Create a pool of workers that will parse imports from each Python module + # path. + paths = [path for _, path in target_paths] + with Pool(processes=PROCESSES) as pool: + # Return a list of sets containing imported package names + imported_package_names_per_path = pool.map( + gather_dependencies_from_module, paths + ) + + # For each target path / package name pair, process and add the dependency to + # the target's set of dependencies. + for (target, _), (path, package_names) in zip( + target_paths, imported_package_names_per_path + ): + for package_name in package_names: + target.add_dependency(self._targets, path, package_name) + + # Allow each target to set extra dependencies based on their own custom logic. + # For most targets this will be a no-op. + for target in self._targets.values(): + target.set_extra_dependencies(self._targets) + + def generate_package_manifests(self, target_pattern: Optional[str] = None) -> None: + """Generate package manifest files. + + You must have already called :py:meth:`.register_packages`. 
+ + Args: + target_pattern: If provided, pyproject.toml files are only generated for + targets with keys matching the pattern + + """ + logger.info("Generating package manifest files") + targets_to_save = [] + for key, target in self._targets.items(): + if not target_pattern or re.search(target_pattern, key): + target.generate_package_manifest() + targets_to_save.append( + (target.config.config_file_path, target.config.to_string()) + ) + + # Create a pool of workers that will render, format, and write each manifest + # file to disk. + with Pool(processes=PROCESSES) as pool: + pool.starmap(write_package_config_file, targets_to_save) + + def generate_package_manifest(self, package_path: Path) -> None: + """Generate package manifest file for a single target. + + You must have already called :py:meth:`.register_packages`. + + Args: + package_path: path to a Python package + + """ + p = PythonPackage(package_path) + target = self._targets[p.package_name] + target.generate_package_manifest() + target.config.save_config() + + def _register_task_targets_code(self) -> None: + """Register task targets for code packages + + This iterates through various directories looking for a setup.py file. If it + finds one, it means we've found a Python package and can register a build + target. 
+ """ + logger.info("Registering code targets") + + setup_py_paths: List[Tuple[str, Path]] = [] + for top_dir_name in PROJECT_CONFIG.top_dirs: + top_dir = PROJECT_CONFIG.project_path.joinpath(top_dir_name) + logger.debug(f"Walking {top_dir}") + for dirpath, dirnames, filenames in os.walk(top_dir): + if os.path.basename(dirpath) in PROJECT_CONFIG.ignore_dirs: + # Empty the list of directories so os.walk does not recur + dirnames.clear() + else: + for filename in filenames: + if filename == "setup.py": + setup_py_paths.append( + (top_dir_name, Path(dirpath).joinpath(filename)) + ) + + for top_dir_name, setup_py_path in setup_py_paths: + logger.debug(f"Registering {setup_py_path}") + + src_dir = setup_py_path.parent.joinpath("src") + if not src_dir.is_dir(): + continue + src_entries = [ + src_entry + for src_entry in os.scandir(src_dir) + if ( + src_entry.is_dir() + and "egg-info" not in src_entry.path + and "pycache" not in src_entry.path + ) + ] + if len(src_entries) == 0: + logger.debug(f"No entries found in {src_dir}") + continue + elif len(src_entries) > 1: + raise MultipleSourcePackagesFound( + f"More than one package found in {src_dir}:" + f" {src_entries}. This could be caused by old git" + " state. Either manually clean up the packages or" + " run `git clean -fxd` if you are OK with a" + " completely clean slate." + ) + + src_entry = src_entries[0] + package_name = os.path.basename(src_entry.path) + target = PythonPackage(setup_py_path.parent) + if target.key in PROJECT_CONFIG.ignore_targets: + logger.debug(f"Ignoring {target}") + elif package_name in self._targets: + raise DuplicateTarget( + f"Duplicate target found for {package_name}. Check the" + " git repository for old folders that have the same" + " name." + ) + else: + self._targets[package_name] = target + logger.debug(f"Registered target {target}") + + def _build_target_graph(self) -> None: + """Build a graph of package targets. + + This is used to check for cycles and compute dependencies. 
+ """ + G = nx.DiGraph() + for target in self._targets.values(): + G.add_node(target) + for dependency in target.dependencies: + G.add_edge(target, dependency) + self._target_graph = G + + def ensure_no_circular_imports(self) -> None: + """Ensure there are no circular imports in any of the processed packages + + You must have already called :py:meth:`.register_packages`. + + Raises: + :py:exec:`.CircularImportsFound` if any circular imports were found + + """ + logger.info("Ensuring no circular imports") + cycles = list(nx.simple_cycles(self._target_graph)) + if len(cycles) > 0: + raise CircularImportsFound(cycles) diff --git a/ns_poet/project.py b/ns_poet/project.py new file mode 100644 index 0000000..e24cbf0 --- /dev/null +++ b/ns_poet/project.py @@ -0,0 +1,182 @@ +"""Contains a configuration parser and project config singleton""" +import json +from pathlib import Path +from typing import Any, Dict, FrozenSet, List, MutableMapping, Optional, Union + +import pkg_resources +from pkg_resources import Requirement +import toml + +from .util import get_git_top_level_path + + +class PoetProject: + """Project object + + This object parses project configuration and manages the import map associated with + the project's requirements.txt file. 
+ + The schema for the configuration file in the git project root is:: + + [tool.nspoet] + ignore_dirs = [] + ignore_targets = [] + import_map_path = "3rdparty/python/import-map.json" + requirements_path = "3rdparty/python/requirements.txt" + top_dirs = ["."] + default_python_version = "^3.6.5" + + """ + + def __init__(self, project_path: Union[str, Path]) -> None: + """Initializer + + Args: + project_path: Path to the project + + """ + self.project_path = Path(project_path) + self.config_file_path = self.project_path.joinpath("pyproject.toml") + self._config: Optional[MutableMapping[str, Any]] = None + self._requirements: Optional[Dict[str, Requirement]] = None + self._import_map: Optional[Dict[str, str]] = None + + @classmethod + def from_path(cls, project_path: Path) -> "PoetProject": + """Create a project object and load configuration from a given project path + + Args: + project_path: path to the project (git repo root) + + Returns: + new PoetProject instance + + """ + p = cls(project_path) + p.load_config() + return p + + def load_config(self) -> MutableMapping[str, Any]: + """Load configuration from disk + + Returns: + configuration dict + + """ + if self.config_file_path.is_file(): + with self.config_file_path.open() as f: + self._config = toml.load(f) + else: + self._config = {} + + return self._config + + @property + def project_config(self) -> Dict[str, Any]: + """Return the nspoet configuration subsection within the manifest""" + return self._config.get("tool", {}).get("nspoet", {}) + + @property + def ignore_dirs(self) -> FrozenSet[str]: + """Never look for or process files in this set of directories""" + return frozenset(self.project_config.get("ignore_dirs", [])) + + @property + def ignore_targets(self) -> FrozenSet[str]: + """Set of target package names to ignore when collecting build targets""" + return frozenset(self.project_config.get("ignore_targets", [])) + + @property + def import_map_path(self) -> Path: + """Path to the location of the 
import-map.json file within the project root + + This file contains a map from the import name found in *.py to the library's + project name (i.e. requirements.txt name). + + """ + return self.project_path.joinpath( + self.project_config.get( + "import_map_path", + "import-map.json", + ) + ) + + @property + def requirements_path(self) -> Path: + """Path to the requirements.txt within the project root""" + return self.project_path.joinpath( + self.project_config.get( + "requirements_path", + "requirements.txt", + ) + ) + + @property + def top_dirs(self) -> List[str]: + """Top-level directories to search for Python packages""" + return self.project_config.get("top_dirs", ["."]) + + @property + def default_python_version(self) -> str: + """Default python version to set in package manifests""" + return self.project_config.get("default_python_version", "^3.6.5") + + def load_requirements(self) -> Dict[str, Requirement]: + """Load requirement specifiers from disk + + Returns: + map of requirement name/project name to requirement object + + """ + if not self._requirements: + self._requirements = {} + with self.requirements_path.open() as f: + for r in pkg_resources.parse_requirements(f.read()): + self._requirements[r.unsafe_name] = r + self._requirements[r.project_name] = r + + return self._requirements + + def get_requirement(self, name: str) -> Requirement: + """Get a requirement by name""" + return self._requirements[name] + + def load_import_map(self) -> Dict[str, str]: + """Load import map from disk + + Returns: + map of import name to project name or empty dict if no file found + + """ + try: + with self.import_map_path.open() as f: + return json.load(f) + except FileNotFoundError: + return {} + + def get_import_map(self) -> Dict[str, str]: + """Get the import map or load it if not already cached + + Returns: + map of import name to project name + + """ + if not self._import_map: + self._import_map = self.load_import_map() + + return self._import_map + + def 
update_import_map(self, import_map: Dict[str, str]) -> None: + """Update import map file on disk + + Args: + import_map: map of import name to project name + + """ + with self.import_map_path.open("w") as f: + json.dump(import_map, f, sort_keys=True, indent=2) + f.write("\n") + + +# Create a singleton for the project configuration +PROJECT_CONFIG = PoetProject.from_path(get_git_top_level_path()) diff --git a/ns_poet/requirements.py b/ns_poet/requirements.py new file mode 100644 index 0000000..72fd1a9 --- /dev/null +++ b/ns_poet/requirements.py @@ -0,0 +1,370 @@ +"""Contains functions for working with requirements.txt""" + +from copy import deepcopy +import glob +import logging +import os +import subprocess +import sys +import tarfile +import tempfile +from typing import Dict, IO, List, Tuple +import zipfile + +from ns_poet.project import PROJECT_CONFIG +from .exceptions import ( + DistributionError, + InvalidTopLevelFile, + MissingDistributionMetadataFile, + NoProjectName, + UnsupportedDistributionFormat, +) + +logger = logging.getLogger(__name__) + +# Top-level Python package distribution names to ignore when generating the import map +IGNORE_TOP_LEVEL_IMPORTS = {"testing", "tests", "test"} + + +def _parse_top_level_imports_from_file(distribution_path: str, f: IO) -> List[str]: + """Get the top-level import names from a top_level.txt file + + Args: + distribution_path: Path to the .whl or .tar.gz file containing package metadata + f: File handle + + Returns: + Top-level import names for the distribution (case-sensitive). + e.g. ``["auth0"]`` for a project name of ``auth0-python``. 
+ + Raises: + :py:exc:`.InvalidTopLevelFile` when no top level import names could be found + + """ + imports = [ + line + for line in f.read().decode().split("\n") + if len(line.strip()) > 0 + and line.strip() not in IGNORE_TOP_LEVEL_IMPORTS + and not line.strip().startswith("_") + ] + if len(imports) == 0: + raise InvalidTopLevelFile( + f"No top-level imports found in distribution package for {distribution_path}" + ) + + return imports + + +def _get_project_name_from_metadata_file(distribution_path: str, f: IO) -> str: + """Get the project name from a METADATA or PKG-INFO file + + Args: + distribution_path: Path to the .whl or .tar.gz file containing package metadata + f: File handle + + Returns: + Project name for the distribution (case-sensitive) + + Raises: + :py:exc:`.NoProjectName` when no project name could be parsed from distribution + metadata + + """ + for line in f.readlines(): + if line.decode().startswith("Name:"): + project_name = line.decode().split(":")[1].strip() + return project_name + + raise NoProjectName(f"No project name found in {distribution_path}") + + +def _get_package_names_from_wheel(distribution_path: str) -> Tuple[List[str], str]: + """Get the top-level import name and project name for a given .whl distribution + + Args: + distribution_path: Path to the .whl file containing package metadata + + Returns: + Tuple of ``(top_level_imports, project_name)`` + + Raises: + :py:exc:`.MissingDistributionMetadataFile` + + """ + zf = zipfile.ZipFile(distribution_path) + top_level_imports = None + project_name = None + for name in zf.namelist(): + if os.path.basename(name) == "top_level.txt": + with zf.open(name) as f: + top_level_imports = _parse_top_level_imports_from_file( + distribution_path, f + ) + elif name.endswith(".dist-info/METADATA"): + with zf.open(name) as f: + project_name = _get_project_name_from_metadata_file( + distribution_path, f + ) + + if top_level_imports is not None and project_name is not None: + break + + if project_name 
is None: + raise MissingDistributionMetadataFile( + f"Could not find METADATA in {distribution_path}" + ) + elif top_level_imports is None: + top_level_imports = [project_name] + logger.debug( + f"Could not find top_level.txt in {distribution_path}." + f" Defaulting top level import list to {top_level_imports}" + ) + + return top_level_imports, project_name + + +def _get_package_names_from_tar(distribution_path: str) -> Tuple[List[str], str]: + """Get the top-level import name and project name for a given .tar.gz distribution + + Args: + distribution_path: Path to the .tar.gz file containing package metadata + + Returns: + Tuple of ``(top_level_imports, project_name)`` + + Raises: + :py:exc:`.MissingDistributionMetadataFile` + + """ + top_level_imports = None + project_name = None + with tarfile.open(distribution_path) as tar: + for tarinfo in tar: + if tarinfo.isreg(): + if os.path.basename(tarinfo.name) == "top_level.txt": + f = tar.extractfile(tarinfo) + top_level_imports = _parse_top_level_imports_from_file( + distribution_path, f + ) + elif os.path.basename(tarinfo.name) == "PKG-INFO": + f = tar.extractfile(tarinfo) + project_name = _get_project_name_from_metadata_file( + distribution_path, f + ) + elif tarinfo.name.endswith(".egg-info/PKG-INFO"): + f = tar.extractfile(tarinfo) + project_name = _get_project_name_from_metadata_file( + distribution_path, f + ) + + if top_level_imports is not None and project_name is not None: + break + + if project_name is None: + raise MissingDistributionMetadataFile( + f"Could not find PKG-INFO in {distribution_path}" + ) + elif top_level_imports is None: + top_level_imports = [project_name] + logger.debug( + f"Could not find top_level.txt in {distribution_path}." 
+ f" Defaulting top level import list to {top_level_imports}" + ) + + return top_level_imports, project_name + + +def _get_package_names_from_zip(distribution_path: str) -> Tuple[List[str], str]: + """Get the top-level import name and project name for a given .zip distribution + + Args: + distribution_path: Path to the .zip file containing package metadata + + Returns: + Tuple of ``(top_level_imports, project_name)`` + + Raises: + :py:exc:`.MissingDistributionMetadataFile` + + """ + zf = zipfile.ZipFile(distribution_path) + top_level_imports = None + project_name = None + for name in zf.namelist(): + if os.path.basename(name) == "top_level.txt": + with zf.open(name) as f: + top_level_imports = _parse_top_level_imports_from_file( + distribution_path, f + ) + elif os.path.basename(name) == "PKG-INFO": + with zf.open(name) as f: + project_name = _get_project_name_from_metadata_file( + distribution_path, f + ) + + if top_level_imports is not None and project_name is not None: + break + + if project_name is None: + raise MissingDistributionMetadataFile( + f"Could not find PKG-INFO in {distribution_path}" + ) + elif top_level_imports is None: + top_level_imports = [project_name] + logger.debug( + f"Could not find top_level.txt in {distribution_path}." 
+ f" Defaulting top level import list to {top_level_imports}" + ) + + return top_level_imports, project_name + + +def _get_package_names_for_distribution( + distribution_path: str, +) -> Tuple[List[str], str]: + """Get the top-level import names and project name for a given distribution file path + + Args: + distribution_path: Path to the .whl, .tar.gz, or .zip file containing package + metadata + + Returns: + Tuple of ``(top_level_imports, project_name)`` + + Raises: + :py:exc:`.UnsupportedDistributionFormat` + + """ + if distribution_path.endswith(".whl"): + return _get_package_names_from_wheel(distribution_path) + elif distribution_path.endswith(".tar.gz") or distribution_path.endswith( + ".tar.bz2" + ): + return _get_package_names_from_tar(distribution_path) + elif distribution_path.endswith(".zip"): + return _get_package_names_from_zip(distribution_path) + + raise UnsupportedDistributionFormat( + f"Unsupported Python distribution file: {distribution_path}" + ) + + +def _get_import_map_for_requirements( + requirement_specifiers: List[str], +) -> Tuple[List[Exception], Dict[str, str]]: + """Get a map of top-level import name to project name for a list of requirements + + Args: + requirement_specifiers: List of requirement specifiers like ``requests==1.2.3`` + + Returns: + Tuple with items: + * List of exception objects, if any + * Map of the top-level import name to the project name + + """ + import_map = {} + with tempfile.TemporaryDirectory() as tmpdirname: + logger.debug(f"Downloading Python packages to {tmpdirname}") + # Download the wheels and tarballs for the requirements into a temp directory + subprocess.run( + [ + sys.executable, + "-m", + "pip", + "download", + "-d", + tmpdirname, + "--no-deps", + *requirement_specifiers, + ], + check=True, + ) + distribution_paths = ( + list(glob.glob(f"{tmpdirname}/*.whl")) + + list(glob.glob(f"{tmpdirname}/*.tar.gz")) + + list(glob.glob(f"{tmpdirname}/*.tar.bz2")) + + list(glob.glob(f"{tmpdirname}/*.zip")) + ) + 
logger.debug(f"Found distribution paths: {distribution_paths}") + # Process each downloaded distribution, extracting the top-level import name + # and the project name to add to the map + exceptions = [] + for distribution_path in distribution_paths: + try: + top_level_imports, project_name = _get_package_names_for_distribution( + distribution_path + ) + except Exception as e: + exceptions.append(e) + else: + for top_level_import in top_level_imports: + logger.debug( + f"Found map of {top_level_import} => {project_name} for" + f" distribution {distribution_path}" + ) + import_map[top_level_import] = project_name + + return exceptions, import_map + + +def update_import_map() -> None: + """Update the import map file. + + This will determine which keys should be added or dropped based on the + requirements.txt file. + + """ + logger.info("Updating import map") + + requirements = PROJECT_CONFIG.load_requirements() + import_map = PROJECT_CONFIG.load_import_map() + + existing_project_names = set(import_map.values()) + current_project_names = set(req.project_name for req in requirements.values()) + + # New requirements for which we need to go resolve an import name + new_project_names = current_project_names - existing_project_names + logger.info( + f"Found {len(new_project_names)} new requirement(s) to resolve: {new_project_names}" + ) + + # Old requirements that should be removed from the map + old_project_names = existing_project_names - current_project_names + logger.info( + f"Found {len(old_project_names)} requirement(s) to remove: {old_project_names}" + ) + + if len(new_project_names) == 0 and len(old_project_names) == 0: + logger.info("No updates needed for import map") + return + + # Create a copy of the import map that we'll mutate + import_map = deepcopy(import_map) + import_map_inverse = {v: k for k, v in import_map.items()} + + # Remove old requirements + for project_name in old_project_names: + del import_map[import_map_inverse[project_name]] + + 
new_requirement_specifiers = [ + str(req) + for req in requirements.values() + if req.project_name in new_project_names + ] + import_map_exceptions: List[Exception] = [] + if len(new_requirement_specifiers) > 0: + import_map_exceptions, import_map_updates = _get_import_map_for_requirements( + new_requirement_specifiers + ) + import_map.update(import_map_updates) + + PROJECT_CONFIG.update_import_map(import_map) + logger.info("Finished updating import map") + + if len(import_map_exceptions) > 0: + messages = "\n" + "\n".join([str(e) for e in import_map_exceptions]) + raise DistributionError( + f"Failed to process all Python distributions: {messages}" + ) diff --git a/ns_poet/util.py b/ns_poet/util.py new file mode 100644 index 0000000..2a4abdb --- /dev/null +++ b/ns_poet/util.py @@ -0,0 +1,62 @@ +"""Utility functions""" + +import ast +from pathlib import Path +import subprocess +from typing import Any, Set, Tuple + + +class ModuleImportVisitor(ast.NodeVisitor): + """AST visitor for collecting imports from a module""" + + def __init__(self) -> None: + """Initializer""" + self.imports: Set[str] = set() + + def visit_ImportFrom(self, node: Any) -> None: + """Collect an import when specified as `from foo import bar`""" + if node.module is not None: + self.imports.add(node.module.split(".")[0]) + + def visit_Import(self, node: Any) -> None: + """Collect an import when specified as `import foo`""" + self.imports.add(node.names[0].name.split(".")[0]) + + +def gather_dependencies_from_module(path: str) -> Tuple[str, Set[str]]: + """Gather a set of other build targets from a single Python module + + Args: + path: Python module path + + Returns: + tuple of (path, set of package names that were imported in the module) + + """ + with open(path) as f: + contents = f.read() + tree = ast.parse(contents) + visitor = ModuleImportVisitor() + visitor.visit(tree) + return path, visitor.imports + + +def get_git_top_level_path() -> Path: + """Get the path of the git repository where this 
CLI is running""" + proc = subprocess.run( + ["git", "rev-parse", "--show-toplevel"], check=True, stdout=subprocess.PIPE + ) + output = proc.stdout.decode("utf-8").strip() + return Path(output) + + +def write_package_config_file(config_file_path: Path, contents: str) -> None: + """Write package config file to disk + + Args: + config_file_path: path to config file + contents: TOML string to write + + """ + with config_file_path.open("w") as f: + f.write(contents) diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..c1243f1 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1061 @@ +[[package]] +name = "arrow" +version = "1.2.1" +description = "Better dates & times for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +python-dateutil = ">=2.7.0" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "astor" +version = "0.8.1" +description = "Read/rewrite/write Python ASTs" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" + +[[package]] +name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "21.2.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] + +[[package]] +name = "backports.entry-points-selectable" +version = "1.1.1" +description = "Compatibility shim providing selectable entry points for older implementations" +category = "dev" +optional = false +python-versions = ">=2.7" + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs (>=2.4)", "pytest-enabler (>=1.0.1)"] + +[[package]] +name = "binaryornot" +version = "0.4.4" +description = "Ultra-lightweight pure Python package to check if a file is binary or text." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +chardet = ">=3.0.2" + +[[package]] +name = "black" +version = "21.10b0" +description = "The uncompromising code formatter." 
+category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +click = ">=7.1.2" +dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0,<1" +platformdirs = ">=2" +regex = ">=2020.1.8" +tomli = ">=0.2.6,<2.0.0" +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\""} +typing-extensions = [ + {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, + {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, +] + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +python2 = ["typed-ast (>=1.4.3)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2021.10.8" +description = "Python package for providing Mozilla's CA Bundle." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "cfgv" +version = "3.3.1" +description = "Validate configuration and produce human readable error messages." +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[[package]] +name = "chardet" +version = "4.0.0" +description = "Universal encoding detector for Python 2 and 3" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "charset-normalizer" +version = "2.0.7" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "dev" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.0.3" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "cookiecutter" +version = "1.7.3" +description = "A command-line utility that creates projects from project templates, e.g. creating a Python package project from a Python package project template." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +binaryornot = ">=0.4.4" +click = ">=7.0" +Jinja2 = ">=2.7,<4.0.0" +jinja2-time = ">=0.2.0" +poyo = ">=0.5.0" +python-slugify = ">=4.0.0" +requests = ">=2.23.0" +six = ">=1.10" + +[[package]] +name = "cruft" +version = "2.10.1" +description = "Allows you to maintain all the necessary cruft for packaging and building projects separate from the code you intentionally write. Built on-top of CookieCutter." 
+category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + +[package.dependencies] +click = ">=7.1.2,<9.0.0" +cookiecutter = ">=1.6,<2.0" +gitpython = ">=3.0,<4.0" +importlib_metadata = ">=3.4.0,<4.0.0" +typer = ">=0.4.0,<0.5.0" + +[package.extras] +pyproject = ["toml (>=0.10,<0.11)"] +examples = ["examples (>=1.0.2,<2.0.0)"] + +[[package]] +name = "dataclasses" +version = "0.8" +description = "A backport of the dataclasses module for Python 3.6" +category = "dev" +optional = false +python-versions = ">=3.6, <3.7" + +[[package]] +name = "decorator" +version = "4.4.2" +description = "Decorators for Humans" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*" + +[[package]] +name = "distlib" +version = "0.3.3" +description = "Distribution utilities" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "filelock" +version = "3.3.2" +description = "A platform independent file lock." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] +testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] + +[[package]] +name = "gitdb" +version = "4.0.9" +description = "Git Object Database" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.20" +description = "Python Git Library" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +gitdb = ">=4.0.1,<5" +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} + +[[package]] +name = "identify" +version = "2.3.5" +description = "File identification library for Python" +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.extras] +license = ["editdistance-s"] + +[[package]] +name = "idna" +version = 
"3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "importlib-metadata" +version = "3.10.1" +description = "Read metadata from Python packages" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] + +[[package]] +name = "importlib-resources" +version = "5.4.0" +description = "Read resources from Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "jinja2" +version = "3.0.3" +description = "A very fast and expressive template engine." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jinja2-time" +version = "0.2.0" +description = "Jinja2 Extension for Dates and Times" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +arrow = "*" +jinja2 = "*" + +[[package]] +name = "markupsafe" +version = "2.0.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "networkx" +version = "2.5.1" +description = "Python package for creating and manipulating graphs and networks" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +decorator = ">=4.3,<5" + +[package.extras] +all = ["numpy", "scipy", "pandas", "matplotlib", "pygraphviz", "pydot", "pyyaml", "lxml", "pytest"] +gdal = ["gdal"] +lxml = ["lxml"] +matplotlib = ["matplotlib"] +numpy = ["numpy"] +pandas = ["pandas"] +pydot = ["pydot"] +pygraphviz = ["pygraphviz"] +pytest = ["pytest"] +pyyaml = ["pyyaml"] +scipy = ["scipy"] + +[[package]] +name = "nodeenv" +version = "1.6.0" +description = "Node.js virtual environment builder" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "packaging" +version = "21.2" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3" + +[[package]] +name = "pathspec" +version = "0.9.0" +description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "platformdirs" +version = "2.4.0" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "poyo" +version = "0.5.0" +description = "A lightweight YAML Parser for Python. 🐓" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pre-commit" +version = "2.15.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +importlib-resources = {version = "*", markers = "python_version < \"3.7\""} +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +toml = "*" +virtualenv = ">=20.0.8" + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pyparsing" +version = "2.4.7" +description = "Python parsing module" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-slugify" +version = "5.0.2" +description = "A Python Slugify application that handles Unicode" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +text-unidecode = ">=1.3" + +[package.extras] +unidecode = ["Unidecode (>=1.1.1)"] + +[[package]] 
+name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "regex" +version = "2021.11.10" +description = "Alternative regular expression module, to replace re." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "requests" +version = "2.26.0" +description = "Python HTTP for Humans." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "text-unidecode" +version = "1.3" +description = "The most basic Text::Unidecode port" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tomli" +version = "1.2.2" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "typed-ast" +version = "1.4.3" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false 
+python-versions = "*" + +[[package]] +name = "typer" +version = "0.4.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +click = ">=7.1.1,<9.0.0" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)"] +doc = ["mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=5.4.0,<6.0.0)", "markdown-include (>=0.5.1,<0.6.0)"] +test = ["shellingham (>=1.3.0,<2.0.0)", "pytest (>=4.4.0,<5.4.0)", "pytest-cov (>=2.10.0,<3.0.0)", "coverage (>=5.2,<6.0)", "pytest-xdist (>=1.32.0,<2.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "mypy (==0.910)", "black (>=19.10b0,<20.0b0)", "isort (>=5.0.6,<6.0.0)"] + +[[package]] +name = "typing-extensions" +version = "3.10.0.2" +description = "Backported and Experimental Type Hints for Python 3.5+" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "urllib3" +version = "1.26.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "virtualenv" +version = "20.10.0" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +"backports.entry-points-selectable" = ">=1.0.4" +distlib = ">=0.3.1,<1" +filelock = ">=3.2,<4" +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +importlib-resources = {version = ">=1.0", markers = "python_version < \"3.7\""} +platformdirs = ">=2,<3" +six = ">=1.9.0,<2" + +[package.extras] +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] + +[[package]] +name = "zipp" +version = "3.6.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.6.5" +content-hash = "802ec799e658f5c245064fe4579e17e233351783ab701aee213883e66f707f90" + +[metadata.files] +arrow = [ + {file = "arrow-1.2.1-py3-none-any.whl", hash = 
"sha256:6b2914ef3997d1fd7b37a71ce9dd61a6e329d09e1c7b44f4d3099ca4a5c0933e"}, + {file = "arrow-1.2.1.tar.gz", hash = "sha256:c2dde3c382d9f7e6922ce636bf0b318a7a853df40ecb383b29192e6c5cc82840"}, +] +astor = [ + {file = "astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5"}, + {file = "astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e"}, +] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, + {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, +] +"backports.entry-points-selectable" = [ + {file = "backports.entry_points_selectable-1.1.1-py2.py3-none-any.whl", hash = "sha256:7fceed9532a7aa2bd888654a7314f864a3c16a4e710b34a58cfc0f08114c663b"}, + {file = "backports.entry_points_selectable-1.1.1.tar.gz", hash = "sha256:914b21a479fde881635f7af5adc7f6e38d6b274be32269070c53b698c60d5386"}, +] +binaryornot = [ + {file = "binaryornot-0.4.4-py2.py3-none-any.whl", hash = "sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4"}, + {file = "binaryornot-0.4.4.tar.gz", hash = "sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061"}, +] +black = [ + {file = "black-21.10b0-py3-none-any.whl", hash = "sha256:6eb7448da9143ee65b856a5f3676b7dda98ad9abe0f87fce8c59291f15e82a5b"}, + {file = "black-21.10b0.tar.gz", hash = "sha256:a9952229092e325fe5f3dae56d81f639b23f7131eb840781947e4b2886030f33"}, +] +certifi = [ + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = 
"sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, +] +cfgv = [ + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, +] +chardet = [ + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"}, + {file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"}, +] +click = [ + {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, + {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +cookiecutter = [ + {file = "cookiecutter-1.7.3-py2.py3-none-any.whl", hash = "sha256:f8671531fa96ab14339d0c59b4f662a4f12a2ecacd94a0f70a3500843da588e2"}, + {file = "cookiecutter-1.7.3.tar.gz", hash = "sha256:6b9a4d72882e243be077a7397d0f1f76fe66cf3df91f3115dbb5330e214fa457"}, +] +cruft = [ + {file = "cruft-2.10.1-py3-none-any.whl", hash = "sha256:082e8044c1a80c8b27645a94ae2bdb0544fec752d2b2798ec901b30e6981426d"}, + {file = "cruft-2.10.1.tar.gz", hash = 
"sha256:c5350ca3ef7b671409f9b24e8e0bd73d870433aa6b5abbf6e7b9b73f7b4adef0"}, +] +dataclasses = [ + {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, + {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, +] +decorator = [ + {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"}, + {file = "decorator-4.4.2.tar.gz", hash = "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"}, +] +distlib = [ + {file = "distlib-0.3.3-py2.py3-none-any.whl", hash = "sha256:c8b54e8454e5bf6237cc84c20e8264c3e991e824ef27e8f1e81049867d861e31"}, + {file = "distlib-0.3.3.zip", hash = "sha256:d982d0751ff6eaaab5e2ec8e691d949ee80eddf01a62eaa96ddb11531fe16b05"}, +] +filelock = [ + {file = "filelock-3.3.2-py3-none-any.whl", hash = "sha256:bb2a1c717df74c48a2d00ed625e5a66f8572a3a30baacb7657add1d7bac4097b"}, + {file = "filelock-3.3.2.tar.gz", hash = "sha256:7afc856f74fa7006a289fd10fa840e1eebd8bbff6bffb69c26c54a0512ea8cf8"}, +] +gitdb = [ + {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, + {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, +] +gitpython = [ + {file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"}, + {file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"}, +] +identify = [ + {file = "identify-2.3.5-py2.py3-none-any.whl", hash = "sha256:ba945bddb4322394afcf3f703fa68eda08a6acc0f99d9573eb2be940aa7b9bba"}, + {file = "identify-2.3.5.tar.gz", hash = "sha256:6f0368ba0f21c199645a331beb7425d5374376e71bc149e9cb55e45cb45f832d"}, +] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = 
"sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +importlib-metadata = [ + {file = "importlib_metadata-3.10.1-py3-none-any.whl", hash = "sha256:2ec0faae539743ae6aaa84b49a169670a465f7f5d64e6add98388cc29fd1f2f6"}, + {file = "importlib_metadata-3.10.1.tar.gz", hash = "sha256:c9356b657de65c53744046fa8f7358afe0714a1af7d570c00c3835c2d724a7c1"}, +] +importlib-resources = [ + {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, + {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +jinja2 = [ + {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, + {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, +] +jinja2-time = [ + {file = "jinja2-time-0.2.0.tar.gz", hash = "sha256:d14eaa4d315e7688daa4969f616f226614350c48730bfa1692d2caebd8c90d40"}, + {file = "jinja2_time-0.2.0-py2.py3-none-any.whl", hash = "sha256:d3eab6605e3ec8b7a0863df09cc1d23714908fa61aa6986a845c20ba488b4efa"}, +] +markupsafe = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = 
"MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = 
"sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +networkx = [ + {file = "networkx-2.5.1-py3-none-any.whl", hash = "sha256:0635858ed7e989f4c574c2328380b452df892ae85084144c73d8cd819f0c4e06"}, + {file = "networkx-2.5.1.tar.gz", hash = "sha256:109cd585cac41297f71103c3c42ac6ef7379f29788eb54cb751be5a663bb235a"}, +] +nodeenv = [ + {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, + {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, +] +packaging = [ + {file = "packaging-21.2-py3-none-any.whl", hash = "sha256:14317396d1e8cdb122989b916fa2c7e9ca8e2be9e8060a6eff75b6b7b4d8a7e0"}, + {file = "packaging-21.2.tar.gz", hash = "sha256:096d689d78ca690e4cd8a89568ba06d07ca097e3306a4381635073ca91479966"}, +] +pathspec = [ + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, +] +platformdirs = [ + {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, + {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, +] +pluggy = [ + {file = 
"pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +poyo = [ + {file = "poyo-0.5.0-py2.py3-none-any.whl", hash = "sha256:3e2ca8e33fdc3c411cd101ca395668395dd5dc7ac775b8e809e3def9f9fe041a"}, + {file = "poyo-0.5.0.tar.gz", hash = "sha256:e26956aa780c45f011ca9886f044590e2d8fd8b61db7b1c1cf4e0869f48ed4dd"}, +] +pre-commit = [ + {file = "pre_commit-2.15.0-py2.py3-none-any.whl", hash = "sha256:a4ed01000afcb484d9eb8d504272e642c4c4099bbad3a6b27e519bd6a3e928a6"}, + {file = "pre_commit-2.15.0.tar.gz", hash = "sha256:3c25add78dbdfb6a28a651780d5c311ac40dd17f160eb3954a0c59da40a505a7"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pyparsing = [ + {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, + {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, +] +pytest = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +python-slugify = [ + {file = "python-slugify-5.0.2.tar.gz", hash = "sha256:f13383a0b9fcbe649a1892b9c8eb4f8eab1d6d84b84bb7a624317afa98159cab"}, + {file = 
"python_slugify-5.0.2-py2.py3-none-any.whl", hash = "sha256:6d8c5df75cd4a7c3a2d21e257633de53f52ab0265cd2d1dc62a730e8194a7380"}, +] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = 
"sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +regex = [ + {file = "regex-2021.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9345b6f7ee578bad8e475129ed40123d265464c4cfead6c261fd60fc9de00bcf"}, + {file = "regex-2021.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:416c5f1a188c91e3eb41e9c8787288e707f7d2ebe66e0a6563af280d9b68478f"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0538c43565ee6e703d3a7c3bdfe4037a5209250e8502c98f20fea6f5fdf2965"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee1227cf08b6716c85504aebc49ac827eb88fcc6e51564f010f11a406c0a667"}, + {file = 
"regex-2021.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6650f16365f1924d6014d2ea770bde8555b4a39dc9576abb95e3cd1ff0263b36"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ab804ea73972049b7a2a5c62d97687d69b5a60a67adca07eb73a0ddbc9e29f"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68a067c11463de2a37157930d8b153005085e42bcb7ad9ca562d77ba7d1404e0"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:162abfd74e88001d20cb73ceaffbfe601469923e875caf9118333b1a4aaafdc4"}, + {file = "regex-2021.11.10-cp310-cp310-win32.whl", hash = "sha256:98ba568e8ae26beb726aeea2273053c717641933836568c2a0278a84987b2a1a"}, + {file = "regex-2021.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:780b48456a0f0ba4d390e8b5f7c661fdd218934388cde1a974010a965e200e12"}, + {file = "regex-2021.11.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dba70f30fd81f8ce6d32ddeef37d91c8948e5d5a4c63242d16a2b2df8143aafc"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1f54b9b4b6c53369f40028d2dd07a8c374583417ee6ec0ea304e710a20f80a0"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fbb9dc00e39f3e6c0ef48edee202f9520dafb233e8b51b06b8428cfcb92abd30"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666abff54e474d28ff42756d94544cdfd42e2ee97065857413b72e8a2d6a6345"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5537f71b6d646f7f5f340562ec4c77b6e1c915f8baae822ea0b7e46c1f09b733"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ed2e07c6a26ed4bea91b897ee2b0835c21716d9a469a96c3e878dc5f8c55bb23"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca5f18a75e1256ce07494e245cdb146f5a9267d3c702ebf9b65c7f8bd843431e"}, + {file = "regex-2021.11.10-cp36-cp36m-win32.whl", hash = "sha256:93a5051fcf5fad72de73b96f07d30bc29665697fb8ecdfbc474f3452c78adcf4"}, + {file = "regex-2021.11.10-cp36-cp36m-win_amd64.whl", hash = "sha256:b483c9d00a565633c87abd0aaf27eb5016de23fed952e054ecc19ce32f6a9e7e"}, + {file = "regex-2021.11.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fff55f3ce50a3ff63ec8e2a8d3dd924f1941b250b0aac3d3d42b687eeff07a8e"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32d2a2b02ccbef10145df9135751abea1f9f076e67a4e261b05f24b94219e36"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53db2c6be8a2710b359bfd3d3aa17ba38f8aa72a82309a12ae99d3c0c3dcd74d"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2207ae4f64ad3af399e2d30dde66f0b36ae5c3129b52885f1bffc2f05ec505c8"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5ca078bb666c4a9d1287a379fe617a6dccd18c3e8a7e6c7e1eb8974330c626a"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd33eb9bdcfbabab3459c9ee651d94c842bc8a05fabc95edf4ee0c15a072495e"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05b7d6d7e64efe309972adab77fc2af8907bb93217ec60aa9fe12a0dad35874f"}, + {file = "regex-2021.11.10-cp37-cp37m-win32.whl", hash = "sha256:e71255ba42567d34a13c03968736c5d39bb4a97ce98188fafb27ce981115beec"}, + {file = "regex-2021.11.10-cp37-cp37m-win_amd64.whl", hash = 
"sha256:07856afef5ffcc052e7eccf3213317fbb94e4a5cd8177a2caa69c980657b3cb4"}, + {file = "regex-2021.11.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba05430e819e58544e840a68b03b28b6d328aff2e41579037e8bab7653b37d83"}, + {file = "regex-2021.11.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f301b11b9d214f83ddaf689181051e7f48905568b0c7017c04c06dfd065e244"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aaa4e0705ef2b73dd8e36eeb4c868f80f8393f5f4d855e94025ce7ad8525f50"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:788aef3549f1924d5c38263104dae7395bf020a42776d5ec5ea2b0d3d85d6646"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8af619e3be812a2059b212064ea7a640aff0568d972cd1b9e920837469eb3cb"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85bfa6a5413be0ee6c5c4a663668a2cad2cbecdee367630d097d7823041bdeec"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f23222527b307970e383433daec128d769ff778d9b29343fb3496472dc20dabe"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da1a90c1ddb7531b1d5ff1e171b4ee61f6345119be7351104b67ff413843fe94"}, + {file = "regex-2021.11.10-cp38-cp38-win32.whl", hash = "sha256:0617383e2fe465732af4509e61648b77cbe3aee68b6ac8c0b6fe934db90be5cc"}, + {file = "regex-2021.11.10-cp38-cp38-win_amd64.whl", hash = "sha256:a3feefd5e95871872673b08636f96b61ebef62971eab044f5124fb4dea39919d"}, + {file = "regex-2021.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7f325be2804246a75a4f45c72d4ce80d2443ab815063cdf70ee8fb2ca59ee1b"}, + {file = "regex-2021.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:537ca6a3586931b16a85ac38c08cc48f10fc870a5b25e51794c74df843e9966d"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2afb0fd1747f33f1ee3e209bce1ed582d1896b240ccc5e2697e3275f037c7"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:432bd15d40ed835a51617521d60d0125867f7b88acf653e4ed994a1f8e4995dc"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b43c2b8a330a490daaef5a47ab114935002b13b3f9dc5da56d5322ff218eeadb"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:962b9a917dd7ceacbe5cd424556914cb0d636001e393b43dc886ba31d2a1e449"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa8c626d6441e2d04b6ee703ef2d1e17608ad44c7cb75258c09dd42bacdfc64b"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3c5fb32cc6077abad3bbf0323067636d93307c9fa93e072771cf9a64d1c0f3ef"}, + {file = "regex-2021.11.10-cp39-cp39-win32.whl", hash = "sha256:3b5df18db1fccd66de15aa59c41e4f853b5df7550723d26aa6cb7f40e5d9da5a"}, + {file = "regex-2021.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:83ee89483672b11f8952b158640d0c0ff02dc43d9cb1b70c1564b49abe92ce29"}, + {file = "regex-2021.11.10.tar.gz", hash = "sha256:f341ee2df0999bfdf7a95e448075effe0db212a59387de1a70690e4acb03d4c6"}, +] +requests = [ + {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, + {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", 
hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +smmap = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] +text-unidecode = [ + {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, + {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +tomli = [ + {file = "tomli-1.2.2-py3-none-any.whl", hash = "sha256:f04066f68f5554911363063a30b108d2b5a5b1a010aa8b6132af78489fe3aade"}, + {file = "tomli-1.2.2.tar.gz", hash = "sha256:c6ce0015eb38820eaf32b5db832dbc26deb3dd427bd5f6556cf0acac2c214fee"}, +] +typed-ast = [ + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, 
+ {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file 
= "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, +] +typer = [ + {file = "typer-0.4.0-py3-none-any.whl", hash = "sha256:d81169725140423d072df464cad1ff25ee154ef381aaf5b8225352ea187ca338"}, + {file = "typer-0.4.0.tar.gz", hash = "sha256:63c3aeab0549750ffe40da79a1b524f60e08a2cbc3126c520ebf2eeaf507f5dd"}, +] +typing-extensions = [ + {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, + {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, + {file = "typing_extensions-3.10.0.2.tar.gz", hash = 
"sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, +] +urllib3 = [ + {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, + {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, +] +virtualenv = [ + {file = "virtualenv-20.10.0-py2.py3-none-any.whl", hash = "sha256:4b02e52a624336eece99c96e3ab7111f469c24ba226a53ec474e8e787b365814"}, + {file = "virtualenv-20.10.0.tar.gz", hash = "sha256:576d05b46eace16a9c348085f7d0dc8ef28713a2cabaa1cf0aea41e8f12c9218"}, +] +zipp = [ + {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, + {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, +] diff --git a/pyproject.toml b/pyproject.toml index 02db077..3d2247e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "ns-poet" -version = "0.1.0" +version = "0.2.0" description = "Autogenerate Poetry package manifests in a monorepo" authors = ["Jonathan Drake "] license = "BSD-3-Clause" @@ -9,7 +9,12 @@ homepage = "https://github.com/NarrativeScience/ns-poet" repository = "https://github.com/NarrativeScience/ns-poet" [tool.poetry.dependencies] -python = "^3.6" +python = "^3.6.5" +click = "^8.0.3" +setuptools = "^58.5.3" +astor = "^0.8.1" +networkx = "<2.6" +toml = "^0.10.2" [tool.poetry.dev-dependencies] pre-commit = "^2.10.1" @@ -23,3 +28,6 @@ skip = ["pyproject.toml", "ns_poet", "tests", "README.md", ".circleci/config.yml [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" + +[tool.poetry.scripts] +nspoet = "ns_poet.cli:cli" diff --git a/tests/functional/test_package.py b/tests/functional/test_package.py new file mode 100644 index 0000000..f13dc5b --- /dev/null +++ b/tests/functional/test_package.py @@ -0,0 +1,23 @@ +"""Contains 
tests for the package module""" + +from pathlib import Path +from subprocess import run +from tempfile import TemporaryDirectory + +from ns_poet import package as mod_ut + + +def test_package_config(): + """Should load package config""" + with TemporaryDirectory() as tmpdirname: + with Path(tmpdirname).joinpath("pyproject.toml").open("w") as f: + f.write( + """ +[tool.nspoet] +generate_package_manifest = false +""" + ) + + run(["git", "init"], cwd=tmpdirname) + config = mod_ut.PoetPackage.from_path(Path(tmpdirname)) + assert not config.generate_package_manifest diff --git a/tests/functional/test_packages.py b/tests/functional/test_packages.py new file mode 100644 index 0000000..1deed7b --- /dev/null +++ b/tests/functional/test_packages.py @@ -0,0 +1,28 @@ +"""Contains tests for the processor module""" + +from pathlib import Path +from subprocess import run +from tempfile import TemporaryDirectory +from unittest.mock import patch + +from ns_poet import processor as mod_ut + + +@patch("ns_poet.project.get_git_top_level_path") +def test_register_packages__empty(mock_get_git_top_level_path): + """Should register no packages""" + with TemporaryDirectory() as tmpdirname: + mock_get_git_top_level_path.return_value = tmpdirname + with Path(tmpdirname).joinpath("pyproject.toml").open("w") as f: + f.write( + """ +[tool.nspoet] +import_map_path = "3rdparty/python/import-map.json" +requirements_path = "3rdparty/python/requirements.txt" +""" + ) + + run(["git", "init"], cwd=tmpdirname) + processor = mod_ut.PackageProcessor() + processor.register_packages() + assert processor._targets == {} diff --git a/tests/functional/test_project.py b/tests/functional/test_project.py new file mode 100644 index 0000000..e7e7fc1 --- /dev/null +++ b/tests/functional/test_project.py @@ -0,0 +1,27 @@ +"""Contains tests for the project module""" + +from pathlib import Path +from subprocess import run +from tempfile import TemporaryDirectory + +from ns_poet import project as mod_ut + + +def 
test_project_config(): + """Should load project config""" + with TemporaryDirectory() as tmpdirname: + with Path(tmpdirname).joinpath("pyproject.toml").open("w") as f: + f.write( + """ +[tool.nspoet] +import_map_path = "3rdparty/python/import-map.json" +requirements_path = "3rdparty/python/requirements.txt" +""" + ) + + run(["git", "init"], cwd=tmpdirname) + config = mod_ut.PoetProject.from_path(Path(tmpdirname)) + assert str(config.import_map_path).endswith("3rdparty/python/import-map.json") + assert str(config.requirements_path).endswith( + "3rdparty/python/requirements.txt" + ) diff --git a/tests/unit/test_example.py b/tests/unit/test_example.py deleted file mode 100644 index 9b22d6b..0000000 --- a/tests/unit/test_example.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Contains tests for the example module""" - -import unittest - -from ns_poet.example import foo - - -class ExampleTests(unittest.TestCase): - """Tests showing an example""" - - def test_example(self): - """Should be true""" - self.assertEqual(foo(), "bar")