diff --git a/.github/dependabot.yml b/.github/dependabot.yml index bb68448..953bde8 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -11,7 +11,7 @@ updates: interval: "daily" time: "00:00" reviewers: - - "kzscisoft" + - "ryanjfield" commit-message: prefix: "pip" include: "scope" diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml index 26a76cc..84431a6 100644 --- a/.github/workflows/deploy.yaml +++ b/.github/workflows/deploy.yaml @@ -6,7 +6,7 @@ on: permissions: contents: write jobs: - windows-py39: + windows-py3x: name: Deploy Latest Windows Release runs-on: windows-latest steps: @@ -16,7 +16,7 @@ jobs: - uses: actions/setup-python@v4 with: - python-version: '3.9.7' + python-version: '3.x' architecture: "x64" - name: Get Previous tag @@ -42,7 +42,7 @@ jobs: name: windows_faircli path: dist/fair-cli-${{ steps.previoustag.outputs.tag }}-x64-windows-latest.exe - macos-py39: + macos-py3x: name: Deploy Latest macOS Release runs-on: macos-latest steps: @@ -52,7 +52,7 @@ jobs: - uses: actions/setup-python@v4 with: - python-version: '3.9.7' + python-version: '3.x' architecture: "x64" - name: Install Poetry @@ -79,7 +79,7 @@ jobs: path: dist/fair-cli-${{ steps.previoustag.outputs.tag }}-x64-macos-latest - ubuntu-py39: + ubuntu-py3x: name: Deploy Latest Linux Release runs-on: ubuntu-latest steps: @@ -89,7 +89,7 @@ jobs: - uses: actions/setup-python@v4 with: - python-version: '3.9.7' + python-version: '3.x' architecture: "x64" - name: Install Poetry @@ -120,16 +120,16 @@ jobs: name: Create Release runs-on: ubuntu-latest needs: - - ubuntu-py39 - - macos-py39 - - windows-py39 + - ubuntu-py3x + - macos-py3x + - windows-py3x steps: - uses: actions/checkout@v3 with: fetch-depth: 0 - uses: actions/setup-python@v4 with: - python-version: '3.9.7' + python-version: '3.x' architecture: "x64" - name: Install Poetry diff --git a/.github/workflows/fair-cli.yaml b/.github/workflows/fair-cli.yaml index 1c69d4e..d179b23 100644 --- a/.github/workflows/fair-cli.yaml +++ b/.github/workflows/fair-cli.yaml @@ -7,31 +7,50 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest] #, macos-latest, windows-latest] - # Issues with GitPython and Python >3.9.6, <3.10 + os: [ubuntu-latest, macos-latest, windows-latest] python: ["3.8", "3.9", "3.10", "3.x"] env: GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} steps: - uses: actions/checkout@v3 + if: ${{ runner.os != 'Windows' }} + - name: Checkout to C windows + run: | + mkdir C:/FAIR-CLI + git clone https://github.com/FAIRDataPipeline/FAIR-CLI.git C:/FAIR-CLI + cd C:/FAIR-CLI + git checkout $GITHUB_REF_NAME + echo Github Ref: $GITHUB_REF_NAME + if: ${{ runner.os == 'Windows' }} + shell: bash - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} architecture: "x64" - - name: Install Required - run: sudo apt install libmemcached-dev - if: matrix.os == 'ubuntu-latest' - - name: Install Required - run: brew install memcached - if: matrix.os == 'macos-latest' - name: Install Poetry run: python -m pip install poetry - name: Install Module - run: python -m poetry install + run: | + if [ "$RUNNER_OS" == "Windows" ]; then + cd C:/FAIR-CLI + pwd + fi + python -m poetry install + shell: bash - name: Install Python API for API Tests - run: python -m poetry run pip install git+https://github.com/FAIRDataPipeline/pyDataPipeline.git@dev + run: | + if [ "$RUNNER_OS" == "Windows" ]; then + cd C:/FAIR-CLI + pwd + fi + python -m poetry run pip install git+https://github.com/FAIRDataPipeline/pyDataPipeline.git@main + shell: bash - 
name: Run Tests for Each Marker run: | + if [ "$RUNNER_OS" == "Windows" ]; then + cd C:/FAIR-CLI + pwd + fi for marker in $(poetry run pytest --markers | grep -oE "faircli_[a-zA-Z|_|0-9]+") do echo "Running tests for marker '$marker'" @@ -41,6 +60,7 @@ jobs: echo "ERROR: Tests for marker '$marker' failed" fi done + shell: bash - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/implementations.yml b/.github/workflows/implementations.yml index e2494d8..5f4727d 100644 --- a/.github/workflows/implementations.yml +++ b/.github/workflows/implementations.yml @@ -50,6 +50,8 @@ jobs: ${GITHUB_WORKSPACE}/registry-rem/scripts/start_fair_registry -p 8001 -s drams.test-remote-settings # Copy the remote token cp ${GITHUB_WORKSPACE}/registry-rem/token $PWD/token + export DJANGO_SETTINGS_MODULE=drams.test-remote-settings + ${GITHUB_WORKSPACE}/registry-rem/venv/bin/python ${GITHUB_WORKSPACE}/registry-rem/manage.py createsuperuser --username FAIRDataPipeline --noinput working-directory: pySimpleModel - name: Install the registry and init diff --git a/.gitignore b/.gitignore index 1d9bf74..3feae9f 100644 --- a/.gitignore +++ b/.gitignore @@ -148,6 +148,7 @@ cython_debug/ # Misc *.yaml +!tests/data/*.yaml *.yml token.txt .fair diff --git a/CITATION.cff b/CITATION.cff index 19ffcd8..37c77b8 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -36,4 +36,4 @@ license: BSD-2-Clause message: If you use this software, please cite it using these metadata. repository-code: https://github.com/FAIRDataPipeline/FAIR-CLI/ title: "The FAIR Data Pipeline command line tool" -version: 0.7.3 +version: 0.8.0 diff --git a/fair/cli.py b/fair/cli.py index c25773d..71c7e72 100644 --- a/fair/cli.py +++ b/fair/cli.py @@ -105,8 +105,7 @@ def status(verbose, debug) -> None: except fdp_exc.FAIRCLIException as e: e.err_print() if e.level.lower() == "error": - if e.level.lower() == "error": - sys.exit(e.exit_code) + sys.exit(e.exit_code) @cli.group(invoke_without_command=True) @click.option("--debug/--no-debug", help="Run in debug mode", default=False) @@ -118,8 +117,10 @@ def list(ctx, debug, remote) -> None: ctx.obj = {} ctx.obj['DEBUG'] = debug ctx.obj['REMOTE'] = remote - ctx.invoke(data_products) - ctx.invoke(code_runs) + _current_args = " ".join(sys.argv) + if not ("data-products" in _current_args or "code-runs" in _current_args): + ctx.invoke(data_products) + ctx.invoke(code_runs) @list.command() @click.pass_context @@ -161,7 +162,7 @@ def create(debug, output: str) -> None: if output else os.path.join(os.getcwd(), fdp_com.USER_CONFIG_FILE) ) - click.echo(f"Generating new user configuration file" f" '{output}'") + click.echo(f"Generating new user configuration file '{output}'") with fdp_session.FAIR(os.getcwd(), debug=debug) as fair_session: fair_session.make_starter_config(output) @@ -527,6 +528,7 @@ def run( ): """Initialises a job with the option to specify a bash command""" # Allow no config to be specified, if that is the case use default local + click.echo("Running run please wait") config = config[0] if config else fdp_com.local_user_config(os.getcwd()) try: with fdp_session.FAIR( @@ -636,6 +638,7 @@ def modify(ctx, label: str, url: str, debug: bool) -> None: ) def push(remote: str, debug: bool, dirty: bool): """Push data between the local and remote registry""" + click.echo("Running push please wait") remote = remote[0] if remote else "origin" try: with fdp_session.FAIR( @@ -679,6 +682,7 @@ def config_email(user_email: str) -> None: ) def pull(config: str, debug: bool, 
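The reworked `list` group above now only falls back to showing everything when the user has not asked for a specific listing; the diff detects this by scanning `sys.argv` for `data-products` or `code-runs`. The sketch below shows the same behaviour using click's built-in `ctx.invoked_subcommand` hook instead of argument scanning; it is a minimal standalone illustration, not the CLI's actual code, and the echoed output is a placeholder.

```python
import click


@click.group(invoke_without_command=True)
@click.pass_context
def list_cmd(ctx: click.Context) -> None:
    """List data products and code runs (both, unless one is requested)."""
    # click records the requested subcommand here; when it is None the user
    # ran the bare group, so invoke both listings as `fair list` does.
    if ctx.invoked_subcommand is None:
        ctx.invoke(data_products)
        ctx.invoke(code_runs)


@list_cmd.command(name="data-products")
def data_products() -> None:
    click.echo("...registered data products...")


@list_cmd.command(name="code-runs")
def code_runs() -> None:
    click.echo("...registered code runs...")


if __name__ == "__main__":
    list_cmd()
```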
local: bool): """Update local registry from remotes and sources""" + click.echo("Running pull please wait") config = config[0] if config else fdp_com.local_user_config(os.getcwd()) try: with fdp_session.FAIR( diff --git a/fair/common.py b/fair/common.py index 8ae044a..d2042ed 100644 --- a/fair/common.py +++ b/fair/common.py @@ -296,4 +296,11 @@ def set_file_permissions(path: str): for dir in [os.path.join(root,d) for d in dirs]: os.chmod(dir, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) for file in [os.path.join(root, f) for f in files]: - os.chmod(file, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) \ No newline at end of file + os.chmod(file, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) + +def remove_readonly(fn, path, excinfo): + try: + os.chmod(path, stat.S_IWRITE) + fn(path) + except Exception as exc: + print("Skipped:", path, "because:\n", exc) \ No newline at end of file diff --git a/fair/configuration/__init__.py b/fair/configuration/__init__.py index 9d43dbd..9e8f501 100644 --- a/fair/configuration/__init__.py +++ b/fair/configuration/__init__.py @@ -262,7 +262,7 @@ def remote_git_repo(fair_repo_loc: str) -> str: def get_remote_token( repo_dir: str, remote: str = "origin", local: bool = False -) -> str: +) -> typing.Optional[str]: _local_config = read_local_fdpconfig(repo_dir) if remote not in _local_config["registries"]: raise fdp_exc.CLIConfigurationError( @@ -378,6 +378,22 @@ def get_current_user_uri(repo_loc: str) -> str: raise fdp_exc.CLIConfigurationError("No user URI identifier defined.") return _uri +def get_current_user_github(repo_loc: str) -> str: + """Retrieves the URI identifier for the current user + + Returns + ------- + str + github username + """ + _local_conf = read_local_fdpconfig(repo_loc) + try: + _github = _local_conf["user"]["github"] + except KeyError: + _github = None + if not _github or _github == "None": + raise fdp_exc.CLIConfigurationError("No user GitHub username defined.") + return _github def check_registry_exists(registry: str = None) -> typing.Optional[str]: """Checks if fair registry is set up on users machine @@ -436,10 +452,10 @@ def get_local_port(local_uri: str = None) -> int: return _port -def update_local_port() -> str: +def update_local_port(registry_dir: str = None) -> str: """Updates the local port in the global configuration from the session port file""" - _current_port = fdp_com.registry_session_port() - _current_address = fdp_com.registry_session_address() + _current_port = fdp_com.registry_session_port(registry_dir) + _current_address = fdp_com.registry_session_address(registry_dir) _new_url = f'http://{_current_address}:{_current_port}/api/' @@ -474,7 +490,7 @@ def _handle_orcid(user_orcid: str) -> typing.Tuple[typing.Dict, str]: user_orcid = click.prompt("ORCID") _user_info = fdp_id.check_orcid(user_orcid.strip()) - _user_info["orcid"] = user_orcid + _user_info["orcid"] = user_orcid.strip() click.echo( f"Found entry: {_user_info['given_names']} {_user_info['family_name']}" @@ -509,7 +525,7 @@ def _handle_ror(user_ror: str) -> typing.Tuple[typing.Dict, str]: user_ror = click.prompt("ROR ID") _user_info = fdp_id.check_ror(user_ror.strip()) - _user_info["ror"] = user_ror + _user_info["ror"] = user_ror.strip() click.echo(f"Found entry: {_user_info['family_name']} ") @@ -540,7 +556,7 @@ def _handle_grid(user_grid: str) -> typing.Tuple[typing.Dict, str]: user_grid = click.prompt("GRID ID") _user_info = fdp_id.check_grid(user_grid.strip()) - _user_info["grid"] = user_grid + _user_info["grid"] = user_grid.strip() click.echo(f"Found entry: 
{_user_info['family_name']} ") @@ -548,6 +564,22 @@ def _handle_grid(user_grid: str) -> typing.Tuple[typing.Dict, str]: return _user_info, _def_ospace +def _handle_github(user_github: str) -> typing.Tuple[typing.Dict, str]: + + _user_info = fdp_id.check_github(user_github.strip()) + + while not _user_info: + time.sleep(3) + click.echo(f"Invalid GitHub Username '{user_github}' given.") + user_github = click.prompt("GitHub Username") + _user_info = fdp_id.check_github(user_github.strip()) + + _def_ospace = _user_info["github"].lower() + + click.echo(f"Found entry: {_user_info['github']} ") + return _user_info, _def_ospace + + def _handle_uuid() -> typing.Tuple[typing.Dict, str]: """Obtain metadata for user where no ID provided @@ -575,17 +607,24 @@ def _handle_uuid() -> typing.Tuple[typing.Dict, str]: return _user_info, _def_ospace -def _get_user_info_and_namespaces() -> typing.Dict[str, typing.Dict]: - _user_email = click.prompt("Email") +def _get_user_info_and_namespaces(local: bool = False) -> typing.Dict[str, typing.Dict]: + _user_email = click.prompt("Email (optional)", default = "") _invalid_input = True while _invalid_input: _id_type = click.prompt( - "User ID system (ORCID/ROR/GRID/None)", default="None" + "User ID system (GITHUB/ORCID/ROR/GRID/None)", default="GITHUB" ) - - if _id_type.upper() == "ORCID": + if _id_type.upper() == "GITHUB": + _user_github = click.prompt("GitHub Username") + _user_info, _def_ospace = _handle_github(_user_github) + if not _user_info["name"]: + _user_info["given_names"] = click.prompt("Given Names") + _user_info["family_name"] = click.prompt("Family Name") + _user_info["name"] = " ".join([_user_info["given_names"], _user_info["family_name"]]) + _invalid_input = False + elif _id_type.upper() == "ORCID": _user_orcid = click.prompt("ORCID") _user_info, _def_ospace = _handle_orcid(_user_orcid) _invalid_input = False @@ -603,14 +642,25 @@ def _get_user_info_and_namespaces() -> typing.Dict[str, typing.Dict]: _user_info["uuid"] = _user_uuid _invalid_input = False - _user_info["email"] = _user_email - _def_ospace = _def_ospace.lower().replace(" ", "").strip() _def_ospace = click.prompt("Default output namespace", default=_def_ospace) _def_ispace = click.prompt("Default input namespace", default=_def_ospace) _namespaces = {"input": _def_ispace, "output": _def_ospace} + if not "github" in _user_info: + if local: + _user_info["github"] = "FAIRDataPipeline" + else: + _user_github = click.prompt("GitHub Username") + _user_github_info = _handle_github(_user_github)[0] + _user_info["github"] = _user_github_info["github"] + + if not _user_email: + _user_info["email"] = f'{_user_info["github"]}@users.noreply.github.com' + else: + _user_info["email"] = _user_email + return {"user": _user_info, "namespaces": _namespaces} @@ -668,21 +718,23 @@ def global_config_query( default=_remote_url.replace("/api/", "/data/"), ) - _rem_key_file = click.prompt( - "Remote API Token File", - ) - _rem_key_file = os.path.expandvars(_rem_key_file) - - while ( - not os.path.exists(_rem_key_file) - or not open(_rem_key_file).read().strip() - ): - click.echo( - f"Token file '{_rem_key_file}' does not exist or is empty, " - "please provide a valid token file." 
+ _rem_key_valid = False + while not _rem_key_valid: + _rem_key = click.prompt( + f"Remote API Token", ) - _rem_key_file = click.prompt("Remote API Token File") - _rem_key_file = os.path.expandvars(_rem_key_file) + if len(_rem_key) == 40: + _rem_key_valid = True + else: + click.echo("Remote token should be 40 characters long") + + _rem_key_file = os.path.join(fdp_com.global_config_dir(), "remotetoken.txt") + + with open(_rem_key_file, 'w') as f: + f.write(_rem_key) + + if not os.path.exists(_rem_key_file): + raise fdp_exc.CLIConfigurationError(f'Token could not be written to {_rem_key_file}') if not fdp_serv.check_server_running(): if _ := click.confirm( @@ -696,7 +748,7 @@ def global_config_query( pathlib.Path(_cache_addr).touch() else: - click.echo("Temporarily launching server to retrieve API token.") + click.echo("Temporarily launching local registry to retrieve API token.") fdp_serv.launch_server(registry_dir=registry) fdp_serv.stop_server(registry_dir=registry, local_uri=_local_uri) try: @@ -713,7 +765,7 @@ def global_config_query( if _loc_data_store[-1] != os.path.sep: _loc_data_store += os.path.sep - _glob_conf_dict = _get_user_info_and_namespaces() + _glob_conf_dict = _get_user_info_and_namespaces(local) _glob_conf_dict["registries"] = { "local": { "uri": _local_uri, diff --git a/fair/configuration/validation.py b/fair/configuration/validation.py index b1215b5..04d5d1a 100644 --- a/fair/configuration/validation.py +++ b/fair/configuration/validation.py @@ -108,6 +108,9 @@ class User(pydantic.BaseModel): name: typing.Optional[str] = pydantic.Field( None, title="Full Name", description="Full name for the user" ) + github: typing.Optional[str] = pydantic.Field( + None, title="GitHub Username", description="GitHub Username for the user" + ) class Config: extra = "forbid" diff --git a/fair/history.py b/fair/history.py index 1f7f73c..2c6ea0b 100644 --- a/fair/history.py +++ b/fair/history.py @@ -22,6 +22,8 @@ import click import rich +from pathlib import Path + import fair.common as fdp_com import fair.exceptions as fdp_exc import fair.run as fdp_run @@ -61,8 +63,10 @@ def show_job_log(repo_loc: str, job_id: str) -> str: str log file location for the given job """ + _job_dir = Path(f"{fdp_com.default_jobs_dir()}") + _sorted_time_dirs = sorted( - glob.glob(os.path.join(fdp_com.default_jobs_dir(), "*")), reverse=True + glob.glob(os.path.join(_job_dir, "*")), reverse=True ) _log_files = [ @@ -107,8 +111,10 @@ def show_history(repo_loc: str, length: int = 10) -> None: max number of entries to display, by default 10 """ + _job_dir = Path(f"{fdp_com.default_jobs_dir()}") + _sorted_time_dirs = sorted( - glob.glob(os.path.join(fdp_com.default_jobs_dir(), "*")), reverse=True + glob.glob(os.path.join(_job_dir, "*")), reverse=True ) _log_files = [ diff --git a/fair/identifiers.py b/fair/identifiers.py index c676489..b3c1daa 100644 --- a/fair/identifiers.py +++ b/fair/identifiers.py @@ -24,15 +24,22 @@ import requests import requests.exceptions +import logging +from urllib3.exceptions import InsecureRequestWarning +from fake_useragent import UserAgent + +logger = logging.getLogger("FAIRDataPipeline.Identifiers") ID_URIS = { "orcid": "https://orcid.org/", "ror": "https://ror.org/", + "github" : "https://github.com/" } QUERY_URLS = { "orcid": "https://pub.orcid.org/v3.0/", "ror": "https://api.ror.org/organizations?query=", + "github": "https://api.github.com/users/" } @@ -49,14 +56,16 @@ def check_orcid(orcid: str) -> typing.Dict: typing.Dict metadata from the given ID """ - + orcid = 
orcid.replace(ID_URIS["orcid"], "") _header = {"Accept": "application/json"} _url = urllib.parse.urljoin(QUERY_URLS["orcid"], orcid) - _response = requests.get(_url, headers=_header) + requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) + _response = requests.get(_url, headers=_header, verify = False, allow_redirects = True) _result_dict: typing.Dict[str, typing.Any] = {} if _response.status_code != 200: + logger.debug(f"{_url} Responded with {_response.status_code}") return _result_dict _names = _response.json()["person"]["name"] @@ -72,6 +81,42 @@ def check_orcid(orcid: str) -> typing.Dict: return _result_dict +def check_github(github: str) -> typing.Dict: + """Checks if valid ORCID using ORCID public api + + Parameters + ---------- + github : str + github username to be checked + + Returns + ------- + typing.Dict + metadata from the given ID + """ + _header = {"Accept": "application/json"} + _url = urllib.parse.urljoin(QUERY_URLS["github"], github) + requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) + _response = requests.get(_url, headers=_header, verify = False, allow_redirects = True) + + _result_dict: typing.Dict[str, typing.Any] = {} + + if _response.status_code != 200: + logger.debug(f"{_url} Responded with {_response.status_code}") + return _result_dict + + _login = _response.json()["login"] + _name = _response.json()["name"] + if _name: + _result_dict["family_name"] = _name.split()[-1] + _result_dict["given_names"] = " ".join(_name.split()[:-1]) + + _result_dict["name"] = _name + _result_dict["github"] = _login + _result_dict["uri"] = f'{ID_URIS["github"]}{_login}' + + return _result_dict + def check_ror(ror: str) -> typing.Dict: """Checks if valid ROR using ROR public api @@ -86,7 +131,6 @@ def check_ror(ror: str) -> typing.Dict: typing.Dict metadata from the given ID """ - _result_dict = _check_generic_ror(ror) if _result_dict: _result_dict["ror"] = ror @@ -124,13 +168,13 @@ def _check_generic_ror(id: str) -> typing.Dict: typing.Dict metadata from the given ID """ - _url = f"{QUERY_URLS['ror']}{id}" _response = requests.get(_url) _result_dict: typing.Dict[str, typing.Any] = {} if _response.status_code != 200: + logger.debug(f"{_url} Responded with {_response.status_code}") return _result_dict if _response.json()["number_of_results"] == 0: @@ -164,18 +208,25 @@ def check_id_permitted(identifier: str, retries: int = 5) -> bool: if valid identifier """ _n_attempts = 0 + fake_agent = False while _n_attempts < retries: try: - requests.get(identifier).raise_for_status() + requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) + headers = {} + if fake_agent: + headers = {'User-Agent':str(UserAgent().chrome)} + requests.get(identifier, verify = False, allow_redirects = True, headers = headers).raise_for_status() return True except ( requests.exceptions.MissingSchema, requests.exceptions.HTTPError, requests.exceptions.ConnectionError, - ): + ) as e: _n_attempts += 1 - time.sleep(1) + time.sleep(3) + fake_agent = True + logger.warning(f"Error identifier: '{identifier}' caused '{e}'") continue return False diff --git a/fair/register.py b/fair/register.py index 809e65b..48016b7 100644 --- a/fair/register.py +++ b/fair/register.py @@ -88,7 +88,7 @@ def fetch_registrations( typing.List[str] list of registered object URLs """ - _expected_keys = [ + _expected_keys_external_object = [ "root", "path", "file_type", @@ -97,9 +97,24 @@ def fetch_registrations( "public", ] + _expected_keys_data_product = [ + "root", + "path", + 
"file_type", + "version", + "public" + ] + _stored_objects: typing.List[str] = [] for entry in user_config_register: + logger.debug(f"entry {entry}") + + if "external_object" in entry: + _expected_keys = _expected_keys_external_object + else: + _expected_keys = _expected_keys_data_product + for key in _expected_keys: if key not in entry and key not in entry["use"]: raise fdp_exc.UserConfigError( @@ -166,18 +181,22 @@ def fetch_registrations( _search_data["version"] = entry["use"]["version"] _namespace = entry["use"]["namespace"] - if not _identifier and not _unique_name: - raise fdp_exc.UserConfigError( - "Expected either 'unique_name' or 'identifier' in 'register' item" - ) - - elif _identifier and _unique_name: - raise fdp_exc.UserConfigError( - "Only one unique identifier may be provided (doi/unique_name)" - ) + if _external_object: + if not _identifier and not _unique_name: + raise fdp_exc.UserConfigError( + "Expected either 'unique_name' or 'identifier' in 'register' item" + ) + elif _identifier and _unique_name: + raise fdp_exc.UserConfigError( + "Only one unique identifier may be provided (doi/unique_name)" + ) + # Set Remove to True by default so the tempory file gets deleted + _remove = True if "cache" in entry: _temp_data_file = entry["cache"] + # Don't delete the tempory file if it's from a cache + _remove = False else: _local_parsed = urllib.parse.urlparse(local_uri) _local_url = f"{_local_parsed.scheme}://{_local_parsed.netloc}" @@ -207,7 +226,8 @@ def fetch_registrations( " present with this name, deleting temporary data file", _name, ) - os.remove(_temp_data_file) + if _remove: + os.remove(_temp_data_file) continue # Item found but not hash matched retrieve a version number @@ -239,7 +259,8 @@ def fetch_registrations( logger.debug("Saving data file to '%s'", _local_file) shutil.copy(_temp_data_file, _local_file) - os.remove(_temp_data_file) + if _remove: + os.remove(_temp_data_file) if "public" in entry: _public = entry["public"] diff --git a/fair/registry/requests.py b/fair/registry/requests.py index 3fc3030..1835703 100644 --- a/fair/registry/requests.py +++ b/fair/registry/requests.py @@ -42,6 +42,9 @@ import ssl from requests.adapters import HTTPAdapter from urllib3.poolmanager import PoolManager +from urllib3.exceptions import InsecureRequestWarning + +from fake_useragent import UserAgent logger = logging.getLogger("FAIRDataPipeline.Requests") @@ -528,8 +531,8 @@ def download_file(url: str, chunk_size: int = 8192) -> str: _file = tempfile.NamedTemporaryFile(delete=False) _fname = _file.name - # Copy File if local (Windows fix) - if "file://" in url and platform.system() == "Windows": + # Copy File if local + if "file://" in url: _local_fname = url.replace("file://", "") try: shutil.copy2(_local_fname, _fname) @@ -538,17 +541,16 @@ def download_file(url: str, chunk_size: int = 8192) -> str: f"Failed to download file '{url}'" f" due to connection error: {traceback.format_exc()}" ) from e - else: try: - with urllib.request.urlopen(url) as response, open( - _fname, "wb" - ) as out_file: - shutil.copyfileobj(response, out_file) - except urllib.error.URLError as e: + requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) + headers = {'User-Agent':str(UserAgent().chrome)} + response = requests.get(url, allow_redirects = True, verify = False, headers = headers) + open(_fname, 'wb').write(response.content) + except Exception as e: raise fdp_exc.FAIRCLIException( f"Failed to download file '{url}'" - f" due to connection error: {e.reason}" + f" due to 
connection error: {traceback.format_exc()}" ) from e return _fname @@ -571,7 +573,7 @@ def get_dependency_listing(uri: str, token: str, read_only: bool = False) -> typ """ try: _registry_objs = url_get(uri, token) - except: + except Exception: return {[]} _rtn = { diff --git a/fair/registry/server.py b/fair/registry/server.py index 6bc6faf..59b469a 100644 --- a/fair/registry/server.py +++ b/fair/registry/server.py @@ -124,7 +124,7 @@ def launch_server( _cmd = [_server_start_script, "-p", f"{port}", "-a", f"{address}"] - os.environ["FAIR_ALLOWED_HOSTS"] = address if not "FAIR_ALLOWED_HOSTS" in os.environ else os.environ["FAIR_ALLOWED_HOSTS"] + f",{address}" + os.environ["FAIR_ALLOWED_HOSTS"] = address if "FAIR_ALLOWED_HOSTS" not in os.environ else os.environ["FAIR_ALLOWED_HOSTS"] + f",{address}" logger.debug("Launching server with command '%s'", " ".join(_cmd)) @@ -141,7 +141,7 @@ def launch_server( _start.wait() - local_uri = fdp_conf.update_local_port() + local_uri = fdp_conf.update_local_port(registry_dir) if not check_server_running(local_uri): raise fdp_exc.RegistryError( @@ -193,10 +193,10 @@ def stop_server( " is the FAIR data pipeline properly installed on this system?" ) - logger.debug("Stopping local registry server.") + logger.debug(f"Stopping local registry server with '{_server_stop_script}'.") _stop = subprocess.Popen( - _server_stop_script, + [_server_stop_script, ""], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=False, @@ -328,7 +328,7 @@ def install_registry( logger.debug("Removing existing installation at '%s'", install_dir) if platform.system() == "Windows": fdp_com.set_file_permissions(install_dir) - shutil.rmtree(install_dir, ignore_errors=True) + shutil.rmtree(install_dir, onerror=fdp_com.remove_readonly) logger.debug("Creating directories for installation if they do not exist") @@ -423,7 +423,7 @@ def uninstall_registry() -> None: # On windows file permisions need to be set prior to removing the directory if platform.system() == "Windows": fdp_com.set_file_permissions(fdp_com.registry_home()) - shutil.rmtree(fdp_com.registry_home()) + shutil.rmtree(fdp_com.registry_home(), onerror=fdp_com.remove_readonly) elif os.path.exists(fdp_com.DEFAULT_REGISTRY_LOCATION): logger.debug( "Uninstalling registry, removing '%s'", @@ -432,7 +432,7 @@ def uninstall_registry() -> None: # On windows file permisions need to be set prior to removing the directory if platform.system() == "Windows": fdp_com.set_file_permissions(fdp_com.DEFAULT_REGISTRY_LOCATION) - shutil.rmtree(fdp_com.DEFAULT_REGISTRY_LOCATION) + shutil.rmtree(fdp_com.DEFAULT_REGISTRY_LOCATION, onerror=fdp_com.remove_readonly) else: raise fdp_exc.RegistryError( "Cannot uninstall registry, no local installation identified" @@ -440,7 +440,7 @@ def uninstall_registry() -> None: def update_registry_post_setup( - repo_dir: str, global_setup: bool = False + repo_dir: str, global_setup: bool = False, registry_dir: str = None ) -> None: """Add user namespace and file types after CLI setup @@ -458,25 +458,25 @@ def update_registry_post_setup( _is_running = check_server_running(fdp_conf.get_local_uri()) if not _is_running: - launch_server() + launch_server(registry_dir = registry_dir) if global_setup: logger.debug("Populating file types") fdp_store.populate_file_type( - fdp_conf.get_local_uri(), fdp_req.local_token() + fdp_conf.get_local_uri(), fdp_req.local_token(registry_dir) ) logger.debug("Adding 'author' and 'UserAuthor' entries if not present") # Add author and UserAuthor _author_url = fdp_store.store_user( - 
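The rewritten `download_file` above drops `urllib.request` in favour of `requests` with redirects enabled, and now copies `file://` sources on every platform rather than only on Windows. The sketch below reproduces that shape in a standalone form; the added `timeout` and `raise_for_status()` are illustrative hardening rather than something the diff itself does.

```python
import shutil
import tempfile

import requests


def download_file(url: str) -> str:
    """Save `url` to a named temporary file and return the file's path."""
    tmp = tempfile.NamedTemporaryFile(delete=False)
    tmp.close()

    if url.startswith("file://"):
        # Local sources are simply copied instead of fetched over HTTP.
        shutil.copy2(url.replace("file://", "", 1), tmp.name)
    else:
        response = requests.get(url, allow_redirects=True, timeout=30)
        response.raise_for_status()  # illustrative: surface HTTP errors early
        with open(tmp.name, "wb") as out:
            out.write(response.content)
    return tmp.name


# Round-trip a small local file through the file:// branch.
src = tempfile.NamedTemporaryFile(delete=False, suffix=".csv")
src.write(b"a,b\n1,2\n")
src.close()
print(download_file(f"file://{src.name}"))
```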
repo_dir, fdp_conf.get_local_uri(), fdp_req.local_token() + repo_dir, fdp_conf.get_local_uri(), fdp_req.local_token(registry_dir) ) try: _admin_url = fdp_req.get( fdp_conf.get_local_uri(), "users", - fdp_req.local_token(), + fdp_req.local_token(registry_dir), params={"username": "admin"}, )[0]["url"] except (KeyError, IndexError) as e: @@ -487,10 +487,10 @@ def update_registry_post_setup( fdp_req.post_else_get( fdp_conf.get_local_uri(), "user_author", - fdp_req.local_token(), + fdp_req.local_token(registry_dir), data={"user": _admin_url, "author": _author_url}, ) # Only stop the server if it was not running initially if not _is_running: - stop_server() \ No newline at end of file + stop_server(registry_dir) \ No newline at end of file diff --git a/fair/registry/storage.py b/fair/registry/storage.py index 17d7a69..e546a26 100644 --- a/fair/registry/storage.py +++ b/fair/registry/storage.py @@ -136,6 +136,11 @@ def store_user(repo_dir: str, uri: str, token: str) -> str: return store_author(uri, token, name, _id, _uuid) +def store_user_author(uri:str, token:str, user_uri:str, author_uri:str)-> str: + _data = {"user": user_uri, "author": author_uri} + + return fdp_req.post_else_get(uri, "user_author", token, _data, _data) + def populate_file_type(uri: str, token: str) -> typing.List[typing.Dict]: """Populates file_type table with common file file_types @@ -477,7 +482,7 @@ def _get_url_from_storage_loc( _storage_loc_data = { "path": relative_path, "storage_root": root_store_url, - "public": is_public, + "public": str(is_public).lower(), "hash": _hash, } @@ -580,7 +585,7 @@ def _get_url_from_object( raise e else: raise fdp_exc.RegistryAPICallError( - f"Cannot post object" f"'{_desc}', duplicate already exists", + f"Cannot post object '{_desc}', duplicate already exists", error_code=409, ) from e diff --git a/fair/registry/sync.py b/fair/registry/sync.py index 2785038..f56f933 100644 --- a/fair/registry/sync.py +++ b/fair/registry/sync.py @@ -66,7 +66,7 @@ def _dependency_of(url_list: collections.deque, item: str, _dependency_list:dict url_list.appendleft(item) try: _results = fdp_req.url_get(item, token) - except: + except Exception: _results = {} _type = fdp_req.get_obj_type_from_url(item, token) for req, val in _results.items(): @@ -124,7 +124,7 @@ def pull_all_namespaces( ) if not _remote_namespaces: - return + return [] _writable_fields = fdp_req.get_writable_fields( local_uri, "namespace", local_token @@ -139,7 +139,6 @@ def pull_all_namespaces( local_uri, "namespace", local_token, _writable_data ) - def sync_dependency_chain( object_url: str, dest_uri: str, @@ -148,6 +147,7 @@ def sync_dependency_chain( origin_token: str, local_data_store: str = None, public: bool = False, + remote_author_url = None ) -> typing.Dict[str, str]: """Push an object and all of its dependencies to the remote registry @@ -225,7 +225,8 @@ def sync_dependency_chain( new_urls=_new_urls, writable_data=_writable_data, local_data_store = local_data_store, - public = public + public = public, + remote_author_url = remote_author_url ) if not fdp_util.is_api_url(dest_uri, _new_url): @@ -248,7 +249,8 @@ def _get_new_url( new_urls: typing.Dict, writable_data: typing.Dict, local_data_store = None, - public = False + public = False, + remote_author_url = None ) -> typing.Tuple[typing.Dict, typing.List]: """Internal Function to return a resgistry entry from the remote registry given an origin entry URL If the entry does not exist it will be created @@ -326,6 +328,9 @@ def _get_new_url( _new_obj_data["storage_root"] = 
_remote_storage_root_url _filters["storage_root"] = fdp_req.get_obj_id_from_url(_remote_storage_root_url) + if remote_author_url and _obj_type == "object": + _new_obj_data["authors"] = _filters["authors"] = [remote_author_url] + return fdp_req.post_else_get( dest_uri, _obj_type, @@ -334,6 +339,54 @@ def _get_new_url( params=_filters, ) +def sync_author( + origin_uri: str, + dest_uri: str, + dest_token: str, + origin_token: str, + identifier: str +) -> None: + current_author = fdp_req.get( + origin_uri, + "author", + origin_token, + params= {"identifier": identifier} + ) + if not current_author: + raise fdp_exc.RegistryError(f"No author matching {name}, on local registry", "Have you run fair init?") + current_author_url = current_author[0]['url'] + new_urls = sync_dependency_chain( + current_author_url, + dest_uri, + origin_uri, + dest_token, + origin_token + ) + if not new_urls: + raise fdp_exc.RegistryError(f"Auther {name}, could not be pushed to {dest_uri}") + new_author_url = new_urls[current_author_url] + if not new_author_url: + raise fdp_exc.RegistryError(f"Auther {name}, was not be pushed to {dest_uri}") + return new_author_url + +def sync_user_author( + origin_uri: str, + dest_uri: str, + dest_token: str, + origin_token: str, + author_url: str, + github: str +) -> None: + current_user = fdp_req.get( + dest_uri, + "users", + dest_token, + params= {"username": github} + ) + if not current_user: + raise fdp_exc.RegistryError(f"No user matching {github}, on remote registry", "Does your GitHub username match the remote registry GitHub user?") + current_user_url = current_user[0]['url'] + fdp_store.store_user_author(dest_uri, dest_token, current_user_url, author_url) def sync_data_products( origin_uri: str, @@ -344,6 +397,7 @@ def sync_data_products( data_products: typing.List[str], local_data_store: str = None, force: bool = False, + remote_author_url = None ) -> None: """Transfer data products from one registry to another @@ -440,7 +494,8 @@ def sync_data_products( dest_token=dest_token, origin_token=origin_token, local_data_store=local_data_store, - public=_is_public + public=_is_public, + remote_author_url = remote_author_url ) # If local_data_store assume we're syncing from remote to local if local_data_store: @@ -462,8 +517,8 @@ def sync_data_products( for origin_input_code_run in origin_input_code_runs: dest_component_url = get_dest_component_url(origin_component_url, dest_uri, dest_token, origin_token) dest_inputs = [dest_component_url] - dest_inputs += get_dest_inputs(origin_input_code_run["inputs"], origin_uri, dest_uri, dest_token, origin_token, remote_label) - sync_code_run(origin_uri, dest_uri, dest_token, origin_token, origin_input_code_run["uuid"], inputs= dest_inputs) + dest_inputs += get_dest_inputs(origin_input_code_run["inputs"], origin_uri, dest_uri, dest_token, origin_token, remote_label, remote_author_url) + sync_code_run(origin_uri, dest_uri, dest_token, origin_token, origin_input_code_run["uuid"], inputs= dest_inputs, remote_author_url = remote_author_url) origin_output_code_runs = fdp_req.get( origin_uri, @@ -473,8 +528,8 @@ def sync_data_products( ) for origin_output_code_run in origin_output_code_runs: dest_component_url = get_dest_component_url(origin_component_url, dest_uri, dest_token, origin_token) - dest_inputs = get_dest_inputs(origin_output_code_run["inputs"], origin_uri, dest_uri, dest_token, origin_token, remote_label, force) - sync_code_run(origin_uri, dest_uri, dest_token, origin_token, origin_output_code_run["uuid"], inputs= dest_inputs, outputs= 
[dest_component_url]) + dest_inputs = get_dest_inputs(origin_output_code_run["inputs"], origin_uri, dest_uri, dest_token, origin_token, remote_label, force, remote_author_url) + sync_code_run(origin_uri, dest_uri, dest_token, origin_token, origin_output_code_run["uuid"], inputs= dest_inputs, outputs= [dest_component_url], remote_author_url = remote_author_url) def get_dest_component_url( origin_component_url: str, @@ -521,6 +576,7 @@ def sync_code_runs( code_runs: typing.List[str], local_data_store: str = None, force: bool = False, + remote_author_url = None ) -> None: """Transfer data code_run(s) from one registry to another @@ -589,7 +645,8 @@ def sync_code_runs( remote_label, _origin_data_products_formatted, local_data_store, - force) + force, + remote_author_url) # Iterate through formatted objects and get their new values from the remote registry for _origin_data_product_formatted in _origin_data_products_formatted: @@ -617,7 +674,7 @@ def sync_code_runs( if _origin_data_product_formatted in _outputs_data_products: _dest_outputs += _dest_object["components"] logger.debug(f'attempting to sync coderun {code_run_uuid} with inputs {_dest_inputs} and outputs {_dest_outputs}') - sync_code_run(origin_uri, dest_uri, dest_token, origin_token, code_run_uuid, _dest_inputs, _dest_outputs) + sync_code_run(origin_uri, dest_uri, dest_token, origin_token, code_run_uuid, _dest_inputs, _dest_outputs, remote_author_url = remote_author_url) # Internal function to return the (remote) object associated with a code_run field containing and object url def get_dest_object_url( @@ -663,7 +720,8 @@ def sync_code_run( origin_token: str, code_run_uuid: str, inputs = [], - outputs = []) -> typing.Dict: + outputs = [], + remote_author_url = None) -> typing.Dict: """_summary_ Args: @@ -710,7 +768,8 @@ def sync_code_run( origin_uri=origin_uri, dest_token=dest_token, origin_token=origin_token, - public= True + public= True, + remote_author_url = remote_author_url ) _dest_code_run_model_config = get_dest_object_url(code_run["model_config"], dest_uri, dest_token, origin_token) upload_object(origin_uri, dest_uri, dest_token, origin_token, code_run["model_config"]) @@ -723,7 +782,8 @@ def sync_code_run( origin_uri=origin_uri, dest_token=dest_token, origin_token=origin_token, - public= True + public= True, + remote_author_url = remote_author_url ) _dest_code_run_code_repo = get_dest_object_url(code_run["code_repo"], dest_uri, dest_token, origin_token) # Sync Submision Script @@ -733,7 +793,8 @@ def sync_code_run( origin_uri=origin_uri, dest_token=dest_token, origin_token=origin_token, - public= True + public= True, + remote_author_url = remote_author_url ) _dest_code_run_submission_script = get_dest_object_url(code_run["submission_script"], dest_uri, dest_token, origin_token) upload_object(origin_uri, dest_uri, dest_token, origin_token, code_run["submission_script"]) @@ -828,7 +889,8 @@ def get_dest_inputs(origin_inputs: typing.List, dest_token: str, origin_token: str, remote_label: str, - force: bool = False) -> list: + force: bool = False, + remote_author_url = None) -> list: """Returns a list of input component urls on the destination registry from a given list of input component urls from the origin registry assumes the destination componets already exists in the remote registry @@ -849,7 +911,7 @@ def get_dest_inputs(origin_inputs: typing.List, for origin_input in origin_inputs: component_data_products = get_data_products_from_component(origin_input, origin_token) if component_data_products: - 
sync_data_products(origin_uri, dest_uri, dest_token, origin_token, remote_label, format_data_product_list(component_data_products, origin_token), force= force) + sync_data_products(origin_uri, dest_uri, dest_token, origin_token, remote_label, format_data_product_list(component_data_products, origin_token), force= force, remote_author_url = remote_author_url) for data_product_url in component_data_products: origin_data_product_object_url = fdp_req.url_get(data_product_url, origin_token)["object"] dest_object_url = get_dest_object_url(origin_data_product_object_url, dest_uri, dest_token, origin_token) @@ -862,7 +924,8 @@ def get_dest_inputs(origin_inputs: typing.List, origin_uri=origin_uri, dest_token=dest_token, origin_token=origin_token, - public= is_component_public(origin_input, origin_token) + public= is_component_public(origin_input, origin_token), + remote_author_url = remote_author_url ) dest_inputs.append(get_dest_component_url(origin_input, dest_uri, dest_token, origin_token)) return dest_inputs @@ -1019,7 +1082,7 @@ def upload_object(origin_uri:str, dest_uri:str, dest_token:str, origin_token:str fdp_store.upload_remote_file(_file_loc, dest_uri, dest_token) logger.debug(f"File {_file_loc} Uploaded Successfully") return True - except Exception as e: + except Exception: logger.warning(f'File upload error: {_object["description"]} was not uploaded to remote registry please upload the file manually') logger.debug(f'{traceback.format_exc()}') return False \ No newline at end of file diff --git a/fair/session.py b/fair/session.py index 63073a2..31ccaf2 100644 --- a/fair/session.py +++ b/fair/session.py @@ -211,7 +211,7 @@ def purge( click.echo(f"Removing directory '{_root_dir}'") if platform.system() == "Windows": fdp_com.set_file_permissions(_root_dir) - shutil.rmtree(_root_dir) + shutil.rmtree(_root_dir, onerror=fdp_com.remove_readonly) if clear_all: try: if fdp_serv.check_server_running(): @@ -225,7 +225,7 @@ def purge( click.echo(f"Removing directory '{fdp_com.USER_FAIR_DIR}'") if platform.system() == "Windows": fdp_com.set_file_permissions(fdp_com.USER_FAIR_DIR) - shutil.rmtree(fdp_com.USER_FAIR_DIR) + shutil.rmtree(fdp_com.USER_FAIR_DIR, onerror=fdp_com.remove_readonly) return if clear_data: try: @@ -236,7 +236,7 @@ def purge( if os.path.exists(fdp_com.default_data_dir()): if platform.system() == "Windows": fdp_com.set_file_permissions(fdp_com.default_data_dir()) - shutil.rmtree(fdp_com.default_data_dir()) + shutil.rmtree(fdp_com.default_data_dir(), onerror=fdp_com.remove_readonly) except FileNotFoundError as e: raise fdp_exc.FileNotFoundError( "Cannot remove local data store, a global CLI configuration " @@ -252,7 +252,7 @@ def purge( if os.path.exists(_global_dirs): if platform.system() == "Windows": fdp_com.set_file_permissions(_global_dirs) - shutil.rmtree(_global_dirs) + shutil.rmtree(_global_dirs, onerror=fdp_com.remove_readonly) def _setup_server(self, port: int, address: str) -> None: """Start or stop the server if required""" @@ -360,7 +360,7 @@ def registry_status(self): if fdp_serv.check_server_running(): click.echo(f'Server running at: {fdp_conf.get_local_uri()}') else: - click.echo(f'Server is not running') + click.echo('Server is not running') def push(self, remote: str = "origin"): self._pre_job_setup(remote) @@ -381,6 +381,27 @@ def push(self, remote: str = "origin"): if not _staged_code_runs: click.echo("No Staged Code Runs to Push.") + remote_author_url = fdp_sync.sync_author( + origin_uri=fdp_conf.get_local_uri(), + 
dest_uri=fdp_conf.get_remote_uri(self._session_loc, remote), + dest_token=fdp_conf.get_remote_token( + self._session_loc, remote, local=self._local + ), + origin_token=fdp_req.local_token(), + identifier= fdp_conf.get_current_user_uri(self._session_loc) + ) + + fdp_sync.sync_user_author( + origin_uri=fdp_conf.get_local_uri(), + dest_uri=fdp_conf.get_remote_uri(self._session_loc, remote), + dest_token=fdp_conf.get_remote_token( + self._session_loc, remote, local=self._local + ), + origin_token=fdp_req.local_token(), + author_url = remote_author_url, + github = fdp_conf.get_current_user_github(self._session_loc) + ) + fdp_sync.sync_code_runs( origin_uri=fdp_conf.get_local_uri(), dest_uri=fdp_conf.get_remote_uri(self._session_loc, remote), @@ -390,6 +411,7 @@ def push(self, remote: str = "origin"): origin_token=fdp_req.local_token(), remote_label=remote, code_runs=_staged_code_runs, + remote_author_url = remote_author_url ) fdp_sync.sync_data_products( @@ -401,6 +423,7 @@ def push(self, remote: str = "origin"): origin_token=fdp_req.local_token(), remote_label=remote, data_products=_staged_data_products, + remote_author_url = remote_author_url ) self._session_config.write_log_lines( @@ -897,7 +920,7 @@ def get_code_run_description(self, uuid: str)-> str: params={"uuid": uuid}) if _remote_code_run: return _remote_code_run[0]["description"] - except Exception as e: + except Exception: pass return "Unknown" @@ -943,7 +966,7 @@ def show_all_code_runs(self, remote: str = None): if _remote_code_runs: for _remote_code_run in _remote_code_runs: _code_run_uuids.append(_remote_code_run["uuid"]) - except Exception as e: + except Exception: self._logger.warning("Could not Fetch from a remote registry") self._logger.debug(f'{traceback.format_exc()}') _code_run_uuids = list(set(_code_run_uuids)) @@ -969,7 +992,7 @@ def show_all_data_products(self, remote: str = None): for remote_data_product in _remote_data_products: _namespace_name = fdp_req.url_get(remote_data_product["namespace"], fdp_conf.get_remote_token(self._session_loc, remote, local=self._local))["name"] _data_products.append(f'{_namespace_name}:{remote_data_product["name"]}@v{remote_data_product["version"]}') - except Exception as e: + except Exception: self._logger.warning("Could not Fetch from a remote registry") self._logger.debug(f'{traceback.format_exc()}') _data_products = list(set(_data_products)) @@ -1084,7 +1107,7 @@ def initialise( using: typing.Dict = None, registry: str = None, export_as: str = None, - local: bool = False, + local: bool = False ) -> None: """Initialise an fair repository within the current location @@ -1100,21 +1123,26 @@ def initialise( _first_time = not os.path.exists(fdp_com.global_fdpconfig()) if self._testing: + if os.path.exists(_fair_dir): + if platform.system() == "Windows": + fdp_com.set_file_permissions(_fair_dir) + shutil.rmtree(_fair_dir, onerror=fdp_com.remove_readonly) using = fdp_test.create_configurations( registry, fdp_com.find_git_root(os.getcwd()), os.getcwd(), - os.path.join(os.getcwd(), "data_store"), + os.path.join(os.getcwd(), ".fair"), ) - if os.path.exists(_fair_dir): + if os.path.exists(_fair_dir) and not self._testing: if export_as: self._export_cli_configuration(export_as) return - click.echo("FAIR repository is already initialised.") - return + else: + click.echo("FAIR repository is already initialised.") + return - if _existing := fdp_com.find_fair_root(self._session_loc): + if _existing := fdp_com.find_fair_root(self._session_loc) and not self._testing: click.echo( "A FAIR repository was 
initialised for this location at" f" '{_existing}'" @@ -1129,8 +1157,8 @@ def initialise( "Initialising FAIR repository, setup will now ask for basic info (leave blank for default value):\n" ) - if not os.path.exists(_fair_dir): - os.mkdir(_fair_dir) + if not os.path.exists(_fair_dir) or self._testing: + os.makedirs(_fair_dir, exist_ok=True) os.makedirs(fdp_com.session_cache_dir(), exist_ok=True) if using: self._validate_and_load_cli_config(using) @@ -1138,12 +1166,14 @@ def initialise( if not os.path.exists(fdp_com.global_fdpconfig()): try: + click.echo("Setup will now ask you questions regarding the global configuration") self._global_config = fdp_conf.global_config_query( registry, local ) except (fdp_exc.CLIConfigurationError, click.Abort) as e: self._clean_reset(_fair_dir, e) try: + click.echo("Setup will now ask you questions regarding this repo configuration") self._local_config = fdp_conf.local_config_query( self._global_config, first_time_setup=_first_time, @@ -1153,6 +1183,7 @@ def initialise( self._clean_reset(_fair_dir, e, True) elif not using: try: + click.echo("Setup will now ask you questions regarding this repo configuration") self._local_config = fdp_conf.local_config_query( self._global_config, local=local ) @@ -1164,7 +1195,9 @@ def initialise( with open(fdp_com.global_fdpconfig(), "w") as f: yaml.dump(self._global_config, f) else: + click.echo("Setup will now ask you questions regarding the global configuration") self._global_config = fdp_conf.read_global_fdpconfig() + click.echo("Setup will now ask you questions regarding this repo configuration") self._local_config = fdp_conf.read_local_fdpconfig( self._session_loc ) @@ -1172,7 +1205,7 @@ def initialise( if export_as: self._export_cli_configuration(export_as) - fdp_serv.update_registry_post_setup(self._session_loc, _first_time) + fdp_serv.update_registry_post_setup(self._session_loc, _first_time, registry) try: fdp_clivalid.LocalCLIConfig(**self._local_config) except pydantic.ValidationError as e: @@ -1203,12 +1236,12 @@ def _clean_reset( if not local_only: if platform.system() == "Windows": fdp_com.set_file_permissions(fdp_com.session_cache_dir()) - fdp_com.set_file_permissions(fdp_com.fdp_com.global_config_dir()) - shutil.rmtree(fdp_com.session_cache_dir(), ignore_errors=True) - shutil.rmtree(fdp_com.global_config_dir(), ignore_errors=True) + fdp_com.set_file_permissions(fdp_com.global_config_dir()) + shutil.rmtree(fdp_com.session_cache_dir(), onerror=fdp_com.remove_readonly) + shutil.rmtree(fdp_com.global_config_dir(), onerror=fdp_com.remove_readonly) if platform.system() == "Windows": fdp_com.set_file_permissions(_fair_dir) - shutil.rmtree(_fair_dir) + shutil.rmtree(_fair_dir, onerror=fdp_com.remove_readonly) if e: raise e @@ -1271,7 +1304,7 @@ def _validate_and_load_cli_config(self, cli_config: typing.Dict): "Expected key 'directory' for local registry in CLI configuration" ) - _user_keys = ["email", "family_name", "given_names", "orcid", "uuid"] + _user_keys = ["email", "family_name", "given_names", "uuid", "github"] for key in _user_keys: if key not in cli_config["user"]: @@ -1280,9 +1313,9 @@ def _validate_and_load_cli_config(self, cli_config: typing.Dict): f"Expected key 'user:{key}' in CLI configuration file" ) - if not cli_config["user"]["orcid"] and not cli_config["user"]["uuid"]: + if not cli_config["user"]["github"] and not cli_config["user"]["uuid"]: raise fdp_exc.CLIConfigurationError( - "At least one of 'user:orcid' and 'user:uuid' must be provided " + "At least one of 'user:github' and 'user:uuid' 
must be provided " " in CLI configuration" ) diff --git a/fair/testing.py b/fair/testing.py index b0d85ca..312d2b6 100644 --- a/fair/testing.py +++ b/fair/testing.py @@ -14,7 +14,7 @@ def create_configurations( local_git_dir: typing.Optional[str] = None, remote_reg_dir: typing.Optional[str] = None, testing_dir: str = tempfile.mkdtemp(), - tokenless: bool = False, + tokenless: bool = False ) -> typing.Dict: """ Setup CLI for testing @@ -80,8 +80,9 @@ def create_configurations( "family_name": "Test", "given_names": "Interface", "orcid": "000-0000-0000-0000", - "uri": f'{fdp_id.ID_URIS["orcid"]}000-0000-0000-0000', + "uri": f'{fdp_id.ID_URIS["github"]}FAIRDataPipeline', "uuid": "2ddb2358-84bf-43ff-b2aa-3ac7dc3b49f1", + "github": "FAIRDataPipeline" }, "git": { "local_repo": local_git_dir, diff --git a/fair/user_config/__init__.py b/fair/user_config/__init__.py index c1b6cd9..0b37ea9 100644 --- a/fair/user_config/__init__.py +++ b/fair/user_config/__init__.py @@ -332,6 +332,14 @@ def _switch_namespace_name_to_use(self, register_block: typing.List): for register_entry in register_block: _new_entry = register_entry.copy() if "namespace_name" not in register_entry: + if not "use" in register_entry: + _new_entry["use"] = {} + else: + if "namespace" in register_entry["use"]: + _new_entry["use"]["namespace"] = register_entry["use"]["namespace"] + else: + _new_entry["use"]["namespace"] = self.default_input_namespace + _new_register_block.append(_new_entry) continue if ( register_entry["namespace_name"] diff --git a/poetry.lock b/poetry.lock index da60205..a79da74 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -23,7 +22,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "bandit" version = "1.7.5" description = "Security oriented static analyser for python code." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -45,32 +43,45 @@ yaml = ["PyYAML"] [[package]] name = "black" -version = "22.12.0" +version = "23.3.0" description = "The uncompromising code formatter." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, + {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, + {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, + {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, + {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, + {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, + {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, + {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, + {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, + {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, + {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, + {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, + {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, + {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] @@ -83,7 +94,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "blinker" version = "1.6.2" description = "Fast, simple object-to-object and broadcast signaling" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -93,18 +103,17 @@ files = [ [[package]] name = "boto3" -version = "1.26.134" +version = "1.27.1" description = "The AWS SDK for Python" -category = "dev" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.26.134-py3-none-any.whl", hash = "sha256:a49b47621c71adfa952127222809ae50867ae4fd249bb932eb1a98519baefa40"}, - {file = "boto3-1.26.134.tar.gz", hash = "sha256:2da4a4caa789312ae73d29be9d3e79ce3328e3aaf7e9de0da6f243455ad3aae6"}, + {file = "boto3-1.27.1-py3-none-any.whl", hash = "sha256:0085c1066953e61915b34f24fbdee7117fd2d8b5c9188b9519d47ba84510c067"}, + {file = "boto3-1.27.1.tar.gz", hash = "sha256:cf43deb4556295219d9de44d1c95921209c90ee25246673b5768aef9d46519cc"}, ] [package.dependencies] -botocore = ">=1.29.134,<1.30.0" +botocore = ">=1.30.1,<1.31.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -113,14 +122,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.29.134" +version = "1.30.1" description = "Low-level, data-driven core of 
boto 3." -category = "dev" optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.29.134-py3-none-any.whl", hash = "sha256:8a070ee14a430bd3c9cd16fd142e5c2900749060490698b2b981d6d9dadf5f1f"}, - {file = "botocore-1.29.134.tar.gz", hash = "sha256:0e907b0cab771ab7c9e25efd6b6bc0041ec1b17eb0bab316fd012ef2f8fd99ba"}, + {file = "botocore-1.30.1-py3-none-any.whl", hash = "sha256:18a32a21bfa9b418b9a38ea5ef4464eba003cbb26fca2cd56e4f51098c5d1a0f"}, + {file = "botocore-1.30.1.tar.gz", hash = "sha256:4d1ac5a796c5c5c87946f25f3d98764288a0ed848e772a7a47cd134847e885e7"}, ] [package.dependencies] @@ -135,7 +143,6 @@ crt = ["awscrt (==0.16.9)"] name = "build" version = "0.10.0" description = "A simple, correct Python build frontend" -category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -157,14 +164,13 @@ virtualenv = ["virtualenv (>=20.0.35)"] [[package]] name = "cachecontrol" -version = "0.12.11" +version = "0.12.14" description = "httplib2 caching for requests" -category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "CacheControl-0.12.11-py2.py3-none-any.whl", hash = "sha256:2c75d6a8938cb1933c75c50184549ad42728a27e9f6b92fd677c3151aa72555b"}, - {file = "CacheControl-0.12.11.tar.gz", hash = "sha256:a5b9fcc986b184db101aa280b42ecdcdfc524892596f606858e0b7a8b4d9e144"}, + {file = "CacheControl-0.12.14-py2.py3-none-any.whl", hash = "sha256:1c2939be362a70c4e5f02c6249462b3b7a24441e4f1ced5e9ef028172edf356a"}, + {file = "CacheControl-0.12.14.tar.gz", hash = "sha256:d1087f45781c0e00616479bfd282c78504371ca71da017b49df9f5365a95feba"}, ] [package.dependencies] @@ -180,7 +186,6 @@ redis = ["redis (>=2.10.5)"] name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -192,7 +197,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "dev" optional = false python-versions = "*" files = [ @@ -269,7 +273,6 @@ pycparser = "*" name = "cfgv" version = "3.3.1" description = "Validate configuration and produce human readable error messages." 
-category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -277,45 +280,10 @@ files = [ {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] -[[package]] -name = "cftime" -version = "1.6.2" -description = "Time-handling functionality from netcdf4-python" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cftime-1.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4d2a1920f0aad663f25700b30621ff64af373499e52b544da1148dd8c09409a"}, - {file = "cftime-1.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ba7909a0cd4adcb16797d8d6ab2767e7ddb980b2bf9dbabfc71b3bdd94f072b"}, - {file = "cftime-1.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acb294fdb80e33545ae54b4421df35c4e578708a5ffce1c00408b2294e70ecef"}, - {file = "cftime-1.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:2abdac6ca5b8b6102f319122546739dfc42406b816c16f2a98a8f0cd406d3bf0"}, - {file = "cftime-1.6.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eb7f8cd0996640b83020133b5ef6b97fc9216c3129eaeeaca361abdff5d82166"}, - {file = "cftime-1.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8d49d69c64cee2c175478eed84c3a57fce083da4ceebce16440f72be561a8489"}, - {file = "cftime-1.6.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:455cec3627e6ca8694b0d9201da6581eb4381b58389f1fbcb51a14fa0e2b3d94"}, - {file = "cftime-1.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:29c18601abea0fd160fbe423e05c7a56fe1d38dd250a6b010de499a132d3fe18"}, - {file = "cftime-1.6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:afb5b38b51b8bc02f1656a9f15c52b0b20a3999adbe1ab9ac57f926e0065b48a"}, - {file = "cftime-1.6.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aedfb7a783d19d7a30cb41951310f3bfe98f9f21fffc723c8af08a11962b0b17"}, - {file = "cftime-1.6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3042048324b4d6a1066c978ec78101effdd84320e8862bfdbf8122d7ad7588ec"}, - {file = "cftime-1.6.2-cp37-none-win_amd64.whl", hash = "sha256:ee70fa069802652cf534de1dd3fc590b7d22d4127447bf96ac9849abcdadadf1"}, - {file = "cftime-1.6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:93f00f454329c1f2588ebca2650e8edf7607d6189dbdcc81b5f3be2080155cc4"}, - {file = "cftime-1.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e83db2fdda900eb154a9f79dfb665ac6190781c61d2e18151996de5ee7ffd8a2"}, - {file = "cftime-1.6.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56d0242fc4990584b265622622b25bb262a178097711d2d95e53ef52a9d23e7e"}, - {file = "cftime-1.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:055d5d60a756c6c1857cf84d77655bb707057bb6c4a4fbb104a550e76c40aad9"}, - {file = "cftime-1.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0955e1f3e1c09a9e0296b50f135ff9719cb2466f81c8ad4a10ef06fa394de984"}, - {file = "cftime-1.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:07fdef2f75a0f0952b0376fa4cd08ef8a1dad3b963976ac07517811d434936b7"}, - {file = "cftime-1.6.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:892d5dc38f8b998c83a2a01f131e63896d020586de473e1878f9e85acc70ad44"}, - {file = "cftime-1.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86fe550b94525c327578a90b2e13418ca5ba6c636d5efe3edec310e631757eea"}, - {file = "cftime-1.6.2.tar.gz", hash = "sha256:8614c00fb8a5046de304fdd86dbd224f99408185d7b245ac6628d0276596e6d2"}, -] - -[package.dependencies] -numpy = 
">1.13.3" - [[package]] name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -400,7 +368,6 @@ files = [ name = "cleo" version = "2.0.1" description = "Cleo allows you to create beautiful and testable command-line interfaces." -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -416,7 +383,6 @@ rapidfuzz = ">=2.2.0,<3.0.0" name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -431,7 +397,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -443,7 +408,6 @@ files = [ name = "commonmark" version = "0.9.1" description = "Python parser for the CommonMark Markdown spec" -category = "main" optional = false python-versions = "*" files = [ @@ -458,7 +422,6 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] name = "contextlib2" version = "21.6.0" description = "Backports and enhancements for the contextlib module" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -468,62 +431,71 @@ files = [ [[package]] name = "coverage" -version = "6.5.0" +version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = 
"coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = 
"coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = 
"sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = 
"coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [package.dependencies] @@ -536,7 +508,6 @@ toml = ["tomli"] name = "crashtest" version = "0.4.1" description = "Manage Python errors with ease" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -546,31 +517,30 @@ files = [ [[package]] name = "cryptography" -version = "40.0.2" +version = "41.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, - {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, - {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, - {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, + {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"}, + {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"}, + {file = 
"cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"}, + {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"}, + {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"}, + {file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"}, + {file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"}, + {file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"}, ] [package.dependencies] @@ -579,18 +549,17 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff"] -sdist = ["setuptools-rust (>=0.11.4)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -tox = ["tox"] [[package]] name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -602,7 +571,6 @@ files = [ name = "deepdiff" version = "5.8.1" description = "Deep Difference and Search of any Python object/data." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -620,7 +588,6 @@ cli = ["clevercsv (==0.7.1)", "click (==8.0.3)", "pyyaml (==5.4.1)", "toml (==0. name = "distlib" version = "0.3.6" description = "Distribution utilities" -category = "main" optional = false python-versions = "*" files = [ @@ -632,7 +599,6 @@ files = [ name = "dnspython" version = "2.3.0" description = "DNS toolkit" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -653,7 +619,6 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "dulwich" version = "0.21.5" description = "Python Git Library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -728,7 +693,6 @@ pgp = ["gpg"] name = "email-validator" version = "1.3.1" description = "A robust email address syntax and deliverability validation library." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -742,14 +706,13 @@ idna = ">=2.0.0" [[package]] name = "exceptiongroup" -version = "1.1.1" +version = "1.1.2" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, + {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, + {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, ] [package.extras] @@ -759,7 +722,6 @@ test = ["pytest (>=6)"] name = "execnet" version = "1.9.0" description = "execnet: rapid multi-Python deployment" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -770,44 +732,55 @@ files = [ [package.extras] testing = ["pre-commit"] +[[package]] +name = "fake-useragent" +version = "1.1.3" +description = "Up-to-date simple useragent faker with real world database" +optional = false +python-versions = "*" +files = [ + {file = "fake-useragent-1.1.3.tar.gz", hash = "sha256:1c06f0aa7d6e4894b919b30b9c7ebd72ff497325191057fbb5df3d5db06b93fc"}, + {file = "fake_useragent-1.1.3-py3-none-any.whl", hash = "sha256:695d3b1bf7d11d04ab0f971fb73b0ca8de98b78bbadfbc8bacbc9a48423f7531"}, +] + +[package.dependencies] +importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""} + [[package]] name = "filelock" -version = "3.12.0" +version = "3.12.2" description = "A platform independent file lock." 
-category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"}, - {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"}, + {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, + {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "flake8" -version = "3.9.2" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" +version = "2.3.0" +description = "the modular source code checker: pep8, pyflakes and co" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = "*" files = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, + {file = "flake8-2.3.0-py2.py3-none-any.whl", hash = "sha256:c99cc9716d6655d9c8bcb1e77632b8615bf0abd282d7abd9f5c2148cad7fc669"}, + {file = "flake8-2.3.0.tar.gz", hash = "sha256:5ee1a43ccd0716d6061521eec6937c983efa027793013e572712c4da55c7c83e"}, ] [package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" +mccabe = ">=0.2.1" +pep8 = ">=1.5.7" +pyflakes = ">=0.8.1" [[package]] name = "flask" version = "2.3.2" description = "A simple framework for building complex web applications." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -831,7 +804,6 @@ dotenv = ["python-dotenv"] name = "flask-cors" version = "3.0.10" description = "A Flask extension adding a decorator for CORS support" -category = "dev" optional = false python-versions = "*" files = [ @@ -847,7 +819,6 @@ Six = "*" name = "gitdb" version = "4.0.10" description = "Git Object Database" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -862,7 +833,6 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.31" description = "GitPython is a Python library used to interact with Git repositories" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -877,7 +847,6 @@ gitdb = ">=4.0.1,<5" name = "html5lib" version = "1.1" description = "HTML parser based on the WHATWG HTML specification" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -899,7 +868,6 @@ lxml = ["lxml"] name = "identify" version = "2.5.24" description = "File identification library for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -914,7 +882,6 @@ license = ["ukkonen"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -924,14 +891,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.6.0" +version = "6.7.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, - {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, ] [package.dependencies] @@ -940,13 +906,12 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "importlib-resources" version = "5.12.0" description = "Read resources from Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -965,7 +930,6 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -977,7 +941,6 @@ files = [ name = "installer" version = "0.7.0" description = "A library for installing Python wheels." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -989,7 +952,6 @@ files = [ name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1007,7 +969,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1019,7 +980,6 @@ files = [ name = "jaraco-classes" version = "3.2.3" description = "Utility functions for Python class constructs" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1038,7 +998,6 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "jeepney" version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1054,7 +1013,6 @@ trio = ["async_generator", "trio"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1072,7 +1030,6 @@ i18n = ["Babel (>=2.7)"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1084,7 +1041,6 @@ files = [ name = "jsonschema" version = "4.17.3" description = "An implementation of JSON Schema validation for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1106,7 +1062,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "keyring" version = "23.13.1" description = "Store and access your passwords safely." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1131,7 +1086,6 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "lockfile" version = "0.12.2" description = "Platform-independent file locking module" -category = "dev" optional = false python-versions = "*" files = [ @@ -1139,95 +1093,80 @@ files = [ {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, ] -[[package]] -name = "loremipsum" -version = "1.0.5" -description = "A Lorem Ipsum text generator" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "loremipsum-1.0.5.tar.gz", hash = "sha256:b849c69305c3f52badfe25ecc0495b991769d96cafdfd99014d17f50ee523af5"}, - {file = "loremipsum-1.0.5.zip", hash = "sha256:a38672c145c0e0790cb40403d46bee695e5e9a0350f0643199a012a18f65449a"}, -] - [[package]] name = "markupsafe" -version = "2.1.2" +version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
-category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = 
"MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] [[package]] name = "mccabe" -version = "0.6.1" +version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" files = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] [[package]] name = "mock" version = "5.0.2" description = "Rolling backport of unittest.mock for all Pythons" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1244,7 +1183,6 @@ test = ["pytest", "pytest-cov"] name = "more-itertools" version = "9.1.0" description = "More routines for operating on iterables, beyond itertools" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1254,14 +1192,13 @@ files = [ [[package]] name = "moto" -version = "4.1.9" +version = "4.1.12" description = "" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "moto-4.1.9-py2.py3-none-any.whl", hash = "sha256:d9f5d0e3d027df350ff3552da851644ce192cbf7e7a9e8766fca4b5b6b550df0"}, - {file = "moto-4.1.9.tar.gz", hash = "sha256:d4bb629686b8b92e480f9784316bd0f379b148a5caee7c07aecbde6033a885e1"}, + {file = "moto-4.1.12-py2.py3-none-any.whl", hash = "sha256:6f40141ff2f3a309c19faa169433afdf48d28733d328b08a843021ae36f005d9"}, + {file = "moto-4.1.12.tar.gz", hash = "sha256:25577e4cf55f05235f4efe78bcfeb5a7704fb75c16b426a5de2fc1e6b7b8545b"}, ] [package.dependencies] @@ -1276,17 +1213,17 @@ werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", 
"openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.3)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.2.8)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] apigatewayv2 = ["PyYAML (>=5.1)"] appsync = ["graphql-core"] awslambda = ["docker (>=3.0.0)"] batch = ["docker (>=3.0.0)"] -cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.3)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] ds = ["sshpubkeys (>=3.1.0)"] -dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.3.0)"] -dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.3.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.3.3)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.3.3)"] ebs = ["sshpubkeys (>=3.1.0)"] ec2 = ["sshpubkeys (>=3.1.0)"] efs = ["sshpubkeys (>=3.1.0)"] @@ -1294,8 +1231,8 @@ eks = ["sshpubkeys (>=3.1.0)"] glue = ["pyparsing (>=3.0.7)"] iotdata = ["jsondiff (>=1.1.2)"] route53resolver = ["sshpubkeys (>=3.1.0)"] -s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.3.0)"] -server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.3.3)"] +server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.3)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] ssm = ["PyYAML (>=5.1)"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] @@ -1303,7 +1240,6 @@ xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] name = "msgpack" version = "1.0.5" description = "MessagePack serializer" -category = "dev" optional = false python-versions = "*" files = [ @@ -1376,7 +1312,6 @@ files = [ name = "mypy" version = "0.931" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1415,7 +1350,6 @@ python2 = ["typed-ast (>=1.4.0,<2)"] name = "mypy-extensions" version = "1.0.0" description = "Type system 
extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1423,52 +1357,10 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "netcdf4" -version = "1.6.3" -description = "Provides an object-oriented python interface to the netCDF version 4 library" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "netCDF4-1.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ddd889dff168baa2fe4777e9117175ecd127e224304950786499744c8956d877"}, - {file = "netCDF4-1.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cb1647d2878a081b4a83fe6d5d5792d8befa59b2b18487bee93aae4b8efa0762"}, - {file = "netCDF4-1.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85384e575ddc7e329ca409d380a2d92bb52da5917a171055cf49435f0b6ce07e"}, - {file = "netCDF4-1.6.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:729544be6ca6a4507d4b3fdd46af578d17c834e1bb53719bebb90ac108035f6a"}, - {file = "netCDF4-1.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e62554a197a344858a526c0cc8340b56d4ab7708feab08528bb602150b8139b1"}, - {file = "netCDF4-1.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:6fc24dc9d39fee9206710f660b12eb3529d6817583a9de8391e62d4f9f4367fb"}, - {file = "netCDF4-1.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46e8404e3526047070f88ad3a65acb813ffccf41d8ff12ff823320be730ba66"}, - {file = "netCDF4-1.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a02a8cd53311a447e0c5c185a0d79c3b5d57d49cb1743459417b5e8ca18561bb"}, - {file = "netCDF4-1.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbfc767980f87c184f6d6fc9d5a164caa0895c0a6b1820c779f7ec7789c01b0e"}, - {file = "netCDF4-1.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1831f99bb84d8db5901a4ecb7e382bb8d93847269a48a56b3b3e18cd2b564fd8"}, - {file = "netCDF4-1.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:4f7735871ffc0c8fef710a6b8c5f5af04ed480fee2dc23e737a378f9630d9475"}, - {file = "netCDF4-1.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a006a912ca204f74f7ec625b1b8d8b06e2f8366fd5be46bacb5e20760684a852"}, - {file = "netCDF4-1.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9c0b86f603fdaf4723fb92f728265f456f9090544cd62abe27794046bee507"}, - {file = "netCDF4-1.6.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d6a4f7da760ee713aa1197e073c9f066476699dc0cd277428bd5256ebb3878a"}, - {file = "netCDF4-1.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:628d48a31e4b094e807252423e5d1c5b39d4d173fde5cdbe7a18da7626cb606c"}, - {file = "netCDF4-1.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c94f95ac1ff5590aeb1793eee10519422a9a02da0ece1daf7efa597eabb4e246"}, - {file = "netCDF4-1.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7c45f7f729cfdca8cb7b80373085036a08c88bd6f9d8bfcea559056206f3c3d"}, - {file = "netCDF4-1.6.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb06c09bd5a6b44d65d38f544b62f503ede05dacdc9d5cfa6da25b6f738da1bb"}, - {file = "netCDF4-1.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dfab2b84b3f29902515897c6b2b5a92327a78abbfde6ac3917dae80dd17835b"}, - {file = "netCDF4-1.6.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:969470f70c6fb51f9fe851cc55d29769c798aeb2de2cb97200f8dac498f299b4"}, - {file = "netCDF4-1.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1194e88dbf5b35dc344a1e520c19140a0a51cc328c32f35d04c5a17ffc623614"}, - {file = "netCDF4-1.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f6b6d88480052efb3e8b0dd56f773064ed00edcf6ab028ef8fe79a9a06179f43"}, - {file = "netCDF4-1.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5de65543a325451c1ea23b1361fef19da42f7e9874a92a4475c290a045db90b4"}, - {file = "netCDF4-1.6.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f94a89db78f34fdf68342840efb064fe1474310e8359dffce42e90a9ddf88f2f"}, - {file = "netCDF4-1.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccb1524eb4ea9ec1c4360070b12840784c4afa5e539d509b1f2a921f26e49f39"}, - {file = "netCDF4-1.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:269f3817604bfbd08c7c8acccb6a5863a0a0222425203d9548ad42cbb554254b"}, - {file = "netCDF4-1.6.3.tar.gz", hash = "sha256:8c98a3a8cda06920ee8bd24a71226ddf0328c22bd838b0afca9cb45fb4580d99"}, -] - -[package.dependencies] -cftime = "*" -numpy = "*" - [[package]] name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" -category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -1479,49 +1371,10 @@ files = [ [package.dependencies] setuptools = "*" -[[package]] -name = "numpy" -version = "1.24.3" -description = "Fundamental package for array computing in Python" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"}, - {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"}, - {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"}, - {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"}, - {file = "numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"}, - {file = "numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"}, - {file = "numpy-1.24.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078"}, - {file = "numpy-1.24.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c"}, - {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c"}, - {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f"}, - {file = "numpy-1.24.3-cp38-cp38-win32.whl", hash = "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4"}, - {file = "numpy-1.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"}, - {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"}, - {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"}, - {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"}, -] - [[package]] name = "ordered-set" version = "4.1.0" description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1536,7 +1389,6 @@ dev = ["black", "mypy", "pytest"] name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1548,7 +1400,6 @@ files = [ name = "path" version = "16.6.0" description = "A module wrapper for os.path" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1564,7 +1415,6 @@ testing = ["appdirs", "flake8 (<5)", "packaging", "pygments", "pytest (>=6)", "p name = "path-py" version = "12.5.0" description = "A module wrapper for os.path" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1583,7 +1433,6 @@ testing = ["appdirs", "packaging", "pygments", "pytest (>=3.5,!=3.7.3)", "pytest name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1595,7 +1444,6 @@ files = [ name = "pbr" version = "5.11.1" description = "Python Build Reasonableness" -category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -1603,11 +1451,21 @@ files = [ {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, ] +[[package]] +name = "pep8" +version = "1.7.1" +description = "Python style guide checker" +optional = false +python-versions = "*" +files = [ + {file = "pep8-1.7.1-py2.py3-none-any.whl", hash = "sha256:b22cfae5db09833bb9bd7c8463b53e1a9c9b39f12e304a8d0bba729c501827ee"}, + {file = "pep8-1.7.1.tar.gz", hash = "sha256:fe249b52e20498e59e0b5c5256aa52ee99fc295b26ec9eaa85776ffdb9fe6374"}, +] + [[package]] name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." -category = "dev" optional = false python-versions = "*" files = [ @@ -1622,7 +1480,6 @@ ptyprocess = ">=0.5" name = "pkginfo" version = "1.9.6" description = "Query metadata from sdists / bdists / installed packages." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1637,7 +1494,6 @@ testing = ["pytest", "pytest-cov"] name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1647,30 +1503,28 @@ files = [ [[package]] name = "platformdirs" -version = "2.6.2" +version = "3.8.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, - {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, + {file = "platformdirs-3.8.0-py3-none-any.whl", hash = "sha256:ca9ed98ce73076ba72e092b23d3c93ea6c4e186b3f1c3dad6edd98ff6ffcca2e"}, + {file = "platformdirs-3.8.0.tar.gz", hash = "sha256:b0cabcb11063d21a0b261d557acb0a9d2126350e63b70cdf7db6347baea456dc"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] [package.extras] @@ -1679,14 +1533,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "poetry" -version = 
"1.4.2" +version = "1.5.1" description = "Python dependency management and packaging made easy." -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "poetry-1.4.2-py3-none-any.whl", hash = "sha256:c39c483cde7930915c992f932c163994ce3d870765efb8235ad0139cd65f0c5b"}, - {file = "poetry-1.4.2.tar.gz", hash = "sha256:0bd580a42482579635e774c5286ef73b8df3427567123cdb128b286cec671b3c"}, + {file = "poetry-1.5.1-py3-none-any.whl", hash = "sha256:dfc7ce3a38ae216c0465694e2e674bef6eb1a2ba81aa47a26f9dc03362fe2f5f"}, + {file = "poetry-1.5.1.tar.gz", hash = "sha256:cc7ea4524d1a11558006224bfe8ba8ed071417d4eb5ef6c89decc6a37d437eeb"}, ] [package.dependencies] @@ -1705,56 +1558,50 @@ lockfile = ">=0.12.2,<0.13.0" packaging = ">=20.4" pexpect = ">=4.7.0,<5.0.0" pkginfo = ">=1.9.4,<2.0.0" -platformdirs = ">=2.5.2,<3.0.0" -poetry-core = "1.5.2" -poetry-plugin-export = ">=1.3.0,<2.0.0" +platformdirs = ">=3.0.0,<4.0.0" +poetry-core = "1.6.1" +poetry-plugin-export = ">=1.4.0,<2.0.0" pyproject-hooks = ">=1.0.0,<2.0.0" requests = ">=2.18,<3.0" -requests-toolbelt = ">=0.9.1,<0.11.0" +requests-toolbelt = ">=0.9.1,<2" shellingham = ">=1.5,<2.0" tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""} -tomlkit = ">=0.11.1,<0.11.2 || >0.11.2,<0.11.3 || >0.11.3,<1.0.0" +tomlkit = ">=0.11.4,<1.0.0" trove-classifiers = ">=2022.5.19" urllib3 = ">=1.26.0,<2.0.0" -virtualenv = [ - {version = ">=20.4.3,<20.4.5 || >20.4.5,<20.4.6 || >20.4.6,<21.0.0", markers = "sys_platform != \"win32\" or python_version != \"3.9\""}, - {version = ">=20.4.3,<20.4.5 || >20.4.5,<20.4.6 || >20.4.6,<20.16.6", markers = "sys_platform == \"win32\" and python_version == \"3.9\""}, -] +virtualenv = ">=20.22.0,<21.0.0" xattr = {version = ">=0.10.0,<0.11.0", markers = "sys_platform == \"darwin\""} [[package]] name = "poetry-core" -version = "1.5.2" +version = "1.6.1" description = "Poetry PEP 517 Build Backend" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "poetry_core-1.5.2-py3-none-any.whl", hash = "sha256:832d40a1ea5fd10c0f648d0575cadddc8b79f06f91d83a1f1a73a7e1dfacfbd7"}, - {file = "poetry_core-1.5.2.tar.gz", hash = "sha256:c6556c3b1ec5b8668e6ef5a4494726bc41d31907339425e194e78a6178436c14"}, + {file = "poetry_core-1.6.1-py3-none-any.whl", hash = "sha256:70707340447dee0e7f334f9495ae652481c67b32d8d218f296a376ac2ed73573"}, + {file = "poetry_core-1.6.1.tar.gz", hash = "sha256:0f9b0de39665f36d6594657e7d57b6f463cc10f30c28e6d1c3b9ff54c26c9ac3"}, ] [[package]] name = "poetry-plugin-export" -version = "1.3.1" +version = "1.4.0" description = "Poetry plugin to export the dependencies to various formats" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "poetry_plugin_export-1.3.1-py3-none-any.whl", hash = "sha256:941d7ba02a59671d6327b16dc6deecc9262477abbc120d728a500cf125bc1e06"}, - {file = "poetry_plugin_export-1.3.1.tar.gz", hash = "sha256:d949742757a8a5f0b5810495bffaf4ed8a767f2e2ffda9887cf72f896deabf84"}, + {file = "poetry_plugin_export-1.4.0-py3-none-any.whl", hash = "sha256:5d9186d6f77cf2bf35fc96bd11fe650cc7656e515b17d99cb65018d50ba22589"}, + {file = "poetry_plugin_export-1.4.0.tar.gz", hash = "sha256:f16974cd9f222d4ef640fa97a8d661b04d4fb339e51da93973f1bc9d578e183f"}, ] [package.dependencies] -poetry = ">=1.3.0,<2.0.0" -poetry-core = ">=1.3.0,<2.0.0" +poetry = ">=1.5.0,<2.0.0" +poetry-core = ">=1.6.0,<2.0.0" [[package]] name = "pre-commit" version = "2.21.0" description = "A framework for managing and maintaining multi-language 
pre-commit hooks." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1773,7 +1620,6 @@ virtualenv = ">=20.10.0" name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -1783,21 +1629,19 @@ files = [ [[package]] name = "pycodestyle" -version = "2.7.0" +version = "2.10.0" description = "Python style guide checker" -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" files = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, + {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, + {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, ] [[package]] name = "pycparser" version = "2.21" description = "C parser in Python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1807,48 +1651,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.7" +version = "1.10.11" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"}, - {file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"}, - {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"}, - {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"}, - {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"}, - {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"}, - {file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"}, - {file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"}, - {file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"}, - {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"}, - {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"}, - {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"}, - {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"}, - {file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"}, - {file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"}, - {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"}, - {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"}, - {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"}, - {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"}, - {file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"}, - {file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"}, - {file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"}, - {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"}, - {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"}, - {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"}, - {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"}, - {file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"}, - {file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"}, - {file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"}, - {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"}, - {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"}, - {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"}, - {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"}, - {file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"}, - {file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"}, - {file = "pydantic-1.10.7.tar.gz", hash = 
"sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"}, + {file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"}, + {file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"}, + {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"}, + {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"}, + {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"}, + {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"}, + {file = "pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"}, + {file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"}, + {file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"}, + {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"}, + {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"}, + {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"}, + {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"}, + {file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"}, + {file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"}, + {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"}, + {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"}, + {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"}, + {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"}, + {file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"}, + {file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"}, + {file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"}, + {file = 
"pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"}, + {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"}, + {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"}, + {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"}, + {file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"}, + {file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"}, + {file = "pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"}, + {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"}, + {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"}, + {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"}, + {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"}, + {file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"}, + {file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"}, + {file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"}, ] [package.dependencies] @@ -1863,7 +1706,6 @@ email = ["email-validator (>=1.0.3)"] name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1879,21 +1721,19 @@ toml = ["tomli (>=1.2.3)"] [[package]] name = "pyflakes" -version = "2.3.1" +version = "3.0.1" description = "passive checker of Python programs" -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" files = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, + {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, + {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, ] [[package]] name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1908,7 +1748,6 @@ plugins = ["importlib-metadata"] name = "pylama" version = "7.7.1" description = "pylama -- Code audit tool for python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1926,7 +1765,6 @@ pyflakes = ">=1.5.0" name = "pyproject-hooks" version = "1.0.0" description = "Wrappers to call pyproject.toml-based build backend hooks." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1941,7 +1779,6 @@ tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} name = "pyrsistent" version = "0.19.3" description = "Persistent/Functional/Immutable data structures" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1976,14 +1813,13 @@ files = [ [[package]] name = "pytest" -version = "7.3.1" +version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, - {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, ] [package.dependencies] @@ -1995,18 +1831,17 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" -version = "4.0.0" +version = "4.1.0" description = "Pytest plugin for measuring coverage." 
-category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] @@ -2020,7 +1855,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-dependency" version = "0.5.1" description = "Manage dependencies of tests" -category = "dev" optional = false python-versions = "*" files = [ @@ -2034,7 +1868,6 @@ pytest = ">=3.6.0" name = "pytest-fixture-config" version = "1.7.0" description = "Fixture configuration utils for py.test" -category = "dev" optional = false python-versions = "*" files = [ @@ -2050,14 +1883,13 @@ tests = ["six"] [[package]] name = "pytest-mock" -version = "3.10.0" +version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"}, - {file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"}, + {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, + {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, ] [package.dependencies] @@ -2070,7 +1902,6 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "pytest-shutil" version = "1.7.0" description = "A goodie-bag of unix shell and environment tools for py.test" -category = "dev" optional = false python-versions = "*" files = [ @@ -2094,7 +1925,6 @@ tests = ["pytest"] name = "pytest-virtualenv" version = "1.7.0" description = "Virtualenv fixture for py.test" -category = "dev" optional = false python-versions = "*" files = [ @@ -2115,7 +1945,6 @@ tests = ["mock"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2128,21 +1957,19 @@ six = ">=1.5" [[package]] name = "pywin32-ctypes" -version = "0.2.0" -description = "" -category = "dev" +version = "0.2.2" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" optional = false -python-versions = "*" +python-versions = ">=3.6" files = [ - {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, - {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, + {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, + {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, ] [[package]] name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "main" optional = false 
python-versions = ">=3.6" files = [ @@ -2192,7 +2019,6 @@ files = [ name = "rapidfuzz" version = "2.15.1" description = "rapid fuzzy string matching" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2295,14 +2121,13 @@ full = ["numpy"] [[package]] name = "requests" -version = "2.30.0" +version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "requests-2.30.0-py3-none-any.whl", hash = "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294"}, - {file = "requests-2.30.0.tar.gz", hash = "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] @@ -2317,14 +2142,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-mock" -version = "1.10.0" +version = "1.11.0" description = "Mock out responses from the requests package" -category = "dev" optional = false python-versions = "*" files = [ - {file = "requests-mock-1.10.0.tar.gz", hash = "sha256:59c9c32419a9fb1ae83ec242d98e889c45bd7d7a65d48375cc243ec08441658b"}, - {file = "requests_mock-1.10.0-py2.py3-none-any.whl", hash = "sha256:2fdbb637ad17ee15c06f33d31169e71bf9fe2bdb7bc9da26185be0dd8d842699"}, + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, ] [package.dependencies] @@ -2333,18 +2157,17 @@ six = "*" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testrepository (>=0.0.18)", "testtools"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "requests-toolbelt" -version = "0.10.1" +version = "1.0.0" description = "A utility belt for advanced users of python-requests" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "requests-toolbelt-0.10.1.tar.gz", hash = "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d"}, - {file = "requests_toolbelt-0.10.1-py2.py3-none-any.whl", hash = "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7"}, + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, ] [package.dependencies] @@ -2354,7 +2177,6 @@ requests = ">=2.0.1,<3.0.0" name = "responses" version = "0.23.1" description = "A utility library for mocking out the `requests` Python library." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2375,7 +2197,6 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy name = "rich" version = "11.2.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -2395,7 +2216,6 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] name = "s3transfer" version = "0.6.1" description = "An Amazon S3 Transfer Manager" -category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -2413,7 +2233,6 @@ crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] name = "secretstorage" version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2429,7 +2248,6 @@ jeepney = ">=0.6" name = "semver" version = "2.13.0" description = "Python helper for Semantic Versioning (http://semver.org/)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2439,26 +2257,24 @@ files = [ [[package]] name = "setuptools" -version = "67.7.2" +version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.7.2-py3-none-any.whl", hash = "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b"}, - {file = "setuptools-67.7.2.tar.gz", hash = "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"}, + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shellingham" version = "1.5.0.post1" description = "Tool to Detect Surrounding Shell" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2470,7 +2286,6 @@ files = [ name = "simplejson" version = "3.19.1" 
description = "Simple, fast, extensible JSON encoder/decoder for Python" -category = "main" optional = false python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2565,7 +2380,6 @@ files = [ name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2577,7 +2391,6 @@ files = [ name = "smmap" version = "5.0.0" description = "A pure Python implementation of a sliding window memory map manager" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2589,7 +2402,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" optional = false python-versions = "*" files = [ @@ -2601,7 +2413,6 @@ files = [ name = "stevedore" version = "5.1.0" description = "Manage dynamic plugins for Python applications" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2616,7 +2427,6 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" name = "termcolor" version = "2.3.0" description = "ANSI color formatting for output in terminal" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2631,7 +2441,6 @@ tests = ["pytest", "pytest-cov"] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2643,7 +2452,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2655,7 +2463,6 @@ files = [ name = "tomlkit" version = "0.11.8" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2665,50 +2472,46 @@ files = [ [[package]] name = "trove-classifiers" -version = "2023.5.2" +version = "2023.5.24" description = "Canonical source for classifiers on PyPI (pypi.org)." 
-category = "dev" optional = false python-versions = "*" files = [ - {file = "trove-classifiers-2023.5.2.tar.gz", hash = "sha256:c46d6e40a9581599b16c712e0164fec3764872a4085c673c07559787caedb867"}, - {file = "trove_classifiers-2023.5.2-py3-none-any.whl", hash = "sha256:0f3eceb7d16186211bcd7edafc7b7934399f738ed985998e4e557e52fe136a71"}, + {file = "trove-classifiers-2023.5.24.tar.gz", hash = "sha256:fd5a1546283be941f47540a135bdeae8fb261380a6a204d9c18012f2a1b0ceae"}, + {file = "trove_classifiers-2023.5.24-py3-none-any.whl", hash = "sha256:d9d7ae14fb90bf3d50bef99c3941b176b5326509e6e9037e622562d6352629d0"}, ] [[package]] name = "types-pyyaml" -version = "6.0.12.9" +version = "6.0.12.10" description = "Typing stubs for PyYAML" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-PyYAML-6.0.12.9.tar.gz", hash = "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6"}, - {file = "types_PyYAML-6.0.12.9-py3-none-any.whl", hash = "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8"}, + {file = "types-PyYAML-6.0.12.10.tar.gz", hash = "sha256:ebab3d0700b946553724ae6ca636ea932c1b0868701d4af121630e78d695fc97"}, + {file = "types_PyYAML-6.0.12.10-py3-none-any.whl", hash = "sha256:662fa444963eff9b68120d70cda1af5a5f2aa57900003c2006d7626450eaae5f"}, ] [[package]] name = "typing-extensions" -version = "4.5.0" +version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, - {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] name = "urllib3" -version = "1.26.15" +version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, - {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, + {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, + {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, ] [package.extras] @@ -2720,7 +2523,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "validators" version = "0.18.2" description = "Python Data Validation for Humans™." 
-category = "main" optional = false python-versions = ">=3.4" files = [ @@ -2737,51 +2539,28 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] [[package]] name = "virtualenv" -version = "20.16.5" -description = "Virtual Python Environment builder" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ - {file = "virtualenv-20.16.5-py3-none-any.whl", hash = "sha256:d07dfc5df5e4e0dbc92862350ad87a36ed505b978f6c39609dc489eadd5b0d27"}, - {file = "virtualenv-20.16.5.tar.gz", hash = "sha256:227ea1b9994fdc5ea31977ba3383ef296d7472ea85be9d6732e42a91c04e80da"}, -] - -[package.dependencies] -distlib = ">=0.3.5,<1" -filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<3" - -[package.extras] -docs = ["proselint (>=0.13)", "sphinx (>=5.1.1)", "sphinx-argparse (>=0.3.1)", "sphinx-rtd-theme (>=1)", "towncrier (>=21.9)"] -testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "virtualenv" -version = "20.21.1" +version = "20.23.1" description = "Virtual Python Environment builder" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.21.1-py3-none-any.whl", hash = "sha256:09ddbe1af0c8ed2bb4d6ed226b9e6415718ad18aef9fa0ba023d96b7a8356049"}, - {file = "virtualenv-20.21.1.tar.gz", hash = "sha256:4c104ccde994f8b108163cf9ba58f3d11511d9403de87fb9b4f52bf33dbc8668"}, + {file = "virtualenv-20.23.1-py3-none-any.whl", hash = "sha256:34da10f14fea9be20e0fd7f04aba9732f84e593dac291b757ce42e3368a39419"}, + {file = "virtualenv-20.23.1.tar.gz", hash = "sha256:8ff19a38c1021c742148edc4f81cb43d7f8c6816d2ede2ab72af5b84c749ade1"}, ] [package.dependencies] distlib = ">=0.3.6,<1" -filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<4" +filelock = ">=3.12,<4" +platformdirs = ">=3.5.1,<4" [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezer (>=0.4.6)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.8)", "time-machine (>=2.9)"] [[package]] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "dev" optional = false python-versions = "*" files = [ @@ -2791,14 +2570,13 @@ files = [ [[package]] name = "werkzeug" -version = "2.3.4" +version = "2.3.6" description = "The comprehensive WSGI web application library." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "Werkzeug-2.3.4-py3-none-any.whl", hash = "sha256:48e5e61472fee0ddee27ebad085614ebedb7af41e88f687aaf881afb723a162f"}, - {file = "Werkzeug-2.3.4.tar.gz", hash = "sha256:1d5a58e0377d1fe39d061a5de4469e414e78ccb1e1e59c0f5ad6fa1c36c52b76"}, + {file = "Werkzeug-2.3.6-py3-none-any.whl", hash = "sha256:935539fa1413afbb9195b24880778422ed620c0fc09670945185cce4d91a8890"}, + {file = "Werkzeug-2.3.6.tar.gz", hash = "sha256:98c774df2f91b05550078891dee5f0eb0cb797a522c757a2452b9cee5b202330"}, ] [package.dependencies] @@ -2811,7 +2589,6 @@ watchdog = ["watchdog (>=2.3)"] name = "xattr" version = "0.10.1" description = "Python wrapper for extended filesystem attributes" -category = "dev" optional = false python-versions = "*" files = [ @@ -2896,7 +2673,6 @@ cffi = ">=1.0" name = "xmltodict" version = "0.13.0" description = "Makes working with XML feel like you are working with JSON" -category = "dev" optional = false python-versions = ">=3.4" files = [ @@ -2908,7 +2684,6 @@ files = [ name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2926,4 +2701,4 @@ all = [] [metadata] lock-version = "2.0" python-versions = "^3.8.0,<4.0" -content-hash = "2c771870de73f8de6878039123f9a7c5af76fc3a926c76edd2d59345a7ca1d86" +content-hash = "c5c00f92c8a2d3cecd6ff3b050215ef37f262fdb8de67772858fe70518f865fe" diff --git a/pyproject.toml b/pyproject.toml index 7eb86fb..c456ca4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ ] description = "Synchronization interface for the SCRC FAIR Data Pipeline registry" name = "fair-cli" -version = "0.7.3" +version = "0.8.0" homepage = "https://www.fairdatapipeline.org/" @@ -64,28 +64,27 @@ semver = "^2.13.0" simplejson = "^3.17.5" toml = "^0.10.2" validators = "^0.18.2" -netCDF4 = "^1.5.8" +fake-useragent = "^1" [tool.poetry.dev-dependencies] -bandit = "^1.7.2" -black = "^22.1" -coverage = "^6.3" +bandit = "*" +black = "*" +coverage = "7.2.*" deepdiff = "^5.5.0" -flake8 = "^3.9.2" +flake8 = "*" isort = "^5.10.1" -loremipsum = "^1.0.5" mypy = "^0.931" poetry = "^1.1.12" -pre-commit = "^2.16.0" +pre-commit = "^2.17.0" pycodestyle = "^2.7.0" pydocstyle = "^6.0.0" pylama = "^7.7.1" -pytest = "^7.0.0" -pytest-cov = "^4.0.0" -pytest-dependency = "^0.5.1" -pytest-mock = "^3.7.0" -pytest-virtualenv = "^1.7.0" -requests-mock = "^1.9.3" +pytest = "*" +pytest-cov = "*" +pytest-dependency = "*" +pytest-mock = "*" +pytest-virtualenv = "*" +requests-mock = "*" boto3 = "^1.24" moto = "^4.0.3" Flask = "^2.2.2" @@ -96,7 +95,7 @@ fair = 'fair.cli:cli' [build-system] build-backend = "poetry.core.masonry.api" -requires = ["poetry-core>=1.0.0"] +requires = ["poetry>=1.0.0"] [tool.poetry.urls] "Issue Tracker" = "https://github.com/FAIRDataPipeline/FAIR-CLI/issues" diff --git a/pytest.ini b/pytest.ini index 71e50a1..a5875f2 100644 --- a/pytest.ini +++ b/pytest.ini @@ -16,4 +16,5 @@ markers= faircli_staging: tests for the 'staging' submodule faircli_cli: tests for the CLI itself faircli_sync: sync tests + faircli_register: CLI Register Tests faircli_user_config: User Configuration Tests diff --git a/tests/conftest.py b/tests/conftest.py index 09f48a8..8de0971 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,10 +2,10 @@ import os import shutil import signal -import tempfile import time import typing import subprocess +import platform import git import pytest @@ 
-18,6 +18,9 @@ import os from moto.server import ThreadedMotoServer +import requests +from urllib3.exceptions import InsecureRequestWarning + import fair.common as fdp_com import fair.registry.server as fdp_serv import fair.testing as fdp_test @@ -34,6 +37,13 @@ logging.getLogger("FAIRDataPipeline").setLevel(logging.DEBUG) +def test_can_be_run(url): + _header = {"Accept": "application/json"} + requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) + _response = requests.get(url, verify = False, headers = _header, allow_redirects = True) + if _response.status_code == 200: + return True + return False def get_example_entries(registry_dir: str): """ @@ -77,16 +87,22 @@ def get_example_entries(registry_dir: str): @pytest.fixture(scope="module") -def pyDataPipeline(): - with tempfile.TemporaryDirectory() as _repo_path: - _repo = git.Repo.clone_from(PYTHON_API_GIT, _repo_path) - _repo.git.checkout("dev") - _model_path = os.path.join(_repo_path, "model") - _model = git.Repo.clone_from(PYTHON_MODEL_GIT, _model_path) - _model.git.checkout("main") - simple_model = os.path.join(_model_path, "simpleModel") - shutil.move(simple_model, _repo_path) - yield _repo_path +def pyDataPipeline(tmp_path_factory): + _repo_path = tmp_path_factory.mktemp("repo_path").__str__() + _repo = git.Repo.clone_from(PYTHON_API_GIT, _repo_path) + _repo.git.checkout("main") + _model_path = os.path.join(_repo_path, "model") + _model = git.Repo.clone_from(PYTHON_MODEL_GIT, _model_path) + _model.git.checkout("main") + simple_model = os.path.join(_model_path, "simpleModel") + shutil.move(simple_model, _repo_path) + yield _repo_path + +@pytest.fixture() +def pySimpleModel(tmp_path): + _repo_path = tmp_path.__str__() + _repo = git.Repo.clone_from(PYTHON_MODEL_GIT, _repo_path) + yield _repo_path @pytest.fixture(scope="session") @pytest_fixture_config.yield_requires_config( @@ -124,80 +140,75 @@ def monkeypatch_module(): m.undo() @pytest.fixture -def local_config(mocker: pytest_mock.MockerFixture): - with tempfile.TemporaryDirectory() as tempg: - os.makedirs(os.path.join(tempg, fdp_com.FAIR_FOLDER, "registry")) - os.makedirs(os.path.join(tempg, fdp_com.FAIR_FOLDER, "sessions")) - _gconfig_path = os.path.join( - tempg, fdp_com.FAIR_FOLDER, fdp_com.FAIR_CLI_CONFIG - ) - _cfgg = fdp_test.create_configurations(tempg, None, None, tempg, True) - yaml.dump(_cfgg, open(_gconfig_path, "w")) - mocker.patch( - "fair.common.global_config_dir", - lambda: os.path.dirname(_gconfig_path), - ) - mocker.patch("fair.common.global_fdpconfig", lambda: _gconfig_path) - - with open(fdp_com.registry_session_port_file(), "w") as pf: - pf.write("8001") - - with tempfile.TemporaryDirectory() as templ: - os.makedirs(os.path.join(templ, fdp_com.FAIR_FOLDER)) - _lconfig_path = os.path.join( - templ, fdp_com.FAIR_FOLDER, fdp_com.FAIR_CLI_CONFIG - ) - _cfgl = fdp_test.create_configurations( - templ, None, None, templ, True - ) - yaml.dump(_cfgl, open(_lconfig_path, "w")) - with open( - os.path.join(templ, fdp_com.USER_CONFIG_FILE), "w" - ) as conf: - yaml.dump({"run_metadata": {}}, conf) - mocker.patch("fair.common.find_fair_root", lambda *args: templ) - yield (tempg, templ) +def local_config(mocker: pytest_mock.MockerFixture, tmp_path): + tempg = os.path.join(tmp_path, "tempg").__str__() + templ = os.path.join(tmp_path, "tempd").__str__() + os.makedirs(os.path.join(tempg, fdp_com.FAIR_FOLDER, "registry")) + os.makedirs(os.path.join(tempg, fdp_com.FAIR_FOLDER, "sessions")) + _gconfig_path = os.path.join( + tempg, fdp_com.FAIR_FOLDER, 
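As an aside to the conftest.py hunk above: the new `test_can_be_run` helper only checks that an external endpoint answers with HTTP 200, so identifier tests can be skipped when GitHub, ORCID or ROR are unreachable. A minimal standalone sketch of the same idea follows; the timeout, the exception guard and the ORCID query URL are my own assumptions, not part of the patch.

import warnings

import pytest
import requests
from urllib3.exceptions import InsecureRequestWarning


def api_reachable(url: str, timeout: float = 10.0) -> bool:
    """Return True only if a GET against `url` answers with HTTP 200."""
    # Mirrors the fixture: JSON Accept header, redirects followed, TLS
    # verification (and its warning) switched off for test registries.
    requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
    try:
        _response = requests.get(
            url,
            headers={"Accept": "application/json"},
            verify=False,
            allow_redirects=True,
            timeout=timeout,
        )
    except requests.RequestException:
        return False
    return _response.status_code == 200


def test_orcid_lookup_guarded():
    # Hypothetical ORCID query URL, shown purely for illustration.
    _url = "https://pub.orcid.org/v3.0/0000-0002-6773-1049"
    if not api_reachable(_url):
        warnings.warn("ORCID API unavailable")
        pytest.skip("Cannot reach ORCID API")
    # ... the actual identifier assertions would follow here.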
fdp_com.FAIR_CLI_CONFIG + ) + _cfgg = fdp_test.create_configurations(tempg, None, None, tempg, True) + yaml.dump(_cfgg, open(_gconfig_path, "w")) + mocker.patch( + "fair.common.global_config_dir", + lambda: os.path.dirname(_gconfig_path), + ) + mocker.patch("fair.common.global_fdpconfig", lambda: _gconfig_path) + with open(fdp_com.registry_session_port_file(), "w") as pf: + pf.write("8001") + os.makedirs(os.path.join(templ, fdp_com.FAIR_FOLDER)) + _lconfig_path = os.path.join( + templ, fdp_com.FAIR_FOLDER, fdp_com.FAIR_CLI_CONFIG + ) + _cfgl = fdp_test.create_configurations( + templ, None, None, templ, True + ) + yaml.dump(_cfgl, open(_lconfig_path, "w")) + with open( + os.path.join(templ, fdp_com.USER_CONFIG_FILE), "w" + ) as conf: + yaml.dump({"run_metadata": {}}, conf) + mocker.patch("fair.common.find_fair_root", lambda *args: templ) + yield (tempg, templ) @pytest.fixture(scope = "module") -def global_config(monkeypatch_module): - with tempfile.TemporaryDirectory() as tempg: - os.makedirs(os.path.join(tempg, fdp_com.FAIR_FOLDER, "registry")) - os.makedirs(os.path.join(tempg, fdp_com.FAIR_FOLDER, "sessions")) - _gconfig_path = os.path.join( - tempg, fdp_com.FAIR_FOLDER, fdp_com.FAIR_CLI_CONFIG - ) - _cfgg = fdp_test.create_configurations(tempg, None, None, tempg, True) - yaml.dump(_cfgg, open(_gconfig_path, "w")) - monkeypatch_module.setattr( - "fair.common.global_config_dir", - lambda: os.path.dirname(_gconfig_path), - ) - monkeypatch_module.setattr("fair.common.global_fdpconfig", lambda: _gconfig_path) - yield (tempg) - -@pytest.fixture -def job_directory(mocker: pytest_mock.MockerFixture) -> str: - with tempfile.TemporaryDirectory() as tempd: - # Set default to point to temporary - mocker.patch("fair.common.default_jobs_dir", lambda *args: tempd) - - # Create a mock job directory - os.makedirs(os.path.join(tempd, TEST_JOB_FILE_TIMESTAMP)) - yield os.path.join(tempd, TEST_JOB_FILE_TIMESTAMP) +def global_config(monkeypatch_module, tmp_path_factory): + tempg = tmp_path_factory.mktemp("tempg").__str__() + os.makedirs(os.path.join(tempg, fdp_com.FAIR_FOLDER, "registry")) + os.makedirs(os.path.join(tempg, fdp_com.FAIR_FOLDER, "sessions")) + _gconfig_path = os.path.join( + tempg, fdp_com.FAIR_FOLDER, fdp_com.FAIR_CLI_CONFIG + ) + _cfgg = fdp_test.create_configurations(tempg, None, None, tempg, True) + yaml.dump(_cfgg, open(_gconfig_path, "w")) + monkeypatch_module.setattr( + "fair.common.global_config_dir", + lambda: os.path.dirname(_gconfig_path), + ) + monkeypatch_module.setattr("fair.common.global_fdpconfig", lambda: _gconfig_path) + yield (tempg) +@pytest.fixture(scope = "module") +def job_directory(monkeypatch_module, tmp_path_factory) -> str: + tempd = tmp_path_factory.mktemp("tempd").__str__() + # Set default to point to temporary + monkeypatch_module.setattr("fair.common.default_jobs_dir", lambda *args: tempd) + # Create a mock job directory + os.makedirs(os.path.join(tempd, TEST_JOB_FILE_TIMESTAMP)) + yield os.path.join(tempd, TEST_JOB_FILE_TIMESTAMP) -@pytest.fixture -def job_log(mocker: pytest_mock.MockerFixture) -> str: - with tempfile.TemporaryDirectory() as tempd: - # Set the log directory - mocker.patch("fair.history.history_directory", lambda *args: tempd) - - # Create mock job log - with open( - os.path.join(tempd, f"job_{TEST_JOB_FILE_TIMESTAMP}.log"), "w" - ) as out_f: - out_f.write( - """-------------------------------- +@pytest.fixture(scope = "module") +def job_log(monkeypatch_module, tmp_path_factory) -> str: + tempd = tmp_path_factory.mktemp("tempd").__str__() + # Set 
the log directory + monkeypatch_module.setattr("fair.history.history_directory", lambda *args: tempd) + # Create mock job log + with open( + os.path.join(tempd, f"job_{TEST_JOB_FILE_TIMESTAMP}.log"), "w" + ) as out_f: + out_f.write( + """-------------------------------- Commenced = Fri Oct 08 14:45:43 2021 Author = Interface Test Command = fair pull @@ -205,7 +216,7 @@ def job_log(mocker: pytest_mock.MockerFixture) -> str: ------- time taken 0:00:00.791088 -------""" ) - yield tempd + yield tempd class RegistryTest: @@ -236,14 +247,15 @@ def __init__( remote= remote ) while not os.path.exists(os.path.join(self._install, "token")): - time.sleep(5) + time.sleep(3) self._token = open(os.path.join(self._install, "token")).read().strip() assert self._token - os.kill(_process.pid, signal.SIGTERM) + pid_kill(_process.pid) def rebuild(self): + _venv_bin_dir = "Scripts" if platform.system() == "Windows" else "bin" test_reg.rebuild_local( - os.path.join(self._venv_dir, "bin", "python"), self._install, remote=self._remote + os.path.join(self._venv_dir, _venv_bin_dir, "python"), self._install, remote=self._remote ) def launch(self): @@ -257,7 +269,7 @@ def launch(self): def kill(self): if self._process: - os.kill(self._process.pid, signal.SIGTERM) + pid_kill(self._process.pid) def __enter__(self): try: @@ -270,15 +282,15 @@ def __enter__(self): ) except KeyboardInterrupt as e: - os.kill(self._process.pid, signal.SIGTERM) + pid_kill(self._process.pid) raise e def __exit__(self, type, value, tb): - os.kill(self._process.pid, signal.SIGTERM) + pid_kill(self._process.pid) self._process = None @pytest.fixture(scope="module") -def local_registry(session_virtualenv: pytest_virtualenv.VirtualEnv): +def local_registry(session_virtualenv: pytest_virtualenv.VirtualEnv, tmp_path_factory): if fdp_serv.check_server_running("http://127.0.0.1:8000"): pytest.skip( "Cannot run registry tests, a server is already running on port 8000" @@ -286,15 +298,15 @@ def local_registry(session_virtualenv: pytest_virtualenv.VirtualEnv): session_virtualenv.env = test_reg.django_environ( session_virtualenv.env ) - with tempfile.TemporaryDirectory() as tempd: - rtest = RegistryTest(tempd, session_virtualenv, port=8000) - yield rtest + tempd = tmp_path_factory.mktemp("tempd").__str__() + rtest = RegistryTest(tempd, session_virtualenv, port=8000) + yield rtest if rtest._process: - os.kill(rtest._process.pid, signal.SIGTERM) + pid_kill(rtest._process.pid) print("TearDown of Local Registry Complete") @pytest.fixture(scope="module") -def remote_registry(session_virtualenv: pytest_virtualenv.VirtualEnv): +def remote_registry(session_virtualenv: pytest_virtualenv.VirtualEnv, tmp_path_factory): if fdp_serv.check_server_running("http://127.0.0.1:8001"): pytest.skip( "Cannot run registry tests, a server is already running on port 8001" @@ -302,11 +314,11 @@ def remote_registry(session_virtualenv: pytest_virtualenv.VirtualEnv): session_virtualenv.env = test_reg.django_environ( session_virtualenv.env, True ) - with tempfile.TemporaryDirectory() as tempd: - rtest = RegistryTest(tempd, session_virtualenv, port=8001, remote= True) - yield rtest + tempd = tmp_path_factory.mktemp("tempd").__str__() + rtest = RegistryTest(tempd, session_virtualenv, port=8001, remote= True) + yield rtest if rtest._process: - os.kill(rtest._process.pid, signal.SIGTERM) + pid_kill(rtest._process.pid) print("TearDown of Remote Registry Complete") @pytest.fixture(scope="module") @@ -339,3 +351,8 @@ def __enter__(self): def __exit__(self, type, value, tb): 
         self._server.stop()
+def pid_kill(pid):
+    if platform.system() == "Windows":
+        subprocess.call(['taskkill', '/F', '/T', '/PID', str(pid)])
+    else:
+        os.kill(pid, signal.SIGTERM)
diff --git a/tests/data/test1.csv b/tests/data/test1.csv
new file mode 100644
index 0000000..56a6051
--- /dev/null
+++ b/tests/data/test1.csv
@@ -0,0 +1 @@
+1
\ No newline at end of file
diff --git a/tests/data/test2.csv b/tests/data/test2.csv
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/tests/data/test2.csv
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/tests/data/test3.csv b/tests/data/test3.csv
new file mode 100644
index 0000000..e440e5c
--- /dev/null
+++ b/tests/data/test3.csv
@@ -0,0 +1 @@
+3
\ No newline at end of file
diff --git a/tests/data/test4.csv b/tests/data/test4.csv
new file mode 100644
index 0000000..bf0d87a
--- /dev/null
+++ b/tests/data/test4.csv
@@ -0,0 +1 @@
+4
\ No newline at end of file
diff --git a/tests/data/test_register.yaml b/tests/data/test_register.yaml
new file mode 100644
index 0000000..704642c
--- /dev/null
+++ b/tests/data/test_register.yaml
@@ -0,0 +1,46 @@
+run_metadata:
+  description: Test Register Data Products
+  default_input_namespace: TestDataProduct
+  default_output_namespace: TestDataProduct
+  script: echo test
+
+register:
+- namespace: TestNameSpace
+  full_name: FairDataPipeline Namespace Test
+
+- data_product: Namespace/test/no/namespace
+  root:
+  path: test1.csv
+  file_type: csv
+  version: "1.0.0"
+
+- data_product: Namespace/test/use/namespace
+  root:
+  path: test2.csv
+  file_type: csv
+  version: "1.0.0"
+  use:
+    namespace: PSU
+
+- data_product: Namespace/test/namespace/name
+  root:
+  path: test3.csv
+  file_type: csv
+  version: "1.0.0"
+  namespace_name: TestNameSpace
+
+- data_product: Namespace/test/use/namespace/unregistered
+  root:
+  path: test4.csv
+  file_type: csv
+  version: "1.0.0"
+  use:
+    namespace: UnregisteredNamespace
+
+- data_product: Namespace/test/use/same/file
+  root:
+  path: test4.csv
+  file_type: csv
+  version: "1.0.0"
+  use:
+    namespace: UnregisteredNamespace
\ No newline at end of file
diff --git a/tests/registry_install.py b/tests/registry_install.py
index ca008bf..d1accbd 100644
--- a/tests/registry_install.py
+++ b/tests/registry_install.py
@@ -6,12 +6,14 @@
 import subprocess
 import time
 import typing
+import platform
 
 import click
 import git
 import requests
 
 from fair.common import FAIR_FOLDER
+from fair.common import remove_readonly
 from fair.virtualenv import FAIREnv
 
 FAIR_REGISTRY_REPO = "https://github.com/FAIRDataPipeline/data-registry.git"
@@ -104,7 +106,7 @@ def rebuild_local(
 
 def install_registry(
     repository: str = FAIR_REGISTRY_REPO,
-    reference: str = "remote-test-registry",
+    reference: str = None,
     install_dir: str = None,
     silent: bool = False,
     force: bool = False,
@@ -118,7 +120,7 @@
         )
 
     if force:
-        shutil.rmtree(install_dir, ignore_errors=True)
+        shutil.rmtree(install_dir, onerror=remove_readonly)
 
     os.makedirs(os.path.dirname(install_dir), exist_ok=True)
 
@@ -138,7 +140,8 @@ def install_registry(
 
     _venv.create(venv_dir)
 
-    _venv_python = shutil.which("python", path=os.path.join(venv_dir, "bin"))
+    _venv_bin_dir = "Scripts" if platform.system() == "Windows" else "bin"
+    _venv_python = shutil.which("python", path=os.path.join(venv_dir, _venv_bin_dir))
 
     if not _venv_python:
         raise FileNotFoundError(
@@ -200,7 +203,8 @@ def refresh(
             f"Location '{install_dir}' is not a valid registry install"
        )
 
-    _venv_python = shutil.which("python", path=os.path.join(_venv_dir, "bin"))
+    _venv_bin_dir = 
"Scripts" if platform.system() == "Windows" else "bin" + _venv_python = shutil.which("python", path=os.path.join(_venv_dir, _venv_bin_dir)) rebuild_local(_venv_python, install_dir, silent, remote) @@ -226,7 +230,9 @@ def launch( _manage = os.path.join(install_dir, "manage.py") - _venv_python = shutil.which("python", path=os.path.join(_venv_dir, "bin")) + _venv_bin_dir = "Scripts" if platform.system() == "Windows" else "bin" + + _venv_python = shutil.which("python", path=os.path.join(_venv_dir, _venv_bin_dir)) with open(os.path.join(install_dir, "session_port.log"), "w") as out_f: out_f.write(str(port)) @@ -304,11 +310,19 @@ def stop(install_dir: str = None, port: int = 8000, silent: bool = False): _manage = os.path.join(install_dir, "manage.py") + if platform.system() == "Windows": + _call = os.path.join( + install_dir, "scripts", "stop_fair_registry_windows.bat" + ) + else: + _call = ["pgrep", "-f", f'"{_manage} runserver"', "|", "xargs", "kill"] + subprocess.check_call( - ["pgrep", "-f", f'"{_manage} runserver"', "|", "xargs", "kill"], + _call, env=django_environ(), shell=False, ) + try: requests.get(f"http://127.0.0.1:{port}/api") raise AssertionError("Expected registry termination") diff --git a/tests/test_cli.py b/tests/test_cli.py index 962c662..7874a12 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -11,10 +11,11 @@ import os import shutil import sys -import tempfile import typing import uuid +import platform from urllib.parse import urljoin +from pathlib import Path import click.testing import git @@ -30,7 +31,6 @@ LOCAL_REGISTRY_URL = "http://127.0.0.1:8000/api" - @pytest.fixture def click_test(): click_test = click.testing.CliRunner() @@ -39,7 +39,6 @@ def click_test(): _repo.create_remote("origin", "git@notagit.com") yield click_test - @pytest.mark.faircli_cli def test_status( local_config: typing.Tuple[str, str], @@ -131,262 +130,245 @@ def test_create( assert _result.exit_code == 0 assert os.path.exists(_out_config) - @pytest.mark.faircli_cli def test_init_from_existing( local_registry: conf.RegistryTest, click_test: click.testing.CliRunner, mocker: pytest_mock.MockerFixture, + tmp_path, + pySimpleModel ): mocker.patch("fair.common.registry_home", lambda: local_registry._install) _out_config = os.path.join(os.getcwd(), fdp_com.USER_CONFIG_FILE) - with tempfile.TemporaryDirectory() as tempd: - _out_cli_config = os.path.join(tempd, "cli-config.yaml") - with local_registry: - _result = click_test.invoke( - cli, - [ - "init", - "--debug", - "--ci", - "--registry", - local_registry._install, - "--config", - _out_config, - "--export", - _out_cli_config, - ], - ) - assert _result.exit_code == 0 - assert os.path.exists(_out_cli_config) - assert os.path.exists(_out_config) - assert os.path.exists( - os.path.join(os.getcwd(), fdp_com.FAIR_FOLDER) - ) - - click_test = click.testing.CliRunner() - click_test.isolated_filesystem() - - _result = click_test.invoke( - cli, ["init", "--debug", "--using", _out_cli_config] - ) - + tempd = tmp_path.__str__() + _out_cli_config = os.path.join(tempd, "cli-config.yaml") + with local_registry: + _result = click_test.invoke( + cli, + [ + "init", + "--debug", + "--ci", + "--registry", + local_registry._install, + "--config", + _out_config, + "--export", + _out_cli_config, + ], + ) assert _result.exit_code == 0 - - assert os.path.exists(os.path.join(os.getcwd(), fdp_com.FAIR_FOLDER)) - + assert os.path.exists(_out_cli_config) + assert os.path.exists(_out_config) + assert os.path.exists( + os.path.join(os.getcwd(), fdp_com.FAIR_FOLDER) + ) + 
click_test = click.testing.CliRunner() + click_test.isolated_filesystem() + _result = click_test.invoke( + cli, ["init", "--debug", "--using", _out_cli_config] + ) + assert _result.exit_code == 0 + assert os.path.exists(os.path.join(os.getcwd(), fdp_com.FAIR_FOLDER)) @pytest.mark.faircli_cli def test_init_from_env( local_registry: conf.RegistryTest, click_test: click.testing.CliRunner, mocker: pytest_mock.MockerFixture, + tmp_path, + pySimpleModel ): mocker.patch("fair.common.registry_home", lambda: local_registry._install) - _out_config = os.path.join(os.getcwd(), fdp_com.USER_CONFIG_FILE) - - with tempfile.TemporaryDirectory() as tempd: - _out_cli_config = os.path.join(tempd, "cli-config.yaml") - _env = os.environ.copy() - _env["FAIR_REGISTRY_DIR"] = local_registry._install - with local_registry: - _result = click_test.invoke( - cli, - [ - "init", - "--debug", - "--ci", - "--config", - _out_config, - "--export", - _out_cli_config, - ], - env=_env, - ) - assert _result.exit_code == 0 - assert os.path.exists(_out_cli_config) - assert os.path.exists(_out_config) - assert os.path.exists( - os.path.join(os.getcwd(), fdp_com.FAIR_FOLDER) - ) - - click_test = click.testing.CliRunner() - click_test.isolated_filesystem() - - _result = click_test.invoke( - cli, ["init", "--debug", "--using", _out_cli_config] - ) + _out_config = os.path.normpath(os.path.join(os.getcwd(), fdp_com.USER_CONFIG_FILE)) + tempd = tmp_path.__str__() + _out_cli_config = os.path.join(tempd, "cli-config.yaml") + _env = os.environ.copy() + _env["FAIR_REGISTRY_DIR"] = local_registry._install + with local_registry: + _result = click_test.invoke( + cli, + [ + "init", + "--debug", + "--ci", + "--config", + _out_config, + "--export", + _out_cli_config, + ], + env=_env, + ) assert _result.exit_code == 0 - - assert os.path.exists(os.path.join(os.getcwd(), fdp_com.FAIR_FOLDER)) - + assert os.path.exists(_out_cli_config) + assert os.path.exists(_out_config) + assert os.path.exists( + os.path.join(os.getcwd(), fdp_com.FAIR_FOLDER) + ) + click_test = click.testing.CliRunner() + click_test.isolated_filesystem() + _result = click_test.invoke( + cli, ["init", "--debug", "--using", _out_cli_config] + ) + assert _result.exit_code == 0 + assert os.path.exists(os.path.join(os.getcwd(), fdp_com.FAIR_FOLDER)) @pytest.mark.faircli_cli def test_init_full( local_registry: conf.RegistryTest, click_test: click.testing.CliRunner, mocker: pytest_mock.MockerFixture, + monkeypatch: pytest.MonkeyPatch, + pySimpleModel ): - mocker.patch("fair.common.registry_home", lambda: local_registry._install) mocker.patch( "fair.registry.server.update_registry_post_setup", lambda *args: None ) with local_registry: - with tempfile.TemporaryDirectory() as tempd: - with open(os.path.join(tempd, "token"), "w") as tok_f: - tok_f.write("hjasdi324ji7823jdsf78234") - mocker.patch("fair.common.USER_FAIR_DIR", tempd) - _dummy_name = "Joseph Bloggs" - _dummy_email = "jbloggs@nowhere.com" - _args = [ - "8007", - "", - "", - os.path.join(tempd, "token"), - "", - "", - _dummy_email, - "", - _dummy_name, - "", - "", - os.getcwd(), - "", - ] - - click_test.invoke( - cli, - ["init", "--debug", "--registry", local_registry._install], - input="\n".join(_args), - ) - - assert os.path.exists(fair.common.global_config_dir()) - assert os.path.exists( - os.path.join(os.getcwd(), fair.common.FAIR_FOLDER) - ) - - _cli_cfg = yaml.safe_load( - open( - os.path.join( - os.getcwd(), fair.common.FAIR_FOLDER, "cli-config.yaml" - ) + mocker.patch("fair.common.USER_FAIR_DIR", pySimpleModel) + 
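The init tests above and below drive the interactive `fair init` wizard through `click.testing.CliRunner`, either by exporting environment overrides (`env=_env`) or by joining canned prompt answers with newlines and passing them as `input`. A self-contained sketch of both mechanisms against a made-up command, not the real `fair init`:

import click
import click.testing


@click.command()
@click.option("--ci", is_flag=True, help="Run non-interactively where possible")
def init(ci):
    """Hypothetical stand-in for the `fair init` wizard."""
    port = click.prompt("Local Registry Port", default="8000")
    click.echo(f"registry port {port}, ci={ci}")


def test_init_prompts(tmp_path):
    runner = click.testing.CliRunner()
    # Environment variables reach the command unchanged, as with
    # FAIR_REGISTRY_DIR in test_init_from_env.
    env = {"FAIR_REGISTRY_DIR": str(tmp_path)}
    # One canned answer per prompt; the final empty entry just terminates the
    # input with a newline, as the _args lists in test_cli.py do.
    answers = ["8007", ""]
    result = runner.invoke(init, ["--ci"], env=env, input="\n".join(answers))
    assert result.exit_code == 0
    assert "registry port 8007" in result.output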
_dummy_name = "Joseph Bloggs" + _dummy_email = "jbloggs@nowhere.com" + _args = [ + "8007", + "", + "", + "0123456789012345678901234567890123456789", + "", + _dummy_email, + "NONE", + _dummy_name, + "", + "", + "FAIRDataPipeline", + pySimpleModel, + "", + ] + monkeypatch.chdir(pySimpleModel) + print(os.getcwd()) + click_test.invoke( + cli, + ["init", "--debug", "--registry", local_registry._install], + input="\n".join(_args), + ) + assert os.path.exists(fair.common.global_config_dir()) + assert os.path.exists( + os.path.join(pySimpleModel, fair.common.FAIR_FOLDER) + ) + _cli_cfg = yaml.safe_load( + open( + os.path.join( + pySimpleModel, fair.common.FAIR_FOLDER, "cli-config.yaml" ) ) - - _cli_glob_cfg = yaml.safe_load( - open( - os.path.join( - fair.common.global_config_dir(), "cli-config.yaml" - ) + ) + _cli_glob_cfg = yaml.safe_load( + open( + os.path.join( + fair.common.global_config_dir(), "cli-config.yaml" ) ) - - _expected_url = fdp_com.DEFAULT_LOCAL_REGISTRY_URL.replace( - ":8000", ":8007" - ) - - assert _cli_cfg - assert _cli_cfg["git"]["local_repo"] == os.getcwd() - assert _cli_cfg["git"]["remote"] == "origin" - assert _cli_cfg["git"]["remote_repo"] == "git@notagit.com" - assert _cli_cfg["namespaces"]["input"] == "josephbloggs" - assert _cli_cfg["namespaces"]["output"] == "josephbloggs" - assert _cli_cfg["registries"]["origin"]["data_store"] == urljoin( - fair.common.DEFAULT_REGISTRY_DOMAIN, "data/" - ) - assert _cli_cfg["registries"]["origin"]["uri"] == urljoin( - fair.common.DEFAULT_REGISTRY_DOMAIN, "api/" - ) - assert _cli_glob_cfg["registries"]["local"]["uri"] == _expected_url - assert _cli_cfg["user"]["email"] == _dummy_email - assert _cli_cfg["user"]["family_name"] == "Bloggs" - assert _cli_cfg["user"]["given_names"] == "Joseph" - assert _cli_cfg["user"]["uuid"] - + ) + _expected_url = fdp_com.DEFAULT_LOCAL_REGISTRY_URL.replace( + ":8000", ":8007" + ) + assert _cli_cfg + assert _cli_cfg["git"]["local_repo"] == pySimpleModel + assert _cli_cfg["git"]["remote"] == "origin" + assert _cli_cfg["git"]["remote_repo"] == "https://github.com/FAIRDataPipeline/pySimpleModel.git" + assert _cli_cfg["namespaces"]["input"] == "josephbloggs" + assert _cli_cfg["namespaces"]["output"] == "josephbloggs" + assert _cli_cfg["registries"]["origin"]["data_store"] == urljoin( + fair.common.DEFAULT_REGISTRY_DOMAIN, "data/" + ) + assert _cli_cfg["registries"]["origin"]["uri"] == urljoin( + fair.common.DEFAULT_REGISTRY_DOMAIN, "api/" + ) + assert _cli_glob_cfg["registries"]["local"]["uri"] == _expected_url + assert _cli_cfg["user"]["email"] == _dummy_email + assert _cli_cfg["user"]["family_name"] == "Bloggs" + assert _cli_cfg["user"]["given_names"] == "Joseph" + assert _cli_cfg["user"]["uuid"] @pytest.mark.faircli_cli def test_init_local( local_registry: conf.RegistryTest, click_test: click.testing.CliRunner, mocker: pytest_mock.MockerFixture, + monkeypatch: pytest.MonkeyPatch, + pySimpleModel ): - mocker.patch("fair.common.registry_home", lambda: local_registry._install) - mocker.patch( - "fair.registry.server.update_registry_post_setup", lambda *args: None - ) with local_registry: - with tempfile.TemporaryDirectory() as tempd: - with open(os.path.join(tempd, "token"), "w") as tok_f: - tok_f.write("hjasdi324ji7823jdsf78234") - mocker.patch("fair.common.USER_FAIR_DIR", tempd) - _dummy_name = "Joseph Bloggs" - _dummy_email = "jbloggs@nowhere.com" - _args = [ - "8000", - "", - "", - "", - _dummy_email, - "", - _dummy_name, - "", - "", - os.getcwd(), - "", - ] - - click_test.invoke( - cli, - ["init", 
"--debug", "--local"], - input="\n".join(_args), - ) - - assert os.path.exists(fair.common.global_config_dir()) - assert os.path.exists( - os.path.join(os.getcwd(), fair.common.FAIR_FOLDER) - ) + mocker.patch( + "fair.registry.server.update_registry_post_setup", lambda *args: None + ) + mocker.patch("fair.common.USER_FAIR_DIR", pySimpleModel) + _dummy_name = "Joseph Bloggs" + _dummy_email = "jbloggs@nowhere.com" + _args = [ + "8000", + "", + _dummy_email, + "NONE", + _dummy_name, + "", + "", + pySimpleModel, + "", + ] + monkeypatch.chdir(pySimpleModel) + print(os.getcwd()) + click_test.invoke( + cli, + ["init", "--debug", "--local", "--registry", local_registry._install], + input="\n".join(_args), - _cli_cfg = yaml.safe_load( - open( - os.path.join( - os.getcwd(), fair.common.FAIR_FOLDER, "cli-config.yaml" - ) + ) + assert os.path.exists(fair.common.global_config_dir()) + assert os.path.exists( + os.path.join(pySimpleModel, fair.common.FAIR_FOLDER) + ) + _cli_cfg = yaml.safe_load( + open( + os.path.join( + pySimpleModel, fair.common.FAIR_FOLDER, "cli-config.yaml" ) ) - - _cli_glob_cfg = yaml.safe_load( - open( - os.path.join( - fair.common.global_config_dir(), "cli-config.yaml" - ) + ) + _cli_glob_cfg = yaml.safe_load( + open( + os.path.join( + fair.common.global_config_dir(), "cli-config.yaml" ) ) - - _expected_url = "http://127.0.0.1:8000/api/" - - assert _cli_cfg - assert _cli_cfg["git"]["local_repo"] == os.getcwd() - assert _cli_cfg["git"]["remote"] == "origin" - assert _cli_cfg["git"]["remote_repo"] == "git@notagit.com" - assert _cli_cfg["namespaces"]["input"] == "josephbloggs" - assert _cli_cfg["namespaces"]["output"] == "josephbloggs" + ) + _expected_url = "http://127.0.0.1:8000/api/" + assert _cli_cfg + assert _cli_cfg["git"]["local_repo"] == pySimpleModel + assert _cli_cfg["git"]["remote"] == "origin" + assert _cli_cfg["git"]["remote_repo"] == "https://github.com/FAIRDataPipeline/pySimpleModel.git" + assert _cli_cfg["namespaces"]["input"] == "josephbloggs" + assert _cli_cfg["namespaces"]["output"] == "josephbloggs" + if platform.system() == "Windows": + assert ( + _cli_cfg["registries"]["origin"]["data_store"] + == ".\\data_store\\" + ) + else: assert ( _cli_cfg["registries"]["origin"]["data_store"] == "./data_store/" ) - - assert _cli_cfg["registries"]["origin"]["uri"] == _expected_url - - assert _cli_glob_cfg["registries"]["local"]["uri"] == _expected_url - assert _cli_cfg["user"]["email"] == _dummy_email - assert _cli_cfg["user"]["family_name"] == "Bloggs" - assert _cli_cfg["user"]["given_names"] == "Joseph" - assert _cli_cfg["user"]["uuid"] - + assert _cli_cfg["registries"]["origin"]["uri"] == _expected_url + assert _cli_glob_cfg["registries"]["local"]["uri"] == _expected_url + assert _cli_cfg["user"]["email"] == _dummy_email + assert _cli_cfg["user"]["family_name"] == "Bloggs" + assert _cli_cfg["user"]["given_names"] == "Joseph" + assert _cli_cfg["user"]["github"] == "FAIRDataPipeline" + assert _cli_cfg["user"]["uuid"] @pytest.mark.faircli_cli def test_purge( @@ -420,48 +402,43 @@ def test_purge( os.path.join(local_config[0], fdp_com.FAIR_FOLDER) ) - @pytest.mark.faircli_cli def test_registry_cli( local_config: typing.Tuple[str, str], click_test: click.testing.CliRunner, mocker: pytest_mock.MockerFixture, + tmp_path ): mocker.patch( "fair.common.global_config_dir", lambda *args: local_config[0] ) - with tempfile.TemporaryDirectory() as tempd: - _reg_dir = os.path.join(tempd, "registry") - _result = click_test.invoke( - cli, ["registry", "install", "--directory", _reg_dir, 
"--debug"] - ) - - assert _result.exit_code == 0 - - _result = click_test.invoke(cli, ["registry", "start", "--debug"]) - _registry_status_result = click_test.invoke(cli, ["registry", "status", "--debug"]) - - assert _result.exit_code == 0 - assert _registry_status_result.exit_code == 0 - assert "Server running at: http://127.0.0.1:8000/api/" in _registry_status_result.output - assert requests.get(LOCAL_REGISTRY_URL).status_code == 200 - - _result = click_test.invoke(cli, ["registry", "stop", "--debug"]) - _registry_status_result = click_test.invoke(cli, ["registry", "status", "--debug"]) - - assert _result.exit_code == 0 - assert _registry_status_result.exit_code == 0 - assert "Server is not running" in _registry_status_result.output - with pytest.raises(requests.ConnectionError): - requests.get(LOCAL_REGISTRY_URL) - - _result = click_test.invoke( - cli, ["registry", "uninstall", "--debug"], input="Y" - ) - - assert _result.exit_code == 0 - assert not glob.glob(os.path.join(tempd, "*")) - + tempd = tmp_path.__str__() + _reg_dir = os.path.join(tempd, "registry") + _result = click_test.invoke( + cli, ["registry", "install", "--directory", _reg_dir, "--debug"] + ) + assert _result.exit_code == 0 + _result = click_test.invoke(cli, ["registry", "start", "--debug"]) + print(_result.exit_code) + assert _result.exit_code == 0 + _registry_status_result = click_test.invoke(cli, ["registry", "status", "--debug"]) + assert _registry_status_result.exit_code == 0 + assert "Server running at: http://127.0.0.1:8000/api/" in _registry_status_result.output + assert requests.get(LOCAL_REGISTRY_URL).status_code == 200 + _result = click_test.invoke(cli, ["registry", "stop", "--debug"]) + assert _result.exit_code == 0 + _registry_status_result = click_test.invoke(cli, ["registry", "status", "--debug"]) + assert _registry_status_result.exit_code == 0 + assert "Server is not running" in _registry_status_result.output + with pytest.raises(requests.ConnectionError): + requests.get(LOCAL_REGISTRY_URL) + _result = click_test.invoke( + cli, ["registry", "uninstall", "--debug"], input="Y" + ) + assert _result.exit_code == 0 + if platform.system() == "Windows": + tempd = Path(f"{tempd}") + assert not glob.glob(os.path.join(tempd, "registry", "*")) def test_cli_run( local_config: typing.Tuple[str, str], @@ -488,7 +465,6 @@ def test_cli_run( assert _result.exit_code == 0 - def test_cli_run_local( local_config: typing.Tuple[str, str], local_registry: conf.RegistryTest, diff --git a/tests/test_common.py b/tests/test_common.py index 2f0523e..837b364 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,5 +1,4 @@ import os -import tempfile import git import pytest @@ -11,78 +10,78 @@ @pytest.mark.faircli_common -def test_find_git_root(): - with tempfile.TemporaryDirectory() as tempd: - with pytest.raises(fdp_exc.UserConfigError): - fdp_com.find_git_root(tempd) - git.Repo.init(tempd) - _proj_dir = os.path.join(tempd, "project") - os.makedirs(_proj_dir) - assert fdp_com.find_git_root(tempd) == tempd - assert fdp_com.find_git_root(_proj_dir) == tempd +def test_find_git_root(tmp_path): + tempd = tmp_path.__str__() + with pytest.raises(fdp_exc.UserConfigError): + fdp_com.find_git_root(tempd) + git.Repo.init(tempd) + _proj_dir = os.path.join(tempd, "project") + os.makedirs(_proj_dir) + assert os.path.realpath(fdp_com.find_git_root(tempd)) == os.path.realpath(tempd) + assert os.path.realpath(fdp_com.find_git_root(_proj_dir)) == os.path.realpath(tempd) @pytest.mark.faircli_common -def test_find_fair_root(): - with 
tempfile.TemporaryDirectory() as tempd: - assert not fdp_com.find_fair_root(tempd) - os.makedirs(os.path.join(tempd, fdp_com.FAIR_FOLDER)) - _proj_dir = os.path.join(tempd, "project") - os.makedirs(_proj_dir) - assert fdp_com.find_fair_root(tempd) == tempd - assert fdp_com.find_fair_root(_proj_dir) == tempd +def test_find_fair_root(tmp_path): + tempd = tmp_path.__str__() + assert not fdp_com.find_fair_root(tempd) + os.makedirs(os.path.join(tempd, fdp_com.FAIR_FOLDER)) + _proj_dir = os.path.join(tempd, "project") + os.makedirs(_proj_dir) + assert fdp_com.find_fair_root(tempd) == tempd + assert fdp_com.find_fair_root(_proj_dir) == tempd @pytest.mark.faircli_common -def test_staging_cache(): - with tempfile.TemporaryDirectory() as tempd: - _fair_dir = os.path.join(tempd, fdp_com.FAIR_FOLDER) - os.makedirs(_fair_dir) - assert fdp_com.staging_cache(tempd) == os.path.join( - _fair_dir, "staging" - ) +def test_staging_cache(tmp_path): + tempd = tmp_path.__str__() + _fair_dir = os.path.join(tempd, fdp_com.FAIR_FOLDER) + os.makedirs(_fair_dir) + assert fdp_com.staging_cache(tempd) == os.path.join( + _fair_dir, "staging" + ) @pytest.mark.faircli_common -def test_default_data(mocker: pytest_mock.MockerFixture): - with tempfile.TemporaryDirectory() as tempd: - _glob_conf = os.path.join(tempd, "cli-config.yaml") - mocker.patch("fair.common.global_fdpconfig", lambda: _glob_conf) - with pytest.raises(fdp_exc.InternalError): - fdp_com.default_data_dir() - with open(_glob_conf, "w") as out_f: - yaml.dump({"registries": {"local": {}}}, out_f) - _fair_dir = os.path.join(tempd, fdp_com.FAIR_FOLDER) - mocker.patch("fair.common.USER_FAIR_DIR", _fair_dir) - assert fdp_com.default_data_dir() == os.path.join( - _fair_dir, f"data{os.path.sep}" +def test_default_data(mocker: pytest_mock.MockerFixture, tmp_path): + tempd = tmp_path.__str__() + _glob_conf = os.path.join(tempd, "cli-config.yaml") + mocker.patch("fair.common.global_fdpconfig", lambda: _glob_conf) + with pytest.raises(fdp_exc.InternalError): + fdp_com.default_data_dir() + with open(_glob_conf, "w") as out_f: + yaml.dump({"registries": {"local": {}}}, out_f) + _fair_dir = os.path.join(tempd, fdp_com.FAIR_FOLDER) + mocker.patch("fair.common.USER_FAIR_DIR", _fair_dir) + assert fdp_com.default_data_dir() == os.path.join( + _fair_dir, f"data{os.path.sep}" + ) + with open(_glob_conf, "w") as out_f: + yaml.dump( + {"registries": {"local": {"data_store": "data_store_1"}}}, + out_f, ) - with open(_glob_conf, "w") as out_f: - yaml.dump( - {"registries": {"local": {"data_store": "data_store_1"}}}, - out_f, - ) - assert fdp_com.default_data_dir() == "data_store_1" + assert fdp_com.default_data_dir() == "data_store_1" @pytest.mark.faircli_common -def test_registry_home(mocker: pytest_mock.MockerFixture): - with tempfile.TemporaryDirectory() as tempd: - _glob_conf = os.path.join(tempd, "cli-config.yaml") - mocker.patch("fair.common.global_fdpconfig", lambda: _glob_conf) - with open(_glob_conf, "w") as out_f: - yaml.dump({}, out_f) - assert fdp_com.registry_home() == fdp_com.DEFAULT_REGISTRY_LOCATION - with open(_glob_conf, "w") as out_f: - yaml.dump({"registries": {}}, out_f) - with pytest.raises(fdp_exc.CLIConfigurationError): - fdp_com.registry_home() - with open(_glob_conf, "w") as out_f: - yaml.dump({"registries": {"local": {}}}, out_f) - with pytest.raises(fdp_exc.CLIConfigurationError): - fdp_com.registry_home() - with open(_glob_conf, "w") as out_f: - yaml.dump( - {"registries": {"local": {"directory": "registry"}}}, out_f - ) - assert fdp_com.registry_home() 
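The test_common.py and conftest.py rewrites above swap `tempfile.TemporaryDirectory()` context managers for pytest's `tmp_path` and `tmp_path_factory` fixtures, which avoids the Windows cleanup failures that deleting still-open directories can cause and gives module-scoped fixtures their own directories. A small illustration of the pattern, with fixture and directory names of my own choosing:

import os

import pytest


@pytest.fixture(scope="module")
def module_workspace(tmp_path_factory):
    # tmp_path_factory is the tmp fixture usable beyond function scope;
    # pytest prunes old base temp directories itself, so no manual cleanup.
    workspace = tmp_path_factory.mktemp("workspace")
    (workspace / "registry").mkdir()
    yield str(workspace)


def test_workspace_layout(module_workspace, tmp_path):
    # tmp_path hands every test its own fresh, function-scoped directory.
    scratch = os.path.join(str(tmp_path), "scratch")
    os.makedirs(scratch)
    assert os.path.isdir(os.path.join(module_workspace, "registry"))
    assert os.path.isdir(scratch)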
== "registry" +def test_registry_home(mocker: pytest_mock.MockerFixture, tmp_path): + tempd = tmp_path.__str__() + _glob_conf = os.path.join(tempd, "cli-config.yaml") + mocker.patch("fair.common.global_fdpconfig", lambda: _glob_conf) + with open(_glob_conf, "w") as out_f: + yaml.dump({}, out_f) + assert fdp_com.registry_home() == fdp_com.DEFAULT_REGISTRY_LOCATION + with open(_glob_conf, "w") as out_f: + yaml.dump({"registries": {}}, out_f) + with pytest.raises(fdp_exc.CLIConfigurationError): + fdp_com.registry_home() + with open(_glob_conf, "w") as out_f: + yaml.dump({"registries": {"local": {}}}, out_f) + with pytest.raises(fdp_exc.CLIConfigurationError): + fdp_com.registry_home() + with open(_glob_conf, "w") as out_f: + yaml.dump( + {"registries": {"local": {"directory": "registry"}}}, out_f + ) + assert fdp_com.registry_home() == "registry" diff --git a/tests/test_configuration.py b/tests/test_configuration.py index 05d2dd1..e0a7faf 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -1,15 +1,20 @@ import os -import tempfile +import platform import typing import deepdiff import pytest import pytest_mock +import warnings + +from . import conftest as conf import fair.common as fdp_com import fair.configuration as fdp_conf import fair.identifiers as fdp_id +GITHUB_USER = "FAIRDataPipeline" +ORCID_ID = "0000-0002-6773-1049" @pytest.mark.faircli_configuration def test_local_cli_config_read(local_config: typing.Tuple[str, str]): @@ -95,16 +100,16 @@ def test_get_remote_uri(local_config: typing.Tuple[str, str]): @pytest.mark.faircli_configuration -def test_get_remote_token(mocker: pytest_mock.MockerFixture): - with tempfile.TemporaryDirectory() as tempd: - _token = "t35tt0k3n" - _token_file = os.path.join(tempd, "token") - open(_token_file, "w").write(_token) - mocker.patch( - "fair.configuration.read_local_fdpconfig", - lambda *args: {"registries": {"origin": {"token": _token_file}}}, - ) - assert fdp_conf.get_remote_token("") == _token +def test_get_remote_token(mocker: pytest_mock.MockerFixture, tmp_path): + tempd = tmp_path.__str__() + _token = "t35tt0k3n" + _token_file = os.path.join(tempd, "token") + open(_token_file, "w").write(_token) + mocker.patch( + "fair.configuration.read_local_fdpconfig", + lambda *args: {"registries": {"origin": {"token": _token_file}}}, + ) + assert fdp_conf.get_remote_token("") == _token @pytest.mark.faircli_configuration @@ -121,7 +126,7 @@ def test_get_git_remote(local_config: typing.Tuple[str, str]): def test_get_orcid(local_config: typing.Tuple[str, str]): assert ( fdp_conf.get_current_user_uri(local_config[0]) - == f'{fdp_id.ID_URIS["orcid"]}000-0000-0000-0000' + == f'{fdp_id.ID_URIS["github"]}FAIRDataPipeline' ) @@ -154,20 +159,30 @@ def test_local_port(local_config: typing.Tuple[str, str]): @pytest.mark.faircli_configuration def test_user_info(mocker: pytest_mock.MockerFixture): + if not conf.test_can_be_run(f'{fdp_id.QUERY_URLS["github"]}{GITHUB_USER}'): + warnings.warn("GitHub API Unavailable") + pytest.skip("Cannot Reach GitHub API") + if not conf.test_can_be_run(f'{fdp_id.QUERY_URLS["orcid"]}{ORCID_ID}'): + warnings.warn("Orcid API Unavailable") + pytest.skip("Cannot Reach Orcid API") + _data = fdp_id.check_github("FAIRDataPipeline") _namepaces = {"input": "ispace", "output": "jbloggs"} + _email = "jbloggs@nowhere.com" + _github_username = "FAIRDataPipeline" _override = { - "Email": "jbloggs@nowhere.com", + "Email (optional)": _email, "Full Name": "Joseph Bloggs", "Default input namespace": _namepaces["input"], "Default 
output namespace": _namepaces["output"], - "User ID system (ORCID/ROR/GRID/None)": "None", + "User ID system (GITHUB/ORCID/ROR/GRID/None)": "None", + "GitHub Username": _github_username } _orcid_override = { "family_name": "Bloggs", "given_names": "Joseph", "uuid": None, - "email": _override["Email"], + "email": _email, } _uuid_override = _orcid_override.copy() _uuid_override["uuid"] = "f45sasd832j234gjk" @@ -178,7 +193,7 @@ def test_user_info(mocker: pytest_mock.MockerFixture): mocker.patch("uuid.uuid4", lambda: _uuid_override["uuid"]) _noorc = fdp_conf._get_user_info_and_namespaces() - _override["User ID system (ORCID/ROR/GRID/None)"] = "ORCID" + _override["User ID system (GITHUB/ORCID/ROR/GRID/None)"] = "ORCID" _override["ORCID"] = "0000-0000-0000" mocker.patch( @@ -187,6 +202,9 @@ def test_user_info(mocker: pytest_mock.MockerFixture): mocker.patch("fair.identifiers.check_orcid", lambda *args: _orcid_override) _orc = fdp_conf._get_user_info_and_namespaces() + _uuid_override["github"] = _github_username + _orcid_override["github"] = _github_username + _expect_noorc = {"user": _uuid_override, "namespaces": _namepaces} _expect_orcid = {"user": _orcid_override, "namespaces": _namepaces} @@ -197,11 +215,12 @@ def test_user_info(mocker: pytest_mock.MockerFixture): @pytest.mark.faircli_configuration def test_global_config_query( - mocker: pytest_mock.MockerFixture, local_config: typing.Tuple[str, str] + mocker: pytest_mock.MockerFixture, local_config: typing.Tuple[str, str], + tmp_path ): _override = { "Remote Data Storage Root": "", - "Remote API Token File": os.path.join(local_config[0], "token.txt"), + "Remote API Token": "0000000000000000000000000000000000000000", "Default Data Store": "data_store/", "Local Registry Port": "8001", "Remote API URL": "http://127.0.0.1:8007/api/", @@ -223,13 +242,17 @@ def test_global_config_query( "fair.registry.requests.local_token", lambda *args, **kwargs: "92342343243224", ) + mocker.patch( + "fair.common.global_config_dir", + lambda *args, **kwargs: tmp_path.__str__() + ) mocker.patch( "click.prompt", lambda x, default=None: _override[x] or default ) mocker.patch("click.confirm", lambda *args, **kwargs: False) mocker.patch( "fair.configuration._get_user_info_and_namespaces", - lambda: _default_user, + lambda local: _default_user, ) _expected = { @@ -242,7 +265,7 @@ def test_global_config_query( }, "origin": { "uri": _override["Remote API URL"], - "token": _override["Remote API Token File"], + "token": os.path.join(tmp_path, "remotetoken.txt"), "data_store": _override["Remote API URL"].replace( "api", "data" ), @@ -251,8 +274,11 @@ def test_global_config_query( } _expected.update(_default_user) + if platform.system() == "Windows": + _expected['registries']['local']['data_store'] = "data_store/\\" + assert not deepdiff.DeepDiff( - _expected, fdp_conf.global_config_query(local_config[0]) + _expected, fdp_conf.global_config_query(registry = local_config[0]) ) diff --git a/tests/test_identifiers.py b/tests/test_identifiers.py index 41f1b12..a1ebb25 100644 --- a/tests/test_identifiers.py +++ b/tests/test_identifiers.py @@ -1,20 +1,32 @@ import pytest import fair.identifiers as fdp_id +from . 
import conftest as conf +import warnings +GITHUB_USER = "FAIRDataPipeline" +ORCID_ID = "0000-0002-6773-1049" +ROR_ID = "049s0ch10" +GRID_ID = "grid.438622.9" @pytest.mark.faircli_ids def test_check_orcid(): - _data = fdp_id.check_orcid("0000-0002-6773-1049") + if not conf.test_can_be_run(f'{fdp_id.QUERY_URLS["orcid"]}{ORCID_ID}'): + warnings.warn(f'Orcid API {fdp_id.QUERY_URLS["orcid"]} Unavailable') + pytest.skip("Cannot Reach Orcid API") + _data = fdp_id.check_orcid(ORCID_ID) assert _data["name"] == "Kristian Zarębski" assert _data["family_name"] == "Zarębski" assert _data["given_names"] == "Kristian" - assert _data["orcid"] == "0000-0002-6773-1049" + assert _data["orcid"] == ORCID_ID assert not fdp_id.check_orcid("notanid!") @pytest.mark.faircli_ids def test_check_generic_ror(): - _data = fdp_id._check_generic_ror("049s0ch10") + if not conf.test_can_be_run(f'{fdp_id.QUERY_URLS["ror"]}{ROR_ID}'): + warnings.warn("ROR API Unavailable") + pytest.skip("Cannot Reach ROR API") + _data = fdp_id._check_generic_ror(ROR_ID) assert _data["name"] == "Rakon (France)" == _data["family_name"] assert not "ror" in _data assert not "grid" in _data @@ -22,22 +34,44 @@ def test_check_generic_ror(): @pytest.mark.faircli_ids def test_check_ror(): - _data = fdp_id.check_ror("049s0ch10") + if not conf.test_can_be_run(f'{fdp_id.QUERY_URLS["ror"]}{ROR_ID}'): + warnings.warn("ROR API Unavailable") + pytest.skip("Cannot Reach ROR API") + _data = fdp_id.check_ror(ROR_ID) assert _data["name"] == "Rakon (France)" == _data["family_name"] - assert _data["ror"] == "049s0ch10" + assert _data["ror"] == ROR_ID assert _data['uri'] == "https://ror.org/049s0ch10" assert not fdp_id.check_ror("notanid!") @pytest.mark.faircli_ids def test_check_grid(): - _data = fdp_id.check_grid("grid.438622.9") + if not conf.test_can_be_run(f'{fdp_id.QUERY_URLS["ror"]}{ROR_ID}'): + warnings.warn("ROR API Unavailable") + pytest.skip("Cannot Reach ROR API") + _data = fdp_id.check_grid(GRID_ID) assert _data["name"] == "Rakon (France)" == _data["family_name"] - assert _data["grid"] == "grid.438622.9" + assert _data["grid"] == GRID_ID assert _data['uri'] == "https://ror.org/049s0ch10" assert not fdp_id.check_grid("notanid!") +@pytest.mark.faircli_ids +def test_check_github(): + if not conf.test_can_be_run(f'{fdp_id.QUERY_URLS["github"]}{GITHUB_USER}'): + warnings.warn("GitHub API Unavailable") + pytest.skip("Cannot Reach GitHub API") + _data = fdp_id.check_github("FAIRDataPipeline") + assert _data["name"] == "FAIR Data Pipeline" + assert _data["family_name"] == "Pipeline" + assert _data["given_names"] == "FAIR Data" + assert _data["github"] == GITHUB_USER + assert _data['uri'] == f"https://github.com/{GITHUB_USER}" + assert not fdp_id.check_github("notanid!") + @pytest.mark.faircli_ids def test_check_permitted(): + if not conf.test_can_be_run(f'{fdp_id.QUERY_URLS["orcid"]}{ORCID_ID}'): + warnings.warn("Orcid API Unavailable") + pytest.skip("Cannot Reach Orcid API") assert fdp_id.check_id_permitted("https://orcid.org/0000-0002-6773-1049") assert not fdp_id.check_id_permitted("notanid!") diff --git a/tests/test_register.py b/tests/test_register.py new file mode 100644 index 0000000..90ba635 --- /dev/null +++ b/tests/test_register.py @@ -0,0 +1,101 @@ +import os +import pathlib +import typing + +import click.testing +import pytest +import pytest_mock +import yaml + +import fair.registry.sync as fdp_sync +import fair.registry.server as fdp_serv +from fair.cli import cli +from fair.common import FAIR_FOLDER +from fair.registry.requests import get, 
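test_global_config_query and test_user_info above replace interactive input by patching `click.prompt` with a lambda that looks the prompt text up in an answer dictionary and falls back to the prompt's default. The same idea in isolation, against a hypothetical prompt sequence rather than the CLI's real one:

import click


def ask_user():
    """Hypothetical prompt sequence standing in for the CLI's user queries."""
    email = click.prompt("Email (optional)", default="")
    name = click.prompt("Full Name")
    return {"email": email, "name": name}


def test_ask_user(mocker):
    # Answers keyed by prompt text; empty strings fall through to the default,
    # exactly as the _override dictionaries in test_configuration.py work.
    _override = {"Email (optional)": "", "Full Name": "Joseph Bloggs"}
    mocker.patch(
        "click.prompt", lambda text, default=None: _override[text] or default
    )
    mocker.patch("click.confirm", lambda *args, **kwargs: False)
    assert ask_user() == {"email": "", "name": "Joseph Bloggs"}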
url_get +from tests.conftest import RegistryTest +from tests.conftest import MotoTestServer +import fair.session as fdp_session +import fair.common as fdp_com +import fair.testing as fdp_test + +TEST_DATA_DIR = f'file://{os.path.dirname(__file__)}{os.path.sep}data{os.path.sep}' + +TEST_REGISTER_CFG = os.path.join( + os.path.dirname(__file__), "data", "test_register.yaml" +) + +@pytest.mark.faircli_register +def test_register( + global_config, + local_registry, + remote_registry, + pyDataPipeline: str, + monkeypatch_module, + tmp_path, + capsys, +): + try: + import data_pipeline_api # noqa + except ModuleNotFoundError: + pytest.skip("Python API implementation not installed") + monkeypatch_module.chdir(pyDataPipeline) + monkeypatch_module.setattr( + "fair.registry.server.launch_server", lambda *args, **kwargs: False + ) + _cli_runner = click.testing.CliRunner() + config_path = os.path.join( + pyDataPipeline, fdp_com.FAIR_CLI_CONFIG + ) + _config = fdp_test.create_configurations(local_registry._install, pyDataPipeline, remote_registry._install, global_config, True) + yaml.dump(_config, open(config_path, "w")) + with capsys.disabled(): + print (_config) + print(f"\tRUNNING: fair init --debug") + with local_registry, remote_registry: + _res = _cli_runner.invoke( + cli, + ["init", "--debug", "--using", config_path], + catch_exceptions = True + ) + with capsys.disabled(): + print(f'exit code: {_res.exit_code}') + print(f'exc info: {_res.exc_info}') + print(f'exception: {_res.exception}') + assert _res.exit_code == 0 + + _cfg_path = os.path.join( + pyDataPipeline, "simpleModel", "ext", "SEIRSconfig.yaml" + ) + _res = _cli_runner.invoke( + cli, ["pull", _cfg_path, "--debug"], + catch_exceptions = True + ) + with capsys.disabled(): + print(f'exit code: {_res.exit_code}') + print(f'exc info: {_res.exc_info}') + print(f'exception: {_res.exception}') + assert _res.exit_code == 0 + + _working_yaml_path = os.path.join(tmp_path, "working_yaml.yaml") + _cfg_str = {} + + with open(TEST_REGISTER_CFG) as cfg_file: + _cfg_str = cfg_file.read() + + print(f'Test Data Directory {TEST_DATA_DIR}') + _cfg_str = _cfg_str.replace("", TEST_DATA_DIR) + + _cfg = yaml.safe_load(_cfg_str) + + with open(_working_yaml_path, "w") as f: + yaml.dump(_cfg, f, sort_keys=False) + + _res = _cli_runner.invoke( + cli, ["pull", _working_yaml_path, "--debug"], + catch_exceptions = True + ) + with capsys.disabled(): + print(f'exit code: {_res.exit_code}') + print(f'exc info: {_res.exc_info}') + print(f'exception: {_res.exception}') + assert _res.exit_code == 0 \ No newline at end of file diff --git a/tests/test_requests.py b/tests/test_requests.py index 0d18311..28ce894 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -1,5 +1,4 @@ import os -import tempfile import pytest import pytest_mock @@ -26,15 +25,15 @@ def test_split_url(): @pytest.mark.faircli_requests -def test_local_token(mocker: pytest_mock.MockerFixture): +def test_local_token(mocker: pytest_mock.MockerFixture, tmp_path): _dummy_key = "sdfd234ersdf45234" - with tempfile.TemporaryDirectory() as tempd: - _token_file = os.path.join(tempd, "token") - mocker.patch("fair.common.registry_home", lambda: tempd) - with pytest.raises(fdp_exc.FileNotFoundError): - fdp_req.local_token() - open(_token_file, "w").write(_dummy_key) - assert fdp_req.local_token() == _dummy_key + tempd = tmp_path.__str__() + _token_file = os.path.join(tempd, "token") + mocker.patch("fair.common.registry_home", lambda: tempd) + with pytest.raises(fdp_exc.FileNotFoundError): + 
fdp_req.local_token() + open(_token_file, "w").write(_dummy_key) + assert fdp_req.local_token() == _dummy_key @pytest.mark.faircli_requests def test_request_error_registy_not_running(): diff --git a/tests/test_server.py b/tests/test_server.py index 27875b5..340c120 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -1,5 +1,4 @@ import os -import tempfile import time import typing @@ -23,38 +22,38 @@ def test_check_server_running( assert fdp_serv.check_server_running(LOCAL_REGISTRY_URL) @pytest.mark.faircli_server -def test_registry_install_uninstall(mocker: pytest_mock.MockerFixture): - with tempfile.TemporaryDirectory() as tempd: - reg_dir = os.path.join(tempd, "registry") - mocker.patch("fair.common.DEFAULT_REGISTRY_LOCATION", reg_dir) - fdp_serv.install_registry(install_dir=reg_dir) - assert os.path.exists(os.path.join(reg_dir, "db.sqlite3")) - fdp_serv.uninstall_registry() +def test_registry_install_uninstall(mocker: pytest_mock.MockerFixture, tmp_path): + tempd = tmp_path.__str__() + reg_dir = os.path.join(tempd, "registry") + mocker.patch("fair.common.DEFAULT_REGISTRY_LOCATION", reg_dir) + fdp_serv.install_registry(install_dir=reg_dir) + assert os.path.exists(os.path.join(reg_dir, "db.sqlite3")) + fdp_serv.uninstall_registry() @pytest.mark.faircli_server def test_launch_stop_server( local_config: typing.Tuple[str, str], - local_registry: conf.RegistryTest, mocker: pytest_mock.MockerFixture, + tmp_path ): - with tempfile.TemporaryDirectory() as tempd: - reg_dir = os.path.join(tempd, "registry") - mocker.patch("fair.common.DEFAULT_REGISTRY_LOCATION", reg_dir) - fdp_serv.install_registry(install_dir=reg_dir) - fdp_serv.launch_server() - time.sleep(2) - fdp_serv.stop_server(force=True) + tempd = tmp_path.__str__() + reg_dir = os.path.join(tempd, "registry") + mocker.patch("fair.common.DEFAULT_REGISTRY_LOCATION", reg_dir) + fdp_serv.install_registry(install_dir=reg_dir) + fdp_serv.launch_server() + time.sleep(5) + fdp_serv.stop_server(force=True) @pytest.mark.faircli_server def test_launch_stop_server_with_port( local_config: typing.Tuple[str, str], - local_registry: conf.RegistryTest, mocker: pytest_mock.MockerFixture, + tmp_path ): - with tempfile.TemporaryDirectory() as tempd: - reg_dir = os.path.join(tempd, "registry") - mocker.patch("fair.common.DEFAULT_REGISTRY_LOCATION", reg_dir) - fdp_serv.install_registry(install_dir=reg_dir) - fdp_serv.launch_server(port=8005, address='0.0.0.0', verbose= True) - time.sleep(2) - fdp_serv.stop_server(force=True, local_uri="http://127.0.0.1:8005/api") \ No newline at end of file + tempd = tmp_path.__str__() + reg_dir = os.path.join(tempd, "registry") + mocker.patch("fair.common.DEFAULT_REGISTRY_LOCATION", reg_dir) + fdp_serv.install_registry(install_dir=reg_dir) + fdp_serv.launch_server(port=8005, address='0.0.0.0', verbose= True) + time.sleep(5) + fdp_serv.stop_server(force=True, local_uri="http://127.0.0.1:8005/api") \ No newline at end of file diff --git a/tests/test_staging.py b/tests/test_staging.py index 478b1ff..20e9e04 100644 --- a/tests/test_staging.py +++ b/tests/test_staging.py @@ -1,6 +1,5 @@ import os import shutil -import tempfile import typing import uuid @@ -82,6 +81,7 @@ def test_get_job_data( local_config: typing.Tuple[str, str], mocker: pytest_mock.MockerFixture, pyDataPipeline: str, + tmp_path ): with local_registry: mocker.patch( @@ -92,40 +92,34 @@ def test_get_job_data( with pytest.raises(fdp_exc.StagingError): stager.get_job_data(LOCAL_REGISTRY_URL, _id) - with tempfile.TemporaryDirectory() as tempd: - _job_dir 
= os.path.join(tempd, str(_id)) - os.makedirs(_job_dir) - mocker.patch("fair.run.get_job_dir", lambda x: _job_dir) - mocker.patch("fair.common.JOBS_DIR", tempd) - with pytest.raises(fdp_exc.FileNotFoundError): - stager.get_job_data(LOCAL_REGISTRY_URL, _id) - - _dummy_url = "http://not-a-url.com" - mocker.patch.object( - stager, - "find_registry_entry_for_file", - lambda *args: {"url": _dummy_url}, - ) - - mocker.patch( - "fair.registry.requests.get", - lambda *args, **kwargs: [{"url": _dummy_url}], - ) - - _cfg_path = os.path.join( - pyDataPipeline, "simpleModel", "ext", "SEIRSconfig.yaml" - ) - - shutil.copy( - _cfg_path, - os.path.join(_job_dir, fdp_com.USER_CONFIG_FILE), - ) - - _jobs = stager.get_job_data(LOCAL_REGISTRY_URL, _id) - - assert _jobs == { - "jobs": [], - "user_written_objects": 2 * [_dummy_url], - "config_file": _dummy_url, - "script_file": None, - } + tempd = tmp_path.__str__() + _job_dir = os.path.join(tempd, str(_id)) + os.makedirs(_job_dir) + mocker.patch("fair.run.get_job_dir", lambda x: _job_dir) + mocker.patch("fair.common.JOBS_DIR", tempd) + with pytest.raises(fdp_exc.FileNotFoundError): + stager.get_job_data(LOCAL_REGISTRY_URL, _id) + _dummy_url = "http://not-a-url.com" + mocker.patch.object( + stager, + "find_registry_entry_for_file", + lambda *args: {"url": _dummy_url}, + ) + mocker.patch( + "fair.registry.requests.get", + lambda *args, **kwargs: [{"url": _dummy_url}], + ) + _cfg_path = os.path.join( + pyDataPipeline, "simpleModel", "ext", "SEIRSconfig.yaml" + ) + shutil.copy( + _cfg_path, + os.path.join(_job_dir, fdp_com.USER_CONFIG_FILE), + ) + _jobs = stager.get_job_data(LOCAL_REGISTRY_URL, _id) + assert _jobs == { + "jobs": [], + "user_written_objects": 2 * [_dummy_url], + "config_file": _dummy_url, + "script_file": None, + } diff --git a/tests/test_storage.py b/tests/test_storage.py index 28f53da..a0ba8cd 100644 --- a/tests/test_storage.py +++ b/tests/test_storage.py @@ -1,7 +1,7 @@ import string -import tempfile import typing import os +import hashlib import pytest import pytest_mock @@ -48,18 +48,18 @@ def test_store_working_config( local_config: typing.Tuple[str, str], local_registry: conf.RegistryTest, mocker: pytest_mock.MockerFixture, + tmp_path ): mocker.patch("fair.common.registry_home", lambda: local_registry._install) with local_registry: - with tempfile.NamedTemporaryFile( - mode="w+", suffix=".yaml", delete=False - ) as tempf: + temp_file_name = os.path.join(tmp_path, f'{hashlib.sha1(tmp_path.__str__().encode("utf-8")).hexdigest()}.yaml') + with open(temp_file_name, "w") as tempf: yaml.dump( - {"run_metadata": {"write_data_store": "data_store"}}, tempf + {"run_metadata": {"write_data_store": os.path.dirname(temp_file_name)}}, tempf ) assert fdp_store.store_working_config( - local_config[1], LOCAL_URL, tempf.name, local_registry._token + local_config[1], LOCAL_URL, temp_file_name, local_registry._token ) @@ -68,23 +68,25 @@ def test_store_working_script( local_config: typing.Tuple[str, str], local_registry: conf.RegistryTest, mocker: pytest_mock.MockerFixture, + tmp_path ): mocker.patch("fair.common.registry_home", lambda: local_registry._install) with local_registry: - with tempfile.NamedTemporaryFile( - mode="w+", suffix=".yaml", delete=False - ) as tempf: + temp_file_name = os.path.join(tmp_path, f'{hashlib.sha1(tmp_path.__str__().encode("utf-8")).hexdigest()}.yaml') + with open(temp_file_name, "w") as tempf: yaml.dump( - {"run_metadata": {"write_data_store": "data_store"}}, tempf + {"run_metadata": {"write_data_store": 
diff --git a/tests/test_storage.py b/tests/test_storage.py
index 28f53da..a0ba8cd 100644
--- a/tests/test_storage.py
+++ b/tests/test_storage.py
@@ -1,7 +1,7 @@
 import string
-import tempfile
 import typing
 import os
+import hashlib

 import pytest
 import pytest_mock
@@ -48,18 +48,18 @@ def test_store_working_config(
     local_config: typing.Tuple[str, str],
     local_registry: conf.RegistryTest,
     mocker: pytest_mock.MockerFixture,
+    tmp_path
 ):
     mocker.patch("fair.common.registry_home", lambda: local_registry._install)
     with local_registry:
-        with tempfile.NamedTemporaryFile(
-            mode="w+", suffix=".yaml", delete=False
-        ) as tempf:
+        temp_file_name = os.path.join(tmp_path, f'{hashlib.sha1(tmp_path.__str__().encode("utf-8")).hexdigest()}.yaml')
+        with open(temp_file_name, "w") as tempf:
             yaml.dump(
-                {"run_metadata": {"write_data_store": "data_store"}}, tempf
+                {"run_metadata": {"write_data_store": os.path.dirname(temp_file_name)}}, tempf
             )

         assert fdp_store.store_working_config(
-            local_config[1], LOCAL_URL, tempf.name, local_registry._token
+            local_config[1], LOCAL_URL, temp_file_name, local_registry._token
         )


@@ -68,23 +68,25 @@ def test_store_working_script(
     local_config: typing.Tuple[str, str],
     local_registry: conf.RegistryTest,
     mocker: pytest_mock.MockerFixture,
+    tmp_path
 ):
     mocker.patch("fair.common.registry_home", lambda: local_registry._install)
     with local_registry:
-        with tempfile.NamedTemporaryFile(
-            mode="w+", suffix=".yaml", delete=False
-        ) as tempf:
+        temp_file_name = os.path.join(tmp_path, f'{hashlib.sha1(tmp_path.__str__().encode("utf-8")).hexdigest()}.yaml')
+        with open(temp_file_name, "w") as tempf:
             yaml.dump(
-                {"run_metadata": {"write_data_store": "data_store"}}, tempf
+                {"run_metadata": {"write_data_store": os.path.dirname(temp_file_name)}}, tempf
             )

-        _temp_script = tempfile.NamedTemporaryFile(suffix=".sh", delete=False)
+        temp_script_name = os.path.join(tmp_path, f'{hashlib.sha1(tmp_path.__str__().encode("utf-8")).hexdigest()}.sh')
+        with open(temp_script_name, "w") as _temp_script:
+            _temp_script.write(string.ascii_letters)

         assert fdp_store.store_working_script(
             local_config[1],
             LOCAL_URL,
-            _temp_script.name,
-            tempf.name,
+            temp_script_name,
+            temp_file_name,
             local_registry._token,
         )


@@ -105,10 +107,9 @@ def test_store_namespace(


 @pytest.mark.faircli_storage
-def test_calc_file_hash():
-    with tempfile.NamedTemporaryFile(
-        mode="w+", suffix=".txt", delete=False
-    ) as tempf:
+def test_calc_file_hash(tmp_path):
+    temp_file_name = os.path.join(tmp_path, f'{hashlib.sha1(tmp_path.__str__().encode("utf-8")).hexdigest()}.txt')
+    with open(temp_file_name, "w") as tempf:
         tempf.write(string.ascii_letters)
     _HASH = "db16441c4b330570a9ac83b0e0b006fcd74cc32b"
     # Based on hash calculated at 2021-10-15
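[Reviewer note, not part of the patch] The storage tests above replace tempfile.NamedTemporaryFile with deterministic file names inside tmp_path, derived from the SHA-1 of the directory path. A sketch of that naming scheme; the helper name is invented for illustration:

    import hashlib
    import os

    def hashed_temp_name(tmp_dir: str, suffix: str = ".yaml") -> str:
        # Derive the file name from the SHA-1 of the temporary directory path,
        # mirroring the f-string used in the tests above, so each test gets a
        # unique yet repeatable file name within its own tmp_path.
        digest = hashlib.sha1(tmp_dir.encode("utf-8")).hexdigest()
        return os.path.join(tmp_dir, f"{digest}{suffix}")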
diff --git a/tests/test_sync.py b/tests/test_sync.py
index df00a27..2000ffd 100644
--- a/tests/test_sync.py
+++ b/tests/test_sync.py
@@ -1,4 +1,3 @@
-import tempfile
 import os
 import pathlib
 import typing
@@ -38,67 +37,62 @@ def test_pull_download():


 @pytest.mark.faircli_sync
-def test_fetch_data_product(mocker: pytest_mock.MockerFixture):
-
-    with tempfile.TemporaryDirectory() as tempd:
-        _dummy_data_product_name = "test"
-        _dummy_data_product_version = "2.3.0"
-        _dummy_data_product_namespace = "testing"
-
-        def mock_get(url, obj, *args, **kwargs):
-            if obj == "storage_location":
-                return [
-                    {
-                        "path": "/this/is/a/dummy/path",
-                        "storage_root": "https://dummyurl/",
-                    }
-                ]
-            elif obj == "storage_root":
-                return [{"root": "https://fake/root/"}]
-            elif obj == "namespace":
-                return [
-                    {"name": _dummy_data_product_namespace, "url": "namespace"}
-                ]
-            elif obj == "data_product":
-                return [
-                    {
-                        "data_product": _dummy_data_product_name,
-                        "version": _dummy_data_product_version,
-                        "namespace": "namespace",
-                    }
-                ]
-
-        def mock_url_get(url, *args, **kwargs):
-            if "storage_location" in url:
-                return {
-                    "path": "FAIRDataPipeline/FAIR-CLI/archive/refs/heads/main.zip",
-                    "storage_root": "storage_root",
+def test_fetch_data_product(mocker: pytest_mock.MockerFixture, tmp_path):
+
+    tempd = tmp_path.__str__()
+    _dummy_data_product_name = "test"
+    _dummy_data_product_version = "2.3.0"
+    _dummy_data_product_namespace = "testing"
+    def mock_get(url, obj, *args, **kwargs):
+        if obj == "storage_location":
+            return [
+                {
+                    "path": "/this/is/a/dummy/path",
+                    "storage_root": "https://dummyurl/",
                 }
-            elif "storage_root" in url:
-                return {"root": "https://github.com/"}
-            elif "namespace" in url:
-                return {
-                    "name": _dummy_data_product_namespace,
-                    "url": "namespace",
+            ]
+        elif obj == "storage_root":
+            return [{"root": "https://fake/root/"}]
+        elif obj == "namespace":
+            return [
+                {"name": _dummy_data_product_namespace, "url": "namespace"}
+            ]
+        elif obj == "data_product":
+            return [
+                {
+                    "data_product": _dummy_data_product_name,
+                    "version": _dummy_data_product_version,
+                    "namespace": "namespace",
                 }
-            elif "object" in url:
-                return {
-                    "storage_location": "storage_location",
-                    "url": "object",
-                }
-
-        mocker.patch("fair.registry.requests.get", mock_get)
-        mocker.patch("fair.registry.requests.url_get", mock_url_get)
-
-        _example_data_product = {
-            "version": _dummy_data_product_version,
-            "namespace": "namespace",
-            "name": _dummy_data_product_name,
-            "data_product": _dummy_data_product_name,
-            "object": "object",
-        }
-
-        fdp_sync.fetch_data_product("", tempd, _example_data_product)
+            ]
+    def mock_url_get(url, *args, **kwargs):
+        if "storage_location" in url:
+            return {
+                "path": "FAIRDataPipeline/FAIR-CLI/archive/refs/heads/main.zip",
+                "storage_root": "storage_root",
+            }
+        elif "storage_root" in url:
+            return {"root": "https://github.com/"}
+        elif "namespace" in url:
+            return {
+                "name": _dummy_data_product_namespace,
+                "url": "namespace",
+            }
+        elif "object" in url:
+            return {
+                "storage_location": "storage_location",
+                "url": "object",
+            }
+    mocker.patch("fair.registry.requests.get", mock_get)
+    mocker.patch("fair.registry.requests.url_get", mock_url_get)
+    _example_data_product = {
+        "version": _dummy_data_product_version,
+        "namespace": "namespace",
+        "name": _dummy_data_product_name,
+        "data_product": _dummy_data_product_name,
+        "object": "object",
+    }
+    fdp_sync.fetch_data_product("", tempd, _example_data_product)


 @pytest.mark.faircli_sync
 @pytest.mark.dependency(name="init")
@@ -205,6 +199,7 @@ def test_push(
     remote_registry: RegistryTest,
     pyDataPipeline: str,
     fair_bucket: MotoTestServer,
+    mocker: pytest_mock.MockerFixture,
     capsys,
 ):
     try:
@@ -214,6 +209,10 @@ def test_push(
     _cli_runner = click.testing.CliRunner()

     with remote_registry, local_registry, fair_bucket:
+        mocker.patch(
+            "fair.configuration.get_current_user_github",
+            lambda *args, **kwargs: "admin",
+        )
         _res = _cli_runner.invoke(
             cli, ["list"]
         )
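[Reviewer note, not part of the patch] test_push above patches fair.configuration.get_current_user_github so the test never contacts GitHub, then drives the CLI in-process with click.testing.CliRunner. A minimal sketch of that invocation pattern using a throwaway command rather than the real cli group:

    import click
    import click.testing

    @click.command()
    def hello():
        click.echo("hello")

    def test_invoke_in_process():
        # CliRunner runs the command in-process and captures output and exit code.
        result = click.testing.CliRunner().invoke(hello)
        assert result.exit_code == 0
        assert "hello" in result.output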