diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 85a8e093..1f879fec 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -76,7 +76,7 @@ jobs: - name: "Check out repository code" uses: "actions/checkout@v4" - name: "Setup environment" - run: "pip install ruff==0.8.6" + run: "pip install ruff==0.11.0" - name: "Linting: ruff check" run: "ruff check ." - name: "Linting: ruff format" @@ -212,6 +212,7 @@ jobs: - "3.10" - "3.11" - "3.12" + - "3.13" if: | always() && !cancelled() && !contains(needs.*.result, 'failure') && @@ -299,77 +300,79 @@ jobs: env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - integration-tests-local-infrahub: - if: | - always() && !cancelled() && - !contains(needs.*.result, 'failure') && - !contains(needs.*.result, 'cancelled') && - needs.files-changed.outputs.python == 'true' && - (github.base_ref == 'stable' || github.base_ref == 'develop') - needs: ["files-changed", "yaml-lint", "python-lint"] - runs-on: - group: "huge-runners" - timeout-minutes: 30 - steps: - - name: "Check out repository code" - uses: "actions/checkout@v4" + # NOTE: Disabling this test for now because it's expected that we can't start the latest version of infrahub + # with the current shipping version of infrahub-testcontainers + # integration-tests-local-infrahub: + # if: | + # always() && !cancelled() && + # !contains(needs.*.result, 'failure') && + # !contains(needs.*.result, 'cancelled') && + # needs.files-changed.outputs.python == 'true' && + # (github.base_ref == 'stable' || github.base_ref == 'develop') + # needs: ["files-changed", "yaml-lint", "python-lint"] + # runs-on: + # group: "huge-runners" + # timeout-minutes: 30 + # steps: + # - name: "Check out repository code" + # uses: "actions/checkout@v4" - - name: "Extract target branch name" - id: extract_branch - run: echo "TARGET_BRANCH=${{ github.base_ref }}" >> $GITHUB_ENV + # - name: "Extract target branch name" + # id: extract_branch + # run: echo "TARGET_BRANCH=${{ github.base_ref }}" >> $GITHUB_ENV - - name: "Checkout infrahub repository" - uses: "actions/checkout@v4" - with: - repository: "opsmill/infrahub" - path: "infrahub-server" - ref: ${{ github.base_ref }} - submodules: true + # - name: "Checkout infrahub repository" + # uses: "actions/checkout@v4" + # with: + # repository: "opsmill/infrahub" + # path: "infrahub-server" + # ref: ${{ github.base_ref }} + # submodules: true - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.12" + # - name: Set up Python + # uses: actions/setup-python@v5 + # with: + # python-version: "3.12" - - name: "Setup git credentials prior dev.build" - run: | - cd infrahub-server - git config --global user.name 'Infrahub' - git config --global user.email 'infrahub@opsmill.com' - git config --global --add safe.directory '*' - git config --global credential.usehttppath true - git config --global credential.helper /usr/local/bin/infrahub-git-credential + # - name: "Setup git credentials prior dev.build" + # run: | + # cd infrahub-server + # git config --global user.name 'Infrahub' + # git config --global user.email 'infrahub@opsmill.com' + # git config --global --add safe.directory '*' + # git config --global credential.usehttppath true + # git config --global credential.helper /usr/local/bin/infrahub-git-credential - - name: "Set environment variables prior dev.build" - run: | - echo "INFRAHUB_BUILD_NAME=infrahub-${{ runner.name }}" >> $GITHUB_ENV - RUNNER_NAME=$(echo "${{ runner.name }}" | grep -o 'ghrunner[0-9]\+' | sed 
's/ghrunner\([0-9]\+\)/ghrunner_\1/') - echo "PYTEST_DEBUG_TEMPROOT=/var/lib/github/${RUNNER_NAME}/_temp" >> $GITHUB_ENV - echo "INFRAHUB_IMAGE_VER=local-${{ runner.name }}-${{ github.sha }}" >> $GITHUB_ENV - echo "INFRAHUB_TESTING_IMAGE_VER=local-${{ runner.name }}-${{ github.sha }}" >> $GITHUB_ENV - echo "INFRAHUB_TESTING_DOCKER_IMAGE=opsmill/infrahub" >> $GITHUB_ENV + # - name: "Set environment variables prior dev.build" + # run: | + # echo "INFRAHUB_BUILD_NAME=infrahub-${{ runner.name }}" >> $GITHUB_ENV + # RUNNER_NAME=$(echo "${{ runner.name }}" | grep -o 'ghrunner[0-9]\+' | sed 's/ghrunner\([0-9]\+\)/ghrunner_\1/') + # echo "PYTEST_DEBUG_TEMPROOT=/var/lib/github/${RUNNER_NAME}/_temp" >> $GITHUB_ENV + # echo "INFRAHUB_IMAGE_VER=local-${{ runner.name }}-${{ github.sha }}" >> $GITHUB_ENV + # echo "INFRAHUB_TESTING_IMAGE_VER=local-${{ runner.name }}-${{ github.sha }}" >> $GITHUB_ENV + # echo "INFRAHUB_TESTING_DOCKER_IMAGE=opsmill/infrahub" >> $GITHUB_ENV - - name: "Build container" - run: | - cd infrahub-server - inv dev.build + # - name: "Build container" + # run: | + # cd infrahub-server + # inv dev.build - - name: "Setup environment" - run: | - pipx install poetry==1.8.5 - poetry config virtualenvs.create true --local - pip install invoke toml codecov + # - name: "Setup environment" + # run: | + # pipx install poetry==1.8.5 + # poetry config virtualenvs.create true --local + # pip install invoke toml codecov - - name: "Install Package" - run: "poetry install --all-extras" + # - name: "Install Package" + # run: "poetry install --all-extras" - - name: "Integration Tests" - run: | - echo "Running tests for version: $INFRAHUB_TESTING_IMAGE_VER" - poetry run pytest --cov infrahub_sdk tests/integration/ + # - name: "Integration Tests" + # run: | + # echo "Running tests for version: $INFRAHUB_TESTING_IMAGE_VER" + # poetry run pytest --cov infrahub_sdk tests/integration/ - - name: "Upload coverage to Codecov" - run: | - codecov --flags integration-tests - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + # - name: "Upload coverage to Codecov" + # run: | + # codecov --flags integration-tests + # env: + # CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 8c57d1c8..8b242f64 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,21 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang +## [1.8.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.8.0) - 2025-03-19 + +### Deprecated + +- Timestamp: Direct access to `obj` and `add_delta` have been deprecated and will be removed in a future version. ([#255](https://github.com/opsmill/infrahub-sdk-python/issues/255)) + +### Added + +- Added support for Enum in GraphQL query and mutation. ([#18](https://github.com/opsmill/infrahub-sdk-python/issues/18)) + +### Fixed + +- Refactored Timestamp to use `whenever` instead of `pendulum` and extend Timestamp with `add()`, `subtract()`, and `to_datetime()`. ([#255](https://github.com/opsmill/infrahub-sdk-python/issues/255)) +- Fixed support for Python 3.13 as it's no longer required to have Rust installed on the system. + ## [1.7.2](https://github.com/opsmill/infrahub-sdk-python/tree/v1.7.2) - 2025-03-07 ### Added @@ -26,6 +41,14 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang ### Fixed - `protocols` CTL command properly gets default branch setting from environment variable. 
([#104](https://github.com/opsmill/infrahub-sdk-python/issues/104)) +- Fix typing for Python 3.9 ([#251](https://github.com/opsmill/infrahub-sdk-python/issues/251)) +- Refactor Timestamp to use `whenever` instead of `pendulum` and extend Timestamp with add(), subtract(), and to_datetime(). ([#255](https://github.com/opsmill/infrahub-sdk-python/issues/255)) +- Remove default value "main" for branch parameter from all Infrahub CTL commands. ([#264](https://github.com/opsmill/infrahub-sdk-python/issues/264)) +- Fixed support for Python 3.13, it's no longer required to have Rust installed on the system. + +### Housekeeping + +- Move the function `read_file` from the ctl module to the SDK. - Fixed typing for Python 3.9 and removed support for Python 3.13. ([#251](https://github.com/opsmill/infrahub-sdk-python/issues/251)) - Removed default value "main" for branch parameter from all Infrahub CTL commands. ([#264](https://github.com/opsmill/infrahub-sdk-python/issues/264)) diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index 0d689afc..00664009 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -52,6 +52,7 @@ from .query_groups import InfrahubGroupContext, InfrahubGroupContextSync from .schema import InfrahubSchema, InfrahubSchemaSync, NodeSchemaAPI from .store import NodeStore, NodeStoreSync +from .task.manager import InfrahubTaskManager, InfrahubTaskManagerSync from .timestamp import Timestamp from .types import AsyncRequester, HTTPMethod, Order, SyncRequester from .utils import decode_json, get_user_permissions, is_valid_uuid @@ -59,6 +60,8 @@ if TYPE_CHECKING: from types import TracebackType + from .context import RequestContext + SchemaType = TypeVar("SchemaType", bound=CoreNode) SchemaTypeSync = TypeVar("SchemaTypeSync", bound=CoreNodeSync) @@ -140,6 +143,7 @@ def __init__( self.identifier = self.config.identifier self.group_context: InfrahubGroupContext | InfrahubGroupContextSync self._initialize() + self._request_context: RequestContext | None = None def _initialize(self) -> None: """Sets the properties for each version of the client""" @@ -154,6 +158,14 @@ def _echo(self, url: str, query: str, variables: dict | None = None) -> None: if variables: print(f"VARIABLES:\n{ujson.dumps(variables, indent=4)}\n") + @property + def request_context(self) -> RequestContext | None: + return self._request_context + + @request_context.setter + def request_context(self, request_context: RequestContext) -> None: + self._request_context = request_context + def start_tracking( self, identifier: str | None = None, @@ -270,6 +282,7 @@ def _initialize(self) -> None: self.branch = InfrahubBranchManager(self) self.object_store = ObjectStore(self) self.store = NodeStore() + self.task = InfrahubTaskManager(self) self.concurrent_execution_limit = asyncio.Semaphore(self.max_concurrent_execution) self._request_method: AsyncRequester = self.config.requester or self._default_request_method self.group_context = InfrahubGroupContext(self) @@ -1505,6 +1518,11 @@ async def __aexit__( class InfrahubClientSync(BaseClient): + schema: InfrahubSchemaSync + branch: InfrahubBranchManagerSync + object_store: ObjectStoreSync + store: NodeStoreSync + task: InfrahubTaskManagerSync group_context: InfrahubGroupContextSync def _initialize(self) -> None: @@ -1512,6 +1530,7 @@ def _initialize(self) -> None: self.branch = InfrahubBranchManagerSync(self) self.object_store = ObjectStoreSync(self) self.store = NodeStoreSync() + self.task = InfrahubTaskManagerSync(self) self._request_method: SyncRequester = 
self.config.sync_requester or self._default_request_method self.group_context = InfrahubGroupContextSync(self) diff --git a/infrahub_sdk/context.py b/infrahub_sdk/context.py new file mode 100644 index 00000000..201a9ef9 --- /dev/null +++ b/infrahub_sdk/context.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +from pydantic import BaseModel, Field + + +class ContextAccount(BaseModel): + id: str = Field(..., description="The ID of the account") + + +class RequestContext(BaseModel): + """The context can be used to override settings such as the account within mutations.""" + + account: ContextAccount | None = Field(default=None, description="Account tied to the context") diff --git a/infrahub_sdk/ctl/branch.py b/infrahub_sdk/ctl/branch.py index f77aa073..b44be462 100644 --- a/infrahub_sdk/ctl/branch.py +++ b/infrahub_sdk/ctl/branch.py @@ -5,9 +5,10 @@ from rich.table import Table from ..async_typer import AsyncTyper -from ..ctl.client import initialize_client -from ..ctl.utils import calculate_time_diff, catch_exception +from ..utils import calculate_time_diff +from .client import initialize_client from .parameters import CONFIG_PARAM +from .utils import catch_exception app = AsyncTyper() console = Console() diff --git a/infrahub_sdk/ctl/utils.py b/infrahub_sdk/ctl/utils.py index 4c627119..898095c7 100644 --- a/infrahub_sdk/ctl/utils.py +++ b/infrahub_sdk/ctl/utils.py @@ -8,11 +8,9 @@ from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, NoReturn, Optional, TypeVar -import pendulum import typer from click.exceptions import Exit from httpx import HTTPError -from pendulum.datetime import DateTime from rich.console import Console from rich.logging import RichHandler from rich.markup import escape @@ -152,20 +150,6 @@ def parse_cli_vars(variables: Optional[list[str]]) -> dict[str, str]: return {var.split("=")[0]: var.split("=")[1] for var in variables if "=" in var} -def calculate_time_diff(value: str) -> str | None: - """Calculate the time in human format between a timedate in string format and now.""" - try: - time_value = pendulum.parse(value) - except pendulum.parsing.exceptions.ParserError: - return None - - if not isinstance(time_value, DateTime): - return None - - pendulum.set_locale("en") - return time_value.diff_for_humans(other=pendulum.now(), absolute=True) - - def find_graphql_query(name: str, directory: str | Path = ".") -> str: if isinstance(directory, str): directory = Path(directory) diff --git a/infrahub_sdk/exceptions.py b/infrahub_sdk/exceptions.py index 8b5b4b43..257ce6b4 100644 --- a/infrahub_sdk/exceptions.py +++ b/infrahub_sdk/exceptions.py @@ -143,3 +143,9 @@ class FileNotValidError(Error): def __init__(self, name: str, message: str = ""): self.message = message or f"Cannot parse '{name}' content." 
super().__init__(self.message) + + +class TimestampFormatError(Error): + def __init__(self, message: str | None = None): + self.message = message or "Invalid timestamp format" + super().__init__(self.message) diff --git a/infrahub_sdk/generator.py b/infrahub_sdk/generator.py index 3ba6c767..831d9c98 100644 --- a/infrahub_sdk/generator.py +++ b/infrahub_sdk/generator.py @@ -11,6 +11,7 @@ if TYPE_CHECKING: from .client import InfrahubClient + from .context import RequestContext from .node import InfrahubNode from .store import NodeStore @@ -29,6 +30,7 @@ def __init__( params: dict | None = None, convert_query_response: bool = False, logger: logging.Logger | None = None, + request_context: RequestContext | None = None, ) -> None: self.query = query self.branch = branch @@ -44,6 +46,7 @@ def __init__( self.infrahub_node = infrahub_node self.convert_query_response = convert_query_response self.logger = logger if logger else logging.getLogger("infrahub.tasks") + self.request_context = request_context @property def store(self) -> NodeStore: diff --git a/infrahub_sdk/graphql.py b/infrahub_sdk/graphql.py index 9b7722da..abb12d8b 100644 --- a/infrahub_sdk/graphql.py +++ b/infrahub_sdk/graphql.py @@ -1,5 +1,6 @@ from __future__ import annotations +from enum import Enum from typing import Any from pydantic import BaseModel @@ -7,19 +8,30 @@ VARIABLE_TYPE_MAPPING = ((str, "String!"), (int, "Int!"), (float, "Float!"), (bool, "Boolean!")) -def convert_to_graphql_as_string(value: str | bool | list) -> str: +def convert_to_graphql_as_string(value: str | bool | list | BaseModel | Enum | Any, convert_enum: bool = False) -> str: # noqa: PLR0911 if isinstance(value, str) and value.startswith("$"): return value + if isinstance(value, Enum): + if convert_enum: + return convert_to_graphql_as_string(value=value.value, convert_enum=True) + return value.name if isinstance(value, str): return f'"{value}"' if isinstance(value, bool): return repr(value).lower() if isinstance(value, list): - values_as_string = [convert_to_graphql_as_string(item) for item in value] + values_as_string = [convert_to_graphql_as_string(value=item, convert_enum=convert_enum) for item in value] return "[" + ", ".join(values_as_string) + "]" if isinstance(value, BaseModel): data = value.model_dump() - return "{ " + ", ".join(f"{key}: {convert_to_graphql_as_string(val)}" for key, val in data.items()) + " }" + return ( + "{ " + + ", ".join( + f"{key}: {convert_to_graphql_as_string(value=val, convert_enum=convert_enum)}" + for key, val in data.items() + ) + + " }" + ) return str(value) @@ -38,7 +50,7 @@ def render_variables_to_string(data: dict[str, type[str | int | float | bool]]) return ", ".join([f"{key}: {value}" for key, value in vars_dict.items()]) -def render_query_block(data: dict, offset: int = 4, indentation: int = 4) -> list[str]: +def render_query_block(data: dict, offset: int = 4, indentation: int = 4, convert_enum: bool = False) -> list[str]: FILTERS_KEY = "@filters" ALIAS_KEY = "@alias" KEYWORDS_TO_SKIP = [FILTERS_KEY, ALIAS_KEY] @@ -60,25 +72,36 @@ def render_query_block(data: dict, offset: int = 4, indentation: int = 4) -> lis if value.get(FILTERS_KEY): filters_str = ", ".join( - [f"{key2}: {convert_to_graphql_as_string(value2)}" for key2, value2 in value[FILTERS_KEY].items()] + [ + f"{key2}: {convert_to_graphql_as_string(value=value2, convert_enum=convert_enum)}" + for key2, value2 in value[FILTERS_KEY].items() + ] ) lines.append(f"{offset_str}{key_str}({filters_str}) " + "{") else: lines.append(f"{offset_str}{key_str} " + "{") - 
lines.extend(render_query_block(data=value, offset=offset + indentation, indentation=indentation)) + lines.extend( + render_query_block( + data=value, offset=offset + indentation, indentation=indentation, convert_enum=convert_enum + ) + ) lines.append(offset_str + "}") return lines -def render_input_block(data: dict, offset: int = 4, indentation: int = 4) -> list[str]: +def render_input_block(data: dict, offset: int = 4, indentation: int = 4, convert_enum: bool = False) -> list[str]: offset_str = " " * offset lines = [] for key, value in data.items(): if isinstance(value, dict): lines.append(f"{offset_str}{key}: " + "{") - lines.extend(render_input_block(data=value, offset=offset + indentation, indentation=indentation)) + lines.extend( + render_input_block( + data=value, offset=offset + indentation, indentation=indentation, convert_enum=convert_enum + ) + ) lines.append(offset_str + "}") elif isinstance(value, list): lines.append(f"{offset_str}{key}: " + "[") @@ -90,14 +113,17 @@ def render_input_block(data: dict, offset: int = 4, indentation: int = 4) -> lis data=item, offset=offset + indentation + indentation, indentation=indentation, + convert_enum=convert_enum, ) ) lines.append(f"{offset_str}{' ' * indentation}" + "},") else: - lines.append(f"{offset_str}{' ' * indentation}{convert_to_graphql_as_string(item)},") + lines.append( + f"{offset_str}{' ' * indentation}{convert_to_graphql_as_string(value=item, convert_enum=convert_enum)}," + ) lines.append(offset_str + "]") else: - lines.append(f"{offset_str}{key}: {convert_to_graphql_as_string(value)}") + lines.append(f"{offset_str}{key}: {convert_to_graphql_as_string(value=value, convert_enum=convert_enum)}") return lines @@ -127,9 +153,13 @@ def render_first_line(self) -> str: class Query(BaseGraphQLQuery): query_type = "query" - def render(self) -> str: + def render(self, convert_enum: bool = False) -> str: lines = [self.render_first_line()] - lines.extend(render_query_block(data=self.query, indentation=self.indentation, offset=self.indentation)) + lines.extend( + render_query_block( + data=self.query, indentation=self.indentation, offset=self.indentation, convert_enum=convert_enum + ) + ) lines.append("}") return "\n" + "\n".join(lines) + "\n" @@ -143,7 +173,7 @@ def __init__(self, *args: Any, mutation: str, input_data: dict, **kwargs: Any): self.mutation = mutation super().__init__(*args, **kwargs) - def render(self) -> str: + def render(self, convert_enum: bool = False) -> str: lines = [self.render_first_line()] lines.append(" " * self.indentation + f"{self.mutation}(") lines.extend( @@ -151,6 +181,7 @@ def render(self) -> str: data=self.input_data, indentation=self.indentation, offset=self.indentation * 2, + convert_enum=convert_enum, ) ) lines.append(" " * self.indentation + "){") @@ -159,6 +190,7 @@ def render(self) -> str: data=self.query, indentation=self.indentation, offset=self.indentation * 2, + convert_enum=convert_enum, ) ) lines.append(" " * self.indentation + "}") diff --git a/infrahub_sdk/node.py b/infrahub_sdk/node.py index fde6eb93..864abf1c 100644 --- a/infrahub_sdk/node.py +++ b/infrahub_sdk/node.py @@ -22,6 +22,7 @@ from typing_extensions import Self from .client import InfrahubClient, InfrahubClientSync + from .context import RequestContext from .schema import AttributeSchemaAPI, MainSchemaTypesAPI, RelationshipSchemaAPI from .types import Order @@ -769,6 +770,16 @@ def _init_attributes(self, data: dict | None = None) -> None: Attribute(name=attr_name, schema=attr_schema, data=attr_data), ) + def 
_get_request_context(self, request_context: RequestContext | None = None) -> dict[str, Any] | None: + if request_context: + return request_context.model_dump(exclude_none=True) + + client: InfrahubClient | InfrahubClientSync | None = getattr(self, "_client", None) + if not client or not client.request_context: + return None + + return client.request_context.model_dump(exclude_none=True) + def _init_relationships(self, data: dict | None = None) -> None: pass @@ -797,7 +808,12 @@ def is_resource_pool(self) -> bool: def get_raw_graphql_data(self) -> dict | None: return self._data - def _generate_input_data(self, exclude_unmodified: bool = False, exclude_hfid: bool = False) -> dict[str, dict]: # noqa: C901 + def _generate_input_data( # noqa: C901 + self, + exclude_unmodified: bool = False, + exclude_hfid: bool = False, + request_context: RequestContext | None = None, + ) -> dict[str, dict]: """Generate a dictionary that represent the input data required by a mutation. Returns: @@ -875,7 +891,15 @@ def _generate_input_data(self, exclude_unmodified: bool = False, exclude_hfid: b elif self.hfid is not None and not exclude_hfid: data["hfid"] = self.hfid - return {"data": {"data": data}, "variables": variables, "mutation_variables": mutation_variables} + mutation_payload = {"data": data} + if context_data := self._get_request_context(request_context=request_context): + mutation_payload["context"] = context_data + + return { + "data": mutation_payload, + "variables": variables, + "mutation_variables": mutation_variables, + } @staticmethod def _strip_unmodified_dict(data: dict, original_data: dict, variables: dict, item: str) -> None: @@ -1132,8 +1156,11 @@ async def artifact_fetch(self, name: str) -> str | dict[str, Any]: content = await self._client.object_store.get(identifier=artifact.storage_id.value) # type: ignore[attr-defined] return content - async def delete(self, timeout: int | None = None) -> None: + async def delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None: input_data = {"data": {"id": self.id}} + if context_data := self._get_request_context(request_context=request_context): + input_data["context"] = context_data + mutation_query = {"ok": None} query = Mutation( mutation=f"{self._schema.kind}Delete", @@ -1148,12 +1175,16 @@ async def delete(self, timeout: int | None = None) -> None: ) async def save( - self, allow_upsert: bool = False, update_group_context: bool | None = None, timeout: int | None = None + self, + allow_upsert: bool = False, + update_group_context: bool | None = None, + timeout: int | None = None, + request_context: RequestContext | None = None, ) -> None: if self._existing is False or allow_upsert is True: - await self.create(allow_upsert=allow_upsert, timeout=timeout) + await self.create(allow_upsert=allow_upsert, timeout=timeout, request_context=request_context) else: - await self.update(timeout=timeout) + await self.update(timeout=timeout, request_context=request_context) if update_group_context is None and self._client.mode == InfrahubClientMode.TRACKING: update_group_context = True @@ -1382,15 +1413,17 @@ async def _process_mutation_result( await related_node.fetch(timeout=timeout) setattr(self, rel_name, related_node) - async def create(self, allow_upsert: bool = False, timeout: int | None = None) -> None: + async def create( + self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None + ) -> None: mutation_query = self._generate_mutation_query() if allow_upsert: - 
input_data = self._generate_input_data(exclude_hfid=False) + input_data = self._generate_input_data(exclude_hfid=False, request_context=request_context) mutation_name = f"{self._schema.kind}Upsert" tracker = f"mutation-{str(self._schema.kind).lower()}-upsert" else: - input_data = self._generate_input_data(exclude_hfid=True) + input_data = self._generate_input_data(exclude_hfid=True, request_context=request_context) mutation_name = f"{self._schema.kind}Create" tracker = f"mutation-{str(self._schema.kind).lower()}-create" query = Mutation( @@ -1408,8 +1441,10 @@ async def create(self, allow_upsert: bool = False, timeout: int | None = None) - ) await self._process_mutation_result(mutation_name=mutation_name, response=response, timeout=timeout) - async def update(self, do_full_update: bool = False, timeout: int | None = None) -> None: - input_data = self._generate_input_data(exclude_unmodified=not do_full_update) + async def update( + self, do_full_update: bool = False, timeout: int | None = None, request_context: RequestContext | None = None + ) -> None: + input_data = self._generate_input_data(exclude_unmodified=not do_full_update, request_context=request_context) mutation_query = self._generate_mutation_query() mutation_name = f"{self._schema.kind}Update" @@ -1648,8 +1683,11 @@ def artifact_fetch(self, name: str) -> str | dict[str, Any]: content = self._client.object_store.get(identifier=artifact.storage_id.value) # type: ignore[attr-defined] return content - def delete(self, timeout: int | None = None) -> None: + def delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None: input_data = {"data": {"id": self.id}} + if context_data := self._get_request_context(request_context=request_context): + input_data["context"] = context_data + mutation_query = {"ok": None} query = Mutation( mutation=f"{self._schema.kind}Delete", @@ -1664,12 +1702,16 @@ def delete(self, timeout: int | None = None) -> None: ) def save( - self, allow_upsert: bool = False, update_group_context: bool | None = None, timeout: int | None = None + self, + allow_upsert: bool = False, + update_group_context: bool | None = None, + timeout: int | None = None, + request_context: RequestContext | None = None, ) -> None: if self._existing is False or allow_upsert is True: - self.create(allow_upsert=allow_upsert, timeout=timeout) + self.create(allow_upsert=allow_upsert, timeout=timeout, request_context=request_context) else: - self.update(timeout=timeout) + self.update(timeout=timeout, request_context=request_context) if update_group_context is None and self._client.mode == InfrahubClientMode.TRACKING: update_group_context = True @@ -1893,15 +1935,17 @@ def _process_mutation_result( related_node.fetch(timeout=timeout) setattr(self, rel_name, related_node) - def create(self, allow_upsert: bool = False, timeout: int | None = None) -> None: + def create( + self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None + ) -> None: mutation_query = self._generate_mutation_query() if allow_upsert: - input_data = self._generate_input_data(exclude_hfid=False) + input_data = self._generate_input_data(exclude_hfid=False, request_context=request_context) mutation_name = f"{self._schema.kind}Upsert" tracker = f"mutation-{str(self._schema.kind).lower()}-upsert" else: - input_data = self._generate_input_data(exclude_hfid=True) + input_data = self._generate_input_data(exclude_hfid=True, request_context=request_context) mutation_name = f"{self._schema.kind}Create" 
tracker = f"mutation-{str(self._schema.kind).lower()}-create" query = Mutation( @@ -1920,8 +1964,10 @@ def create(self, allow_upsert: bool = False, timeout: int | None = None) -> None ) self._process_mutation_result(mutation_name=mutation_name, response=response, timeout=timeout) - def update(self, do_full_update: bool = False, timeout: int | None = None) -> None: - input_data = self._generate_input_data(exclude_unmodified=not do_full_update) + def update( + self, do_full_update: bool = False, timeout: int | None = None, request_context: RequestContext | None = None + ) -> None: + input_data = self._generate_input_data(exclude_unmodified=not do_full_update, request_context=request_context) mutation_query = self._generate_mutation_query() mutation_name = f"{self._schema.kind}Update" diff --git a/infrahub_sdk/protocols.py b/infrahub_sdk/protocols.py index 14b04c7b..2ec1d0f3 100644 --- a/infrahub_sdk/protocols.py +++ b/infrahub_sdk/protocols.py @@ -29,6 +29,7 @@ StringOptional, ) +# pylint: disable=too-many-ancestors # --------------------------------------------- # ASYNC @@ -72,7 +73,7 @@ class CoreArtifactTarget(CoreNode): class CoreBasePermission(CoreNode): - decision: Enum + description: StringOptional identifier: StringOptional roles: RelationshipManager @@ -107,7 +108,6 @@ class CoreGenericAccount(CoreNode): label: StringOptional description: StringOptional account_type: Enum - role: Enum status: Dropdown tokens: RelationshipManager @@ -154,6 +154,10 @@ class CoreMenu(CoreNode): children: RelationshipManager +class CoreObjectTemplate(CoreNode): + template_name: String + + class CoreProfile(CoreNode): profile_name: String profile_priority: IntegerOptional @@ -199,6 +203,8 @@ class CoreValidator(CoreNode): class CoreWebhook(CoreNode): name: String + event_type: Enum + branch_scope: Dropdown description: StringOptional url: URL validate_certificates: BooleanOptional @@ -221,7 +227,7 @@ class CoreAccount(LineageOwner, LineageSource, CoreGenericAccount): pass -class CoreAccountGroup(CoreGroup): +class CoreAccountGroup(LineageOwner, LineageSource, CoreGroup): roles: RelationshipManager @@ -349,8 +355,8 @@ class CoreGeneratorValidator(CoreValidator): class CoreGlobalPermission(CoreBasePermission): - name: String action: Dropdown + decision: Enum class CoreGraphQLQuery(CoreNode): @@ -401,6 +407,7 @@ class CoreObjectPermission(CoreBasePermission): namespace: String name: String action: Enum + decision: Enum class CoreObjectThread(CoreThread): @@ -537,7 +544,7 @@ class CoreArtifactTargetSync(CoreNodeSync): class CoreBasePermissionSync(CoreNodeSync): - decision: Enum + description: StringOptional identifier: StringOptional roles: RelationshipManagerSync @@ -572,7 +579,6 @@ class CoreGenericAccountSync(CoreNodeSync): label: StringOptional description: StringOptional account_type: Enum - role: Enum status: Dropdown tokens: RelationshipManagerSync @@ -619,6 +625,10 @@ class CoreMenuSync(CoreNodeSync): children: RelationshipManagerSync +class CoreObjectTemplateSync(CoreNodeSync): + template_name: String + + class CoreProfileSync(CoreNodeSync): profile_name: String profile_priority: IntegerOptional @@ -664,6 +674,8 @@ class CoreValidatorSync(CoreNodeSync): class CoreWebhookSync(CoreNodeSync): name: String + event_type: Enum + branch_scope: Dropdown description: StringOptional url: URL validate_certificates: BooleanOptional @@ -686,7 +698,7 @@ class CoreAccountSync(LineageOwnerSync, LineageSourceSync, CoreGenericAccountSyn pass -class CoreAccountGroupSync(CoreGroupSync): +class 
CoreAccountGroupSync(LineageOwnerSync, LineageSourceSync, CoreGroupSync): roles: RelationshipManagerSync @@ -814,8 +826,8 @@ class CoreGeneratorValidatorSync(CoreValidatorSync): class CoreGlobalPermissionSync(CoreBasePermissionSync): - name: String action: Dropdown + decision: Enum class CoreGraphQLQuerySync(CoreNodeSync): @@ -866,6 +878,7 @@ class CoreObjectPermissionSync(CoreBasePermissionSync): namespace: String name: String action: Enum + decision: Enum class CoreObjectThreadSync(CoreThreadSync): diff --git a/infrahub_sdk/protocols_base.py b/infrahub_sdk/protocols_base.py index 4c227117..c634d37f 100644 --- a/infrahub_sdk/protocols_base.py +++ b/infrahub_sdk/protocols_base.py @@ -5,6 +5,7 @@ if TYPE_CHECKING: import ipaddress + from .context import RequestContext from .schema import MainSchemaTypes @@ -169,13 +170,23 @@ def extract(self, params: dict[str, str]) -> dict[str, Any]: ... @runtime_checkable class CoreNode(CoreNodeBase, Protocol): - async def save(self, allow_upsert: bool = False, update_group_context: bool | None = None) -> None: ... + async def save( + self, + allow_upsert: bool = False, + update_group_context: bool | None = None, + timeout: int | None = None, + request_context: RequestContext | None = None, + ) -> None: ... - async def delete(self) -> None: ... + async def delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None: ... - async def update(self, do_full_update: bool) -> None: ... + async def update( + self, do_full_update: bool, timeout: int | None = None, request_context: RequestContext | None = None + ) -> None: ... - async def create(self, allow_upsert: bool = False) -> None: ... + async def create( + self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None + ) -> None: ... async def add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None: ... @@ -184,13 +195,23 @@ async def remove_relationships(self, relation_to_update: str, related_nodes: lis @runtime_checkable class CoreNodeSync(CoreNodeBase, Protocol): - def save(self, allow_upsert: bool = False, update_group_context: bool | None = None) -> None: ... - - def delete(self) -> None: ... - - def update(self, do_full_update: bool) -> None: ... - - def create(self, allow_upsert: bool = False) -> None: ... + def save( + self, + allow_upsert: bool = False, + update_group_context: bool | None = None, + timeout: int | None = None, + request_context: RequestContext | None = None, + ) -> None: ... + + def delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None: ... + + def update( + self, do_full_update: bool, timeout: int | None = None, request_context: RequestContext | None = None + ) -> None: ... + + def create( + self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None + ) -> None: ... def add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None: ... 
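Review note: the RequestContext plumbing added above (context.py, the request_context property on the client, and the request_context parameters on save/create/update/delete in node.py and protocols_base.py) composes as sketched below. This is an illustrative sketch, not part of the diff; the server address, account IDs, and the BuiltinTag kind/name are placeholder assumptions.

    from infrahub_sdk import InfrahubClientSync
    from infrahub_sdk.context import ContextAccount, RequestContext

    client = InfrahubClientSync(address="http://localhost:8000")  # placeholder address

    # Client-wide default: every mutation built by _generate_input_data() will
    # now carry a "context" payload with this account override.
    client.request_context = RequestContext(account=ContextAccount(id="account-uuid"))  # placeholder ID

    tag = client.create(kind="BuiltinTag", name="edge")  # placeholder kind and name
    tag.save(allow_upsert=True)

    # Per-call override: a context passed directly to save()/delete() wins over
    # the one stored on the client (see _get_request_context in node.py).
    tag.delete(request_context=RequestContext(account=ContextAccount(id="other-account-uuid")))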
diff --git a/infrahub_sdk/schema/__init__.py b/infrahub_sdk/schema/__init__.py index 790b5873..9b23fe49 100644 --- a/infrahub_sdk/schema/__init__.py +++ b/infrahub_sdk/schema/__init__.py @@ -34,6 +34,7 @@ RelationshipSchemaAPI, SchemaRoot, SchemaRootAPI, + TemplateSchemaAPI, ) if TYPE_CHECKING: @@ -58,6 +59,7 @@ "RelationshipSchemaAPI", "SchemaRoot", "SchemaRootAPI", + "TemplateSchemaAPI", ] @@ -78,8 +80,10 @@ class EnumMutation(str, Enum): MainSchemaTypes: TypeAlias = Union[NodeSchema, GenericSchema] -MainSchemaTypesAPI: TypeAlias = Union[NodeSchemaAPI, GenericSchemaAPI, ProfileSchemaAPI] -MainSchemaTypesAll: TypeAlias = Union[NodeSchema, GenericSchema, NodeSchemaAPI, GenericSchemaAPI, ProfileSchemaAPI] +MainSchemaTypesAPI: TypeAlias = Union[NodeSchemaAPI, GenericSchemaAPI, ProfileSchemaAPI, TemplateSchemaAPI] +MainSchemaTypesAll: TypeAlias = Union[ + NodeSchema, GenericSchema, NodeSchemaAPI, GenericSchemaAPI, ProfileSchemaAPI, TemplateSchemaAPI +] class InfrahubSchemaBase: @@ -417,6 +421,10 @@ async def fetch( profile = ProfileSchemaAPI(**profile_schema) nodes[profile.kind] = profile + for template_schema in data.get("templates", []): + template = TemplateSchemaAPI(**template_schema) + nodes[template.kind] = template + return nodes @@ -621,6 +629,10 @@ def fetch( profile = ProfileSchemaAPI(**profile_schema) nodes[profile.kind] = profile + for template_schema in data.get("templates", []): + template = TemplateSchemaAPI(**template_schema) + nodes[template.kind] = template + return nodes def load( diff --git a/infrahub_sdk/schema/main.py b/infrahub_sdk/schema/main.py index 760d7430..57aaa890 100644 --- a/infrahub_sdk/schema/main.py +++ b/infrahub_sdk/schema/main.py @@ -31,6 +31,7 @@ class RelationshipKind(str, Enum): GROUP = "Group" HIERARCHY = "Hierarchy" PROFILE = "Profile" + TEMPLATE = "Template" class RelationshipDirection(str, Enum): @@ -290,6 +291,7 @@ class BaseNodeSchema(BaseSchema): branch: BranchSupportType | None = None default_filter: str | None = None generate_profile: bool | None = None + generate_template: bool | None = None parent: str | None = None children: str | None = None @@ -308,6 +310,10 @@ class ProfileSchemaAPI(BaseSchema, BaseSchemaAttrRelAPI): inherit_from: list[str] = Field(default_factory=list) +class TemplateSchemaAPI(BaseSchema, BaseSchemaAttrRelAPI): + inherit_from: list[str] = Field(default_factory=list) + + class NodeExtensionSchema(BaseModel): model_config = ConfigDict(use_enum_values=True) @@ -341,3 +347,4 @@ class SchemaRootAPI(BaseModel): generics: list[GenericSchemaAPI] = Field(default_factory=list) nodes: list[NodeSchemaAPI] = Field(default_factory=list) profiles: list[ProfileSchemaAPI] = Field(default_factory=list) + templates: list[TemplateSchemaAPI] = Field(default_factory=list) diff --git a/infrahub_sdk/task/__init__.py b/infrahub_sdk/task/__init__.py new file mode 100644 index 00000000..60180315 --- /dev/null +++ b/infrahub_sdk/task/__init__.py @@ -0,0 +1,11 @@ +from __future__ import annotations + +from .models import Task, TaskFilter, TaskLog, TaskRelatedNode, TaskState + +__all__ = [ + "Task", + "TaskFilter", + "TaskLog", + "TaskRelatedNode", + "TaskState", +] diff --git a/infrahub_sdk/task/constants.py b/infrahub_sdk/task/constants.py new file mode 100644 index 00000000..13917ff5 --- /dev/null +++ b/infrahub_sdk/task/constants.py @@ -0,0 +1,3 @@ +from .models import TaskState + +FINAL_STATES = [TaskState.COMPLETED, TaskState.FAILED, TaskState.CANCELLED, TaskState.CRASHED] diff --git a/infrahub_sdk/task/exceptions.py 
b/infrahub_sdk/task/exceptions.py new file mode 100644 index 00000000..add4e72e --- /dev/null +++ b/infrahub_sdk/task/exceptions.py @@ -0,0 +1,25 @@ +from __future__ import annotations + + +class TaskError(Exception): + def __init__(self, message: str | None = None): + self.message = message + super().__init__(self.message) + + +class TaskNotFoundError(TaskError): + def __init__(self, id: str): + self.message = f"Task with id {id} not found" + super().__init__(self.message) + + +class TooManyTasksError(TaskError): + def __init__(self, expected_id: str, received_ids: list[str]): + self.message = f"Expected 1 task with id {expected_id}, but got {len(received_ids)}" + super().__init__(self.message) + + +class TaskNotCompletedError(TaskError): + def __init__(self, id: str, message: str | None = None): + self.message = message or f"Task with id {id} is not completed" + super().__init__(self.message) diff --git a/infrahub_sdk/task/manager.py b/infrahub_sdk/task/manager.py new file mode 100644 index 00000000..910030dd --- /dev/null +++ b/infrahub_sdk/task/manager.py @@ -0,0 +1,551 @@ +from __future__ import annotations + +import asyncio +import time +from typing import TYPE_CHECKING, Any + +from ..graphql import Query +from .constants import FINAL_STATES +from .exceptions import TaskNotCompletedError, TaskNotFoundError, TooManyTasksError +from .models import Task, TaskFilter + +if TYPE_CHECKING: + from ..client import InfrahubClient, InfrahubClientSync + + +class InfraHubTaskManagerBase: + @classmethod + def _generate_query( + cls, + filters: TaskFilter | None = None, + include_logs: bool = False, + include_related_nodes: bool = False, + offset: int | None = None, + limit: int | None = None, + count: bool = False, + ) -> Query: + query: dict[str, Any] = { + "InfrahubTask": { + "edges": { + "node": { + "id": None, + "title": None, + "state": None, + "progress": None, + "workflow": None, + "branch": None, + "created_at": None, + "updated_at": None, + } + } + } + } + + if not filters and (offset or limit): + filters = TaskFilter(offset=offset, limit=limit) + elif filters and offset: + filters.offset = offset + elif filters and limit: + filters.limit = limit + + if filters: + query["InfrahubTask"]["@filters"] = filters.to_dict() + + if count: + query["InfrahubTask"]["count"] = None + + if include_logs: + query["InfrahubTask"]["edges"]["node"]["logs"] = { + "edges": { + "node": { + "message": None, + "severity": None, + "timestamp": None, + } + } + } + + if include_related_nodes: + query["InfrahubTask"]["edges"]["node"]["related_nodes"] = {"id": None, "kind": None} + + return Query(query=query) + + @classmethod + def _generate_count_query(cls, filters: TaskFilter | None = None) -> Query: + query: dict[str, Any] = { + "InfrahubTask": { + "count": None, + } + } + if filters: + query["InfrahubTask"]["@filters"] = filters.to_dict() + + return Query(query=query) + + +class InfrahubTaskManager(InfraHubTaskManagerBase): + client: InfrahubClient + + def __init__(self, client: InfrahubClient): + self.client = client + + async def count(self, filters: TaskFilter | None = None) -> int: + """Count the number of tasks. + + Args: + filters: The filter to apply to the tasks. Defaults to None. + + Returns: + The number of tasks. 
+ """ + + query = self._generate_count_query(filters=filters) + response = await self.client.execute_graphql( + query=query.render(convert_enum=False), tracker="query-tasks-count" + ) + return int(response["InfrahubTask"]["count"]) + + async def all( + self, + limit: int | None = None, + offset: int | None = None, + timeout: int | None = None, + parallel: bool = False, + include_logs: bool = False, + include_related_nodes: bool = False, + ) -> list[Task]: + """Get all tasks. + + Args: + limit: The maximum number of tasks to return. Defaults to None. + offset: The offset to start the tasks from. Defaults to None. + timeout: The timeout to wait for the tasks to complete. Defaults to None. + parallel: Whether to query the tasks in parallel. Defaults to False. + include_logs: Whether to include the logs in the tasks. Defaults to False. + include_related_nodes: Whether to include the related nodes in the tasks. Defaults to False. + + Returns: + A list of tasks. + """ + + return await self.filter( + limit=limit, + offset=offset, + timeout=timeout, + parallel=parallel, + include_logs=include_logs, + include_related_nodes=include_related_nodes, + ) + + async def filter( + self, + filter: TaskFilter | None = None, + limit: int | None = None, + offset: int | None = None, + timeout: int | None = None, + parallel: bool = False, + include_logs: bool = False, + include_related_nodes: bool = False, + ) -> list[Task]: + """Filter tasks. + + Args: + filter: The filter to apply to the tasks. Defaults to None. + limit: The maximum number of tasks to return. Defaults to None. + offset: The offset to start the tasks from. Defaults to None. + timeout: The timeout to wait for the tasks to complete. Defaults to None. + parallel: Whether to query the tasks in parallel. Defaults to False. + include_logs: Whether to include the logs in the tasks. Defaults to False. + include_related_nodes: Whether to include the related nodes in the tasks. Defaults to False. + + Returns: + A list of tasks. + """ + if filter is None: + filter = TaskFilter() + + if limit: + tasks, _ = await self.process_page( + self.client, self._generate_query(filters=filter, offset=offset, limit=limit, count=False), 1, timeout + ) + return tasks + + if parallel: + return await self.process_batch( + filters=filter, + timeout=timeout, + include_logs=include_logs, + include_related_nodes=include_related_nodes, + ) + + return await self.process_non_batch( + filters=filter, + offset=offset, + limit=limit, + timeout=timeout, + include_logs=include_logs, + include_related_nodes=include_related_nodes, + ) + + async def get(self, id: str, include_logs: bool = False, include_related_nodes: bool = False) -> Task: + tasks = await self.filter( + filter=TaskFilter(ids=[id]), + include_logs=include_logs, + include_related_nodes=include_related_nodes, + parallel=False, + ) + if not tasks: + raise TaskNotFoundError(id=id) + + if len(tasks) != 1: + raise TooManyTasksError(expected_id=id, received_ids=[task.id for task in tasks]) + + return tasks[0] + + async def wait_for_completion(self, id: str, interval: int = 1, timeout: int = 60) -> Task: + """Wait for a task to complete. + + Args: + id: The id of the task to wait for. + interval: The interval to check the task state. Defaults to 1. + timeout: The timeout to wait for the task to complete. Defaults to 60. + + Raises: + TaskNotCompletedError: The task did not complete in the given timeout. + + Returns: + The task object. 
+ """ + for _ in range(timeout // interval): + task = await self.get(id=id) + if task.state in FINAL_STATES: + return task + await asyncio.sleep(interval) + raise TaskNotCompletedError(id=id, message=f"Task {id} did not complete in {timeout} seconds") + + @staticmethod + async def process_page( + client: InfrahubClient, query: Query, page_number: int, timeout: int | None = None + ) -> tuple[list[Task], int | None]: + """Process a single page of results. + + Args: + client: The client to use to execute the query. + query: The query to execute. + page_number: The page number to process. + timeout: The timeout to wait for the query to complete. Defaults to None. + + Returns: + A tuple containing a list of tasks and the count of tasks. + """ + + response = await client.execute_graphql( + query=query.render(convert_enum=False), + tracker=f"query-tasks-page{page_number}", + timeout=timeout, + ) + count = response["InfrahubTask"].get("count", None) + return [Task.from_graphql(task["node"]) for task in response["InfrahubTask"]["edges"]], count + + async def process_batch( + self, + filters: TaskFilter | None = None, + timeout: int | None = None, + include_logs: bool = False, + include_related_nodes: bool = False, + ) -> list[Task]: + """Process queries in parallel mode.""" + pagination_size = self.client.pagination_size + tasks = [] + batch_process = await self.client.create_batch() + count = await self.count(filters=filters) + total_pages = (count + pagination_size - 1) // pagination_size + + for page_number in range(1, total_pages + 1): + page_offset = (page_number - 1) * pagination_size + query = self._generate_query( + filters=filters, + offset=page_offset, + limit=pagination_size, + include_logs=include_logs, + include_related_nodes=include_related_nodes, + count=False, + ) + batch_process.add( + task=self.process_page, client=self.client, query=query, page_number=page_number, timeout=timeout + ) + + async for _, (new_tasks, _) in batch_process.execute(): + tasks.extend(new_tasks) + + return tasks + + async def process_non_batch( + self, + filters: TaskFilter | None = None, + offset: int | None = None, + limit: int | None = None, + timeout: int | None = None, + include_logs: bool = False, + include_related_nodes: bool = False, + ) -> list[Task]: + """Process queries without parallel mode.""" + tasks = [] + has_remaining_items = True + page_number = 1 + + while has_remaining_items: + page_offset = (page_number - 1) * self.client.pagination_size + query = self._generate_query( + filters=filters, + offset=page_offset, + limit=self.client.pagination_size, + include_logs=include_logs, + include_related_nodes=include_related_nodes, + count=True, + ) + new_tasks, count = await self.process_page( + client=self.client, query=query, page_number=page_number, timeout=timeout + ) + if count is None: + raise ValueError("Count is None, a value must be retrieve from the query") + + tasks.extend(new_tasks) + remaining_items = count - (page_offset + self.client.pagination_size) + if remaining_items < 0 or offset is not None or limit is not None: + has_remaining_items = False + page_number += 1 + return tasks + + +class InfrahubTaskManagerSync(InfraHubTaskManagerBase): + client: InfrahubClientSync + + def __init__(self, client: InfrahubClientSync): + self.client = client + + def count(self, filters: TaskFilter | None = None) -> int: + """Count the number of tasks. + + Args: + filters: The filter to apply to the tasks. Defaults to None. + + Returns: + The number of tasks. 
+ """ + + query = self._generate_count_query(filters=filters) + response = self.client.execute_graphql(query=query.render(convert_enum=False), tracker="query-tasks-count") + return int(response["InfrahubTask"]["count"]) + + def all( + self, + limit: int | None = None, + offset: int | None = None, + timeout: int | None = None, + parallel: bool = False, + include_logs: bool = False, + include_related_nodes: bool = False, + ) -> list[Task]: + """Get all tasks. + + Args: + limit: The maximum number of tasks to return. Defaults to None. + offset: The offset to start the tasks from. Defaults to None. + timeout: The timeout to wait for the tasks to complete. Defaults to None. + parallel: Whether to query the tasks in parallel. Defaults to False. + include_logs: Whether to include the logs in the tasks. Defaults to False. + include_related_nodes: Whether to include the related nodes in the tasks. Defaults to False. + + Returns: + A list of tasks. + """ + + return self.filter( + limit=limit, + offset=offset, + timeout=timeout, + parallel=parallel, + include_logs=include_logs, + include_related_nodes=include_related_nodes, + ) + + def filter( + self, + filter: TaskFilter | None = None, + limit: int | None = None, + offset: int | None = None, + timeout: int | None = None, + parallel: bool = False, + include_logs: bool = False, + include_related_nodes: bool = False, + ) -> list[Task]: + """Filter tasks. + + Args: + filter: The filter to apply to the tasks. Defaults to None. + limit: The maximum number of tasks to return. Defaults to None. + offset: The offset to start the tasks from. Defaults to None. + timeout: The timeout to wait for the tasks to complete. Defaults to None. + parallel: Whether to query the tasks in parallel. Defaults to False. + include_logs: Whether to include the logs in the tasks. Defaults to False. + include_related_nodes: Whether to include the related nodes in the tasks. Defaults to False. + + Returns: + A list of tasks. + """ + if filter is None: + filter = TaskFilter() + + if limit: + tasks, _ = self.process_page( + self.client, self._generate_query(filters=filter, offset=offset, limit=limit, count=False), 1, timeout + ) + return tasks + + if parallel: + return self.process_batch( + filters=filter, + timeout=timeout, + include_logs=include_logs, + include_related_nodes=include_related_nodes, + ) + + return self.process_non_batch( + filters=filter, + offset=offset, + limit=limit, + timeout=timeout, + include_logs=include_logs, + include_related_nodes=include_related_nodes, + ) + + def get(self, id: str, include_logs: bool = False, include_related_nodes: bool = False) -> Task: + tasks = self.filter( + filter=TaskFilter(ids=[id]), + include_logs=include_logs, + include_related_nodes=include_related_nodes, + parallel=False, + ) + if not tasks: + raise TaskNotFoundError(id=id) + + if len(tasks) != 1: + raise TooManyTasksError(expected_id=id, received_ids=[task.id for task in tasks]) + + return tasks[0] + + def wait_for_completion(self, id: str, interval: int = 1, timeout: int = 60) -> Task: + """Wait for a task to complete. + + Args: + id: The id of the task to wait for. + interval: The interval to check the task state. Defaults to 1. + timeout: The timeout to wait for the task to complete. Defaults to 60. + + Raises: + TaskNotCompletedError: The task did not complete in the given timeout. + + Returns: + The task object. 
+ """ + for _ in range(timeout // interval): + task = self.get(id=id) + if task.state in FINAL_STATES: + return task + time.sleep(interval) + raise TaskNotCompletedError(id=id, message=f"Task {id} did not complete in {timeout} seconds") + + @staticmethod + def process_page( + client: InfrahubClientSync, query: Query, page_number: int, timeout: int | None = None + ) -> tuple[list[Task], int | None]: + """Process a single page of results. + + Args: + client: The client to use to execute the query. + query: The query to execute. + page_number: The page number to process. + timeout: The timeout to wait for the query to complete. Defaults to None. + + Returns: + A tuple containing a list of tasks and the count of tasks. + """ + + response = client.execute_graphql( + query=query.render(convert_enum=False), + tracker=f"query-tasks-page{page_number}", + timeout=timeout, + ) + count = response["InfrahubTask"].get("count", None) + return [Task.from_graphql(task["node"]) for task in response["InfrahubTask"]["edges"]], count + + def process_batch( + self, + filters: TaskFilter | None = None, + timeout: int | None = None, + include_logs: bool = False, + include_related_nodes: bool = False, + ) -> list[Task]: + """Process queries in parallel mode.""" + pagination_size = self.client.pagination_size + tasks = [] + batch_process = self.client.create_batch() + count = self.count(filters=filters) + total_pages = (count + pagination_size - 1) // pagination_size + + for page_number in range(1, total_pages + 1): + page_offset = (page_number - 1) * pagination_size + query = self._generate_query( + filters=filters, + offset=page_offset, + limit=pagination_size, + include_logs=include_logs, + include_related_nodes=include_related_nodes, + count=False, + ) + batch_process.add( + task=self.process_page, client=self.client, query=query, page_number=page_number, timeout=timeout + ) + + for _, (new_tasks, _) in batch_process.execute(): + tasks.extend(new_tasks) + + return tasks + + def process_non_batch( + self, + filters: TaskFilter | None = None, + offset: int | None = None, + limit: int | None = None, + timeout: int | None = None, + include_logs: bool = False, + include_related_nodes: bool = False, + ) -> list[Task]: + """Process queries without parallel mode.""" + tasks = [] + has_remaining_items = True + page_number = 1 + + while has_remaining_items: + page_offset = (page_number - 1) * self.client.pagination_size + query = self._generate_query( + filters=filters, + offset=page_offset, + limit=self.client.pagination_size, + include_logs=include_logs, + include_related_nodes=include_related_nodes, + count=True, + ) + new_tasks, count = self.process_page( + client=self.client, query=query, page_number=page_number, timeout=timeout + ) + if count is None: + raise ValueError("Count is None, a value must be retrieve from the query") + + tasks.extend(new_tasks) + remaining_items = count - (page_offset + self.client.pagination_size) + if remaining_items < 0 or offset is not None or limit is not None: + has_remaining_items = False + page_number += 1 + return tasks diff --git a/infrahub_sdk/task/models.py b/infrahub_sdk/task/models.py new file mode 100644 index 00000000..266f6a4a --- /dev/null +++ b/infrahub_sdk/task/models.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +from datetime import datetime +from enum import Enum + +from pydantic import BaseModel, Field + + +class TaskState(str, Enum): + SCHEDULED = "SCHEDULED" + PENDING = "PENDING" + RUNNING = "RUNNING" + COMPLETED = "COMPLETED" + FAILED = "FAILED" + 
CANCELLED = "CANCELLED" + CRASHED = "CRASHED" + PAUSED = "PAUSED" + CANCELLING = "CANCELLING" + + +class TaskLog(BaseModel): + message: str + severity: str + timestamp: datetime + + +class TaskRelatedNode(BaseModel): + id: str + kind: str + + +class Task(BaseModel): + id: str + title: str + state: TaskState + progress: float | None = None + workflow: str | None = None + branch: str | None = None + # start_time: datetime # Is it still required + created_at: datetime + updated_at: datetime + parameters: dict | None = None + tags: list[str] | None = None + related_nodes: list[TaskRelatedNode] = Field(default_factory=list) + logs: list[TaskLog] = Field(default_factory=list) + + @classmethod + def from_graphql(cls, data: dict) -> Task: + related_nodes: list[TaskRelatedNode] = [] + logs: list[TaskLog] = [] + + if data.get("related_nodes"): + related_nodes = [TaskRelatedNode(**item) for item in data["related_nodes"]] + del data["related_nodes"] + + if data.get("logs"): + logs = [TaskLog(**item["node"]) for item in data["logs"]["edges"]] + del data["logs"] + + return cls(**data, related_nodes=related_nodes, logs=logs) + + +class TaskFilter(BaseModel): + ids: list[str] | None = None + q: str | None = None + branch: str | None = None + state: list[TaskState] | None = None + workflow: list[str] | None = None + limit: int | None = None + offset: int | None = None + related_node__ids: list[str] | None = None + + def to_dict(self) -> dict: + return self.model_dump(exclude_none=True) diff --git a/infrahub_sdk/timestamp.py b/infrahub_sdk/timestamp.py index 5a2f58d5..ffeaf214 100644 --- a/infrahub_sdk/timestamp.py +++ b/infrahub_sdk/timestamp.py @@ -1,9 +1,15 @@ from __future__ import annotations import re +import warnings +from datetime import datetime, timezone +from typing import Literal -import pendulum -from pendulum.datetime import DateTime +from whenever import Date, Instant, LocalDateTime, OffsetDateTime, Time, ZonedDateTime + +from .exceptions import TimestampFormatError + +UTC = timezone.utc # Required for older versions of Python REGEX_MAPPING = { "seconds": r"(\d+)(s|sec|second|seconds)", @@ -12,80 +18,183 @@ } -class TimestampFormatError(ValueError): ... - - class Timestamp: - def __init__(self, value: str | DateTime | Timestamp | None = None): - if value and isinstance(value, DateTime): - self.obj = value + _obj: ZonedDateTime + + def __init__(self, value: str | ZonedDateTime | Timestamp | None = None): + if value and isinstance(value, ZonedDateTime): + self._obj = value elif value and isinstance(value, self.__class__): - self.obj = value.obj + self._obj = value._obj elif isinstance(value, str): - self.obj = self._parse_string(value) + self._obj = self._parse_string(value) else: - self.obj = DateTime.now(tz="UTC") + self._obj = ZonedDateTime.now("UTC").round(unit="microsecond") + + @property + def obj(self) -> ZonedDateTime: + warnings.warn( + "Direct access to obj property is deprecated. 
diff --git a/infrahub_sdk/timestamp.py b/infrahub_sdk/timestamp.py
index 5a2f58d5..ffeaf214 100644
--- a/infrahub_sdk/timestamp.py
+++ b/infrahub_sdk/timestamp.py
@@ -1,9 +1,15 @@
 from __future__ import annotations
 
 import re
+import warnings
+from datetime import datetime, timezone
+from typing import Literal
 
-import pendulum
-from pendulum.datetime import DateTime
+from whenever import Date, Instant, LocalDateTime, OffsetDateTime, Time, ZonedDateTime
+
+from .exceptions import TimestampFormatError
+
+UTC = timezone.utc  # Required for older versions of Python
 
 REGEX_MAPPING = {
     "seconds": r"(\d+)(s|sec|second|seconds)",
@@ -12,80 +18,183 @@
 }
 
 
-class TimestampFormatError(ValueError): ...
-
-
 class Timestamp:
-    def __init__(self, value: str | DateTime | Timestamp | None = None):
-        if value and isinstance(value, DateTime):
-            self.obj = value
+    _obj: ZonedDateTime
+
+    def __init__(self, value: str | ZonedDateTime | Timestamp | None = None):
+        if value and isinstance(value, ZonedDateTime):
+            self._obj = value
         elif value and isinstance(value, self.__class__):
-            self.obj = value.obj
+            self._obj = value._obj
         elif isinstance(value, str):
-            self.obj = self._parse_string(value)
+            self._obj = self._parse_string(value)
         else:
-            self.obj = DateTime.now(tz="UTC")
+            self._obj = ZonedDateTime.now("UTC").round(unit="microsecond")
+
+    @property
+    def obj(self) -> ZonedDateTime:
+        warnings.warn(
+            "Direct access to obj property is deprecated. Use to_string(), to_timestamp(), or to_datetime() instead.",
+            UserWarning,
+            stacklevel=2,
+        )
+        return self._obj
 
     @classmethod
-    def _parse_string(cls, value: str) -> DateTime:
+    def _parse_string(cls, value: str) -> ZonedDateTime:
+        try:
+            zoned_date = ZonedDateTime.parse_common_iso(value)
+            return zoned_date
+        except ValueError:
+            pass
+
+        try:
+            instant_date = Instant.parse_common_iso(value)
+            return instant_date.to_tz("UTC")
+        except ValueError:
+            pass
+
+        try:
+            local_date_time = LocalDateTime.parse_common_iso(value)
+            return local_date_time.assume_utc().to_tz("UTC")
+        except ValueError:
+            pass
+
         try:
-            parsed_date = pendulum.parse(value)
-            if isinstance(parsed_date, DateTime):
-                return parsed_date
-        except (pendulum.parsing.exceptions.ParserError, ValueError):
+            offset_date_time = OffsetDateTime.parse_common_iso(value)
+            return offset_date_time.to_tz("UTC")
+        except ValueError:
             pass
 
-        params = {}
+        try:
+            date = Date.parse_common_iso(value)
+            local_date = date.at(Time(12, 00))
+            return local_date.assume_tz("UTC", disambiguate="compatible")
+        except ValueError:
+            pass
+
+        params: dict[str, float] = {}
         for key, regex in REGEX_MAPPING.items():
             match = re.search(regex, value)
             if match:
-                params[key] = int(match.group(1))
+                params[key] = float(match.group(1))
 
-        if not params:
-            raise TimestampFormatError(f"Invalid time format for {value}")
+        if params:
+            return ZonedDateTime.now("UTC").subtract(**params)  # type: ignore[call-overload]
 
-        return DateTime.now(tz="UTC").subtract(**params)
+        raise TimestampFormatError(f"Invalid time format for {value}")
 
     def __repr__(self) -> str:
         return f"Timestamp: {self.to_string()}"
 
     def to_string(self, with_z: bool = True) -> str:
-        iso8601_string = self.obj.to_iso8601_string()
-        if not with_z and iso8601_string[-1] == "Z":
-            iso8601_string = iso8601_string[:-1] + "+00:00"
-        return iso8601_string
+        time_str = self.to_datetime().isoformat(timespec="microseconds")
+        if with_z and time_str.endswith("+00:00"):
+            time_str = time_str[:-6]
+            time_str += "Z"
+        return time_str
 
     def to_timestamp(self) -> int:
-        return self.obj.int_timestamp
+        return self._obj.timestamp()
+
+    def to_datetime(self) -> datetime:
+        return self._obj.py_datetime()
+
+    def get_obj(self) -> ZonedDateTime:
+        return self._obj
 
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, Timestamp):
             return NotImplemented
-        return self.obj == other.obj
+        return self._obj == other._obj
 
     def __lt__(self, other: object) -> bool:
         if not isinstance(other, Timestamp):
             return NotImplemented
-        return self.obj < other.obj
+        return self._obj < other._obj
 
     def __gt__(self, other: object) -> bool:
         if not isinstance(other, Timestamp):
             return NotImplemented
-        return self.obj > other.obj
+        return self._obj > other._obj
 
     def __le__(self, other: object) -> bool:
         if not isinstance(other, Timestamp):
             return NotImplemented
-        return self.obj <= other.obj
+        return self._obj <= other._obj
 
     def __ge__(self, other: object) -> bool:
         if not isinstance(other, Timestamp):
             return NotImplemented
-        return self.obj >= other.obj
+        return self._obj >= other._obj
 
     def __hash__(self) -> int:
         return hash(self.to_string())
 
     def add_delta(self, hours: int = 0, minutes: int = 0, seconds: int = 0, microseconds: int = 0) -> Timestamp:
-        time = self.obj.add(hours=hours, minutes=minutes, seconds=seconds, microseconds=microseconds)
-        return Timestamp(time)
+        warnings.warn(
+            "add_delta() is deprecated. Use add() instead.",
+            UserWarning,
+            stacklevel=2,
+        )
+        return self.add(hours=hours, minutes=minutes, seconds=seconds, microseconds=microseconds)
+
+    def add(
+        self,
+        years: int = 0,
+        months: int = 0,
+        weeks: int = 0,
+        days: int = 0,
+        hours: float = 0,
+        minutes: float = 0,
+        seconds: float = 0,
+        milliseconds: float = 0,
+        microseconds: float = 0,
+        nanoseconds: int = 0,
+        disambiguate: Literal["compatible"] = "compatible",
+    ) -> Timestamp:
+        return Timestamp(
+            self._obj.add(
+                years=years,
+                months=months,
+                weeks=weeks,
+                days=days,
+                hours=hours,
+                minutes=minutes,
+                seconds=seconds,
+                milliseconds=milliseconds,
+                microseconds=microseconds,
+                nanoseconds=nanoseconds,
+                disambiguate=disambiguate,
+            )
+        )
+
+    def subtract(
+        self,
+        years: int = 0,
+        months: int = 0,
+        weeks: int = 0,
+        days: int = 0,
+        hours: float = 0,
+        minutes: float = 0,
+        seconds: float = 0,
+        milliseconds: float = 0,
+        microseconds: float = 0,
+        nanoseconds: int = 0,
+        disambiguate: Literal["compatible"] = "compatible",
+    ) -> Timestamp:
+        return Timestamp(
+            self._obj.subtract(
+                years=years,
+                months=months,
+                weeks=weeks,
+                days=days,
+                hours=hours,
+                minutes=minutes,
+                seconds=seconds,
+                milliseconds=milliseconds,
+                microseconds=microseconds,
+                nanoseconds=nanoseconds,
+                disambiguate=disambiguate,
+            )
+        )
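A short usage sketch of the reworked Timestamp API shown above (printed values illustrative):

from infrahub_sdk.timestamp import Timestamp

parsed = Timestamp("2025-03-19T10:00:00Z")  # common ISO 8601 forms are accepted
relative = Timestamp("30s")                 # relative offsets still parse (30 seconds ago)

later = parsed.add(hours=1, minutes=30)
earlier = parsed.subtract(days=1)

print(later.to_string())      # 2025-03-19T11:30:00.000000Z
print(parsed.to_timestamp())  # 1742378400 (epoch seconds)
print(parsed.to_datetime())   # timezone-aware stdlib datetime
assert earlier < parsed < later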
diff --git a/infrahub_sdk/utils.py b/infrahub_sdk/utils.py
index 2627a062..a45a65aa 100644
--- a/infrahub_sdk/utils.py
+++ b/infrahub_sdk/utils.py
@@ -17,10 +17,12 @@
 
 from infrahub_sdk.repository import GitRepoManager
 
-from .exceptions import FileNotValidError, JsonDecodeError
+from .exceptions import FileNotValidError, JsonDecodeError, TimestampFormatError
+from .timestamp import Timestamp
 
 if TYPE_CHECKING:
     from graphql import GraphQLResolveInfo
+    from whenever import TimeDelta
 
 
 def base36encode(number: int) -> str:
@@ -367,3 +369,29 @@
         groups[group_name] = permissions
 
     return groups
+
+
+def calculate_time_diff(value: str) -> str | None:
+    """Calculate the time difference, in human-readable form, between a datetime string and now."""
+    try:
+        time_value = Timestamp(value)
+    except TimestampFormatError:
+        return None
+
+    delta: TimeDelta = Timestamp().get_obj().difference(time_value.get_obj())
+    (hrs, mins, secs, nanos) = delta.in_hrs_mins_secs_nanos()
+
+    if nanos and nanos > 500_000_000:
+        secs += 1
+
+    if hrs and hrs < 24 and mins:
+        return f"{hrs}h {mins}m and {secs}s ago"
+    if hrs and hrs >= 24:
+        remaining_hrs = hrs % 24
+        days = int((hrs - remaining_hrs) / 24)
+        return f"{days}d and {remaining_hrs}h ago"
+    if hrs == 0 and mins and secs:
+        return f"{mins}m and {secs}s ago"
+    if hrs == 0 and mins == 0 and secs:
+        return f"{secs}s ago"
+    return "now"
diff --git a/poetry.lock b/poetry.lock
index d433ec45..281e570f 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
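To close the loop on the calculate_time_diff() helper added to infrahub_sdk/utils.py above, a few illustrative calls, assuming "now" is 2025-03-19T12:00:00 UTC:

from infrahub_sdk.utils import calculate_time_diff

print(calculate_time_diff("2025-03-19T11:59:30Z"))  # "30s ago"
print(calculate_time_diff("2025-03-19T11:14:50Z"))  # "45m and 10s ago"
print(calculate_time_diff("2025-03-17T10:00:00Z"))  # "2d and 2h ago"
print(calculate_time_diff("not-a-date"))            # None for unparseable input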
[[package]] name = "annotated-types" @@ -1021,105 +1021,6 @@ files = [ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] -[[package]] -name = "pendulum" -version = "3.0.0" -description = "Python datetimes made easy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = 
"pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, -] - -[package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] - [[package]] name = "pexpect" version = "4.9.0" @@ -1301,7 +1202,10 @@ files = [ [package.dependencies] annotated-types = ">=0.6.0" pydantic-core = "2.23.3" -typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} +typing-extensions = [ + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, +] [package.extras] email = ["email-validator (>=2.0.0)"] @@ -1553,20 +1457,6 @@ psutil = ["psutil (>=3.0)"] setproctitle = ["setproctitle"] testing = ["filelock"] -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false 
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - [[package]] name = "python-dotenv" version = "1.0.1" @@ -1711,29 +1601,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.8.6" +version = "0.11.0" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.8.6-py3-none-linux_armv6l.whl", hash = "sha256:defed167955d42c68b407e8f2e6f56ba52520e790aba4ca707a9c88619e580e3"}, - {file = "ruff-0.8.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:54799ca3d67ae5e0b7a7ac234baa657a9c1784b48ec954a094da7c206e0365b1"}, - {file = "ruff-0.8.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e88b8f6d901477c41559ba540beeb5a671e14cd29ebd5683903572f4b40a9807"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0509e8da430228236a18a677fcdb0c1f102dd26d5520f71f79b094963322ed25"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a7ddb221779871cf226100e677b5ea38c2d54e9e2c8ed847450ebbdf99b32d"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:248b1fb3f739d01d528cc50b35ee9c4812aa58cc5935998e776bf8ed5b251e75"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bc3c083c50390cf69e7e1b5a5a7303898966be973664ec0c4a4acea82c1d4315"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52d587092ab8df308635762386f45f4638badb0866355b2b86760f6d3c076188"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61323159cf21bc3897674e5adb27cd9e7700bab6b84de40d7be28c3d46dc67cf"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ae4478b1471fc0c44ed52a6fb787e641a2ac58b1c1f91763bafbc2faddc5117"}, - {file = "ruff-0.8.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0c000a471d519b3e6cfc9c6680025d923b4ca140ce3e4612d1a2ef58e11f11fe"}, - {file = "ruff-0.8.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9257aa841e9e8d9b727423086f0fa9a86b6b420fbf4bf9e1465d1250ce8e4d8d"}, - {file = "ruff-0.8.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45a56f61b24682f6f6709636949ae8cc82ae229d8d773b4c76c09ec83964a95a"}, - {file = "ruff-0.8.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:496dd38a53aa173481a7d8866bcd6451bd934d06976a2505028a50583e001b76"}, - {file = "ruff-0.8.6-py3-none-win32.whl", hash = "sha256:e169ea1b9eae61c99b257dc83b9ee6c76f89042752cb2d83486a7d6e48e8f764"}, - {file = "ruff-0.8.6-py3-none-win_amd64.whl", hash = "sha256:f1d70bef3d16fdc897ee290d7d20da3cbe4e26349f62e8a0274e7a3f4ce7a905"}, - {file = "ruff-0.8.6-py3-none-win_arm64.whl", hash = "sha256:7d7fc2377a04b6e04ffe588caad613d0c460eb2ecba4c0ccbbfe2bc973cbc162"}, - {file = "ruff-0.8.6.tar.gz", hash = "sha256:dcad24b81b62650b0eb8814f576fc65cfee8674772a6e24c9b747911801eeaa5"}, + {file = "ruff-0.11.0-py3-none-linux_armv6l.whl", hash = "sha256:dc67e32bc3b29557513eb7eeabb23efdb25753684b913bebb8a0c62495095acb"}, + {file = "ruff-0.11.0-py3-none-macosx_10_12_x86_64.whl", hash = 
"sha256:38c23fd9bdec4eb437b4c1e3595905a0a8edfccd63a790f818b28c78fe345639"}, + {file = "ruff-0.11.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7c8661b0be91a38bd56db593e9331beaf9064a79028adee2d5f392674bbc5e88"}, + {file = "ruff-0.11.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6c0e8d3d2db7e9f6efd884f44b8dc542d5b6b590fc4bb334fdbc624d93a29a2"}, + {file = "ruff-0.11.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c3156d3f4b42e57247275a0a7e15a851c165a4fc89c5e8fa30ea6da4f7407b8"}, + {file = "ruff-0.11.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:490b1e147c1260545f6d041c4092483e3f6d8eba81dc2875eaebcf9140b53905"}, + {file = "ruff-0.11.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1bc09a7419e09662983b1312f6fa5dab829d6ab5d11f18c3760be7ca521c9329"}, + {file = "ruff-0.11.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcfa478daf61ac8002214eb2ca5f3e9365048506a9d52b11bea3ecea822bb844"}, + {file = "ruff-0.11.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fbb2aed66fe742a6a3a0075ed467a459b7cedc5ae01008340075909d819df1e"}, + {file = "ruff-0.11.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92c0c1ff014351c0b0cdfdb1e35fa83b780f1e065667167bb9502d47ca41e6db"}, + {file = "ruff-0.11.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e4fd5ff5de5f83e0458a138e8a869c7c5e907541aec32b707f57cf9a5e124445"}, + {file = "ruff-0.11.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:96bc89a5c5fd21a04939773f9e0e276308be0935de06845110f43fd5c2e4ead7"}, + {file = "ruff-0.11.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a9352b9d767889ec5df1483f94870564e8102d4d7e99da52ebf564b882cdc2c7"}, + {file = "ruff-0.11.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:049a191969a10897fe052ef9cc7491b3ef6de79acd7790af7d7897b7a9bfbcb6"}, + {file = "ruff-0.11.0-py3-none-win32.whl", hash = "sha256:3191e9116b6b5bbe187447656f0c8526f0d36b6fd89ad78ccaad6bdc2fad7df2"}, + {file = "ruff-0.11.0-py3-none-win_amd64.whl", hash = "sha256:c58bfa00e740ca0a6c43d41fb004cd22d165302f360aaa56f7126d544db31a21"}, + {file = "ruff-0.11.0-py3-none-win_arm64.whl", hash = "sha256:868364fc23f5aa122b00c6f794211e85f7e78f5dffdf7c590ab90b8c4e69b657"}, + {file = "ruff-0.11.0.tar.gz", hash = "sha256:e55c620690a4a7ee6f1cccb256ec2157dc597d109400ae75bbf944fc9d6462e2"}, ] [[package]] @@ -2116,6 +2006,89 @@ files = [ {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] +[[package]] +name = "whenever" +version = "0.7.2" +description = "Modern datetime library for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "whenever-0.7.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a87864d3e7679dbedc55d3aa8c6cef5ffdc45520e16805f4c5a3cf71241fb986"}, + {file = "whenever-0.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f37dc37d1bea611af16a3aaba5960038604ddfb4a592b1d72a3efccd5853b6da"}, + {file = "whenever-0.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3da602b9fb80f8c6495e0495638c54a8b9a43362769199fcfe4e4fc6df33697"}, + {file = "whenever-0.7.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7afaeaada1d244016ce38252f0c0340bd7d199b4a240ba986efaab66b02f2"}, + {file = "whenever-0.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f71fd077601c27830e202ed652bd89b46ae6f1ba0f96d29897038dae9c80eead"}, + {file = "whenever-0.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40c74e8f0f3a9a540f580d44a22f2f9dc54b17b68d64abb1c0c961ab1343d43b"}, + {file = "whenever-0.7.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de72b15de311b28e6fcdb45bd436fbb0bde0d4596e0c446f9301bb523b6f2369"}, + {file = "whenever-0.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a84ff30d230e56250f89e99f5442d51a5215e10f6b7902d0d7ec51d8b06b6b2"}, + {file = "whenever-0.7.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2af28fa6c8446f513ed3c71275349831e79df021dadb0051fb5b6cbd353d16d6"}, + {file = "whenever-0.7.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:7f65c163f80b397f532d6dd9f56ead5b5d8b76bc24b1587dbb152bb466bd7de0"}, + {file = "whenever-0.7.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ef6c83a20b9ccfe10623596dda19d666cc95c0e83260a6568d767bc926da3781"}, + {file = "whenever-0.7.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6322b78dd97b295164f0d59115be71e9242f74c100899736876b1e8f19b2ff0f"}, + {file = "whenever-0.7.2-cp310-cp310-win32.whl", hash = "sha256:b9a2fc32a8914771d994d6349dcf25208c82d0eb6cf33f27b2309d9e8f58a51a"}, + {file = "whenever-0.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:1440b8e1ef507c318a741bede7a43263f84909c43cf48f110de509233b89d77c"}, + {file = "whenever-0.7.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0b5aaa62551213b3b099b460331fce75c7dbabc2f6696fe3be845cb4ecc8a856"}, + {file = "whenever-0.7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4720cc7bf704e92b89bf60329f21084256b4b4a9dcc47a782461f7918d7e1fb"}, + {file = "whenever-0.7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91a18c81e517124463200b7fcde40ddcc18c959791b219dd681dc5fdec04f050"}, + {file = "whenever-0.7.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:85ef3850f6a9ce3d5349a4f5a1d7fda14c68d3f18c0d18a890bcb11955709a8c"}, + {file = "whenever-0.7.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9af47e5ff5d8528a7149f253276e1094bb944335074241d7e9f6c26ea12aa9ac"}, + {file = "whenever-0.7.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:389afeb92b6272f35132a428884ba03f52ca5a9e80c1b28e0f9699f6098abf34"}, + {file = "whenever-0.7.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:825a9567ba0b91f1e970cd59f0bbf7b6c2c12c41621fd3264e2d1a0f596c3efe"}, + {file = "whenever-0.7.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d303cb0c691784219b7539e537167ea573cf58acc42696159585d27dacd10af"}, + {file = "whenever-0.7.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6194cf9bf76cb0e3c8593d757b73b41cb33c1137ce1a79795812d43be8a29a95"}, + {file = "whenever-0.7.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:59c0fb56aed72a0ec10a83b99f8eee2e96e4b32045e4ecfe85027129295cde6a"}, + {file = "whenever-0.7.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f50068f98c85706e384a84e6a6e5d0d38760bbcb770fbd140596d2228f101c2e"}, + {file = "whenever-0.7.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:993c98e9956597b61e21c3c65da8d5e9eb342fe6c6efc2135432be56aa64a116"}, + {file = "whenever-0.7.2-cp311-cp311-win32.whl", hash = "sha256:e1d0ea62becd437ae9c911303cbcc5ba66107a79c9e60a4e0f965537878a3c77"}, + {file = "whenever-0.7.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:c70a6ab84a4d7bb44e86fa9ebec2ea36a456457d211dcb48f16f54487774ec45"}, + {file = "whenever-0.7.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:acaedfa0327e8859c078c40c2e17a3d169ce9f784c3735c09fd701d4035b7432"}, + {file = "whenever-0.7.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38de1c34ab4e42eda4006e8635cadc0c526094a546aa5ebf6a903c61d33053f3"}, + {file = "whenever-0.7.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87e62291c4a0f212a13053f021b9255e0b820e57303c96e94b48304b84a1849d"}, + {file = "whenever-0.7.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cbf68b2833e6766fb4898ebe432406ce6ead7ac846f7b15427bfbd560d5939"}, + {file = "whenever-0.7.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2220cf0d818d960d4a7ec1b05ffbed7b81e482807be0b4bb7a5466418a4c8f79"}, + {file = "whenever-0.7.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7395c69109113eb1666bac29b6207caf28e38e25d332c57649a7e710f0d863db"}, + {file = "whenever-0.7.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efcbffe9a510f310f019fe5bfe877e591ea8cdad90ac8fe6868a80659d411ac5"}, + {file = "whenever-0.7.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e2d836ad37f4333e938779eae6e64f532f27ce19529ee9c09bfb62f796e41db1"}, + {file = "whenever-0.7.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:59f1949d1efe4a85cfe81130159dc2c871ea5b56bae6e9782d5e344a747a758e"}, + {file = "whenever-0.7.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:030836f2cb15eb33631c3d3c2f904d481edc797df063814f9c77d060db5db17d"}, + {file = "whenever-0.7.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b25d8fd6ade64cf1492707c019cccb726aa07dfb20f79a4751eccb56555c2012"}, + {file = "whenever-0.7.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:247e0255c6c8ded48a6d3734aabf448f2bf07bb2abb65b2828104df1eaab82cf"}, + {file = "whenever-0.7.2-cp312-cp312-win32.whl", hash = "sha256:81fcef2c6917333d3aa8d24043e01323d8831c1354cabcb935e29b2a1f6a7a4f"}, + {file = "whenever-0.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:2cdb4ddd2b2e908a076232a60577e4616096d4cf166da9373c4a03bf9d81721e"}, + {file = "whenever-0.7.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:c35f47f613a7816d602fd39594400bfe7fff70a3bd7272cd9b8c736ffc13feed"}, + {file = "whenever-0.7.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0cb7515e180aa2fea6c2d1855607011dd08d14acaba750b0673d7d6f536b0f5e"}, + {file = "whenever-0.7.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac40aed9be0bc1aeba2662e17f145987f84e8a0bafbfa5f938b40db82fc7aba"}, + {file = "whenever-0.7.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccd0840d04648dad3c5ae81a53a56e08a971a316d4167921665a7aa5fa8f0085"}, + {file = "whenever-0.7.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf5199ffa1da783207b0c75d478ab6d808309cc0cbb2631640393bd943b6167e"}, + {file = "whenever-0.7.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7be787eeb542f86cc2d0081c541e89e4417261976a50a7824f6e43248fadb294"}, + {file = "whenever-0.7.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d78e13c648ce246dbaa54f78faf1d2f3d8107619f3c598d3d127ca45fd5d792a"}, + {file = "whenever-0.7.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cc490b577f38bb55957e04d6a1b594c5365f01a6f3429c38b26243d3cf473d80"}, + 
{file = "whenever-0.7.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bf322daa4184e7d89a4549498c8408e6c4a0bd2309eacd4b21151020bf51870c"}, + {file = "whenever-0.7.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:00b60a9af13e4c6b618f52a55ae7c15c36eb3ff42bfc6cb050981e8a2402bc9f"}, + {file = "whenever-0.7.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a2f82fd85a6521090d3f44412f4c76687a0e141df215541f6f0f6691276257e7"}, + {file = "whenever-0.7.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a386df7b5e08f56a49f8a00991e54c3f5ebb218570d7a98c726d793859a2b0ea"}, + {file = "whenever-0.7.2-cp313-cp313-win32.whl", hash = "sha256:46e51abd495c91fd586828401884750d7eb96ca3658d3d9f228f62beb140c758"}, + {file = "whenever-0.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:af67395516ed16a8423735a4dd5a8795353f39e758b7428178dbe8de06977f21"}, + {file = "whenever-0.7.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a8b25304ffc9563bf17914a9a9bf6642456923c727d330fcfa483d303f549805"}, + {file = "whenever-0.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2978fb80700e583e2f957cd47c51d6b161f38a50b85a1744fcf3b13e53acf113"}, + {file = "whenever-0.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:246ce04d18469169582cd492b6a4f74f6c166ed2caa869679522b02228c0bbf8"}, + {file = "whenever-0.7.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d464feea543f36dd712eee0f47ea690cf1a4d474c39ddaafe30254434ac9b2e"}, + {file = "whenever-0.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:253460d1194a1dcb27a47a0c6cead61cbf0a29d5bb795e7f42caa0e7be32cae9"}, + {file = "whenever-0.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a6b35953ca90ef5f0f2a7f3e951d110239fcccde5eccf08c4a0872821d41066"}, + {file = "whenever-0.7.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6c9bb2528c345d552e0e25ab82276dd9765185718dfdf2654f0d84771eb3fa9"}, + {file = "whenever-0.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bc0a7e6e5bfa15531910ca4a062fdc20c071747f016599999eac3d8fef7ea4db"}, + {file = "whenever-0.7.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8c16c03a556819c8f1738dbcfa2793c8c0d2a9a496e0ec1524fea8a124d20037"}, + {file = "whenever-0.7.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:43c2f1be68f638c7f3f27c60e5851b5b94aa3ba0186e84bc2010c880e71f7f84"}, + {file = "whenever-0.7.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:715c4da7fbef766bfb5511017782873c98adac9f5f982806ead9b4a99f7bb086"}, + {file = "whenever-0.7.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d3daadd03d392048a4041969132ae2a6b57941b172870c526b14c8343721967d"}, + {file = "whenever-0.7.2-cp39-cp39-win32.whl", hash = "sha256:7b3c1d9ec5dc844686aad66bb0e14dda7d9667a113757c1f566a8e8036e4585f"}, + {file = "whenever-0.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:166f4d31f0be9ee59d00670f52a724c4d1090688b46e3531d0ccb74ae3157032"}, + {file = "whenever-0.7.2.tar.gz", hash = "sha256:a292dddd4d635a5b597686117e455d41e6134716a7be66b3903554514df8729c"}, +] + +[package.dependencies] +tzdata = {version = ">=2020.1", markers = "sys_platform == \"win32\""} + [[package]] name = "wrapt" version = "1.17.0" @@ -2234,5 +2207,5 @@ tests = ["Jinja2", "pytest", "pyyaml", "rich"] [metadata] lock-version = "2.0" -python-versions = "^3.9, < 3.13" -content-hash = "7cf3b9fd5e6ad627c30cb1660ef9c45d5b6a264150d064bc47cc7ae7a2be4030" +python-versions = "^3.9, <3.14" +content-hash = 
"b3e5f33a5e7089dfb49e9d4fd41b71feba6a5f2ec50c67f18202caa973baf1b3" diff --git a/pyproject.toml b/pyproject.toml index 0427195c..abfadc14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "infrahub-sdk" -version = "1.7.3a0" +version = "1.8.0" description = "Python Client to interact with Infrahub" authors = ["OpsMill "] readme = "README.md" @@ -16,10 +16,11 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] [tool.poetry.dependencies] -python = "^3.9, < 3.13" +python = "^3.9, <3.14" pydantic = ">=2.0.0,!=2.0.1,!=2.1.0,<3.0.0" pydantic-settings = ">=2.0" graphql-core = ">=3.1,<3.3" @@ -27,10 +28,6 @@ httpx = [ { version = ">=0.20", python = ">=3.9,<3.11" }, { version = ">=0.23", python = ">=3.11" }, ] -pendulum = [ - { version = ">=2", python = ">=3.9,<3.12" }, - { version = ">=3", python = ">=3.12" }, -] ujson = "^5" Jinja2 = { version = "^3", optional = true } numpy = [ @@ -45,6 +42,7 @@ pytest = { version = "*", optional = true } pyyaml = { version = "^6", optional = true } eval-type-backport = { version = "^0.2.2", python = "~3.9" } dulwich = "^0.21.4" +whenever = "0.7.2" [tool.poetry.group.dev.dependencies] pytest = "*" @@ -60,7 +58,7 @@ pre-commit = "^2.20.0" types-toml = "*" types-ujson = "*" types-pyyaml = "*" -ruff = "0.8.6" +ruff = "0.11.0" pytest-xdist = "^3.3.1" types-python-slugify = "^8.0.0.3" invoke = "^2.2.0" @@ -215,6 +213,7 @@ ignore = [ "SIM118", # Use `key in dict` instead of `key in dict.keys) "TC003", # Move standard library import `collections.abc.Iterable` into a type-checking block "UP031", # Use format specifiers instead of percent format + "UP045", # Use `X | None` for type annotations ] diff --git a/tests/fixtures/tasks/mock_query_tasks_01_page1.json b/tests/fixtures/tasks/mock_query_tasks_01_page1.json new file mode 100644 index 00000000..733078a9 --- /dev/null +++ b/tests/fixtures/tasks/mock_query_tasks_01_page1.json @@ -0,0 +1,45 @@ +{ + "data": { + "InfrahubTask": { + "count": 5, + "edges": [ + { + "node": { + "created_at": "2025-02-21T04:05:53.804642Z", + "id": "b6752ef3-d51c-48c2-a8d7-17a061e1484b", + "branch": null, + "title": "Configuration webhook automation and populate cache", + "updated_at": "2025-02-21T04:06:46.647125Z", + "state": "COMPLETED", + "progress": null, + "workflow": "webhook-setup-automations" + } + }, + { + "node": { + "created_at": "2025-02-10T13:40:40.699598Z", + "id": "06c14140-8404-47cf-a8b8-585dd7302653", + "branch": "main", + "title": "Configuration webhook automation and populate cache", + "updated_at": "2025-02-10T13:41:21.228266Z", + "state": "COMPLETED", + "progress": null, + "workflow": "webhook-setup-automations" + } + }, + { + "node": { + "created_at": "2025-02-10T13:37:04.191653Z", + "id": "2210f266-2705-4581-ae45-4d82852cb811", + "branch": null, + "title": "Configuration webhook automation and populate cache", + "updated_at": "2025-02-10T13:37:50.591615Z", + "state": "COMPLETED", + "progress": null, + "workflow": "webhook-setup-automations" + } + } + ] + } + } + } \ No newline at end of file diff --git a/tests/fixtures/tasks/mock_query_tasks_01_page2.json b/tests/fixtures/tasks/mock_query_tasks_01_page2.json new file mode 100644 index 00000000..42a2fe63 --- /dev/null +++ b/tests/fixtures/tasks/mock_query_tasks_01_page2.json @@ -0,0 +1,33 @@ +{ + "data": { + "InfrahubTask": { + "count": 5, + "edges": [ + { + "node": { + "created_at": 
"2025-01-18T22:12:23.104373Z", + "id": "a60f4431-6a43-451e-8f42-9ec5db9a9370", + "branch": "main", + "title": "Setup computed attributes for Python transforms in task-manager", + "updated_at": "2025-01-18T22:12:25.436646Z", + "state": "COMPLETED", + "progress": null, + "workflow": "computed-attribute-setup-python" + } + }, + { + "node": { + "created_at": "2025-01-18T22:12:19.440880Z", + "id": "6aad5002-cf46-4756-95de-c5f95191f70c", + "branch": "main", + "title": "Setup computed attributes for Python transforms in task-manager", + "updated_at": "2025-01-18T22:12:21.214943Z", + "state": "COMPLETED", + "progress": null, + "workflow": "computed-attribute-setup-python" + } + } + ] + } + } + } \ No newline at end of file diff --git a/tests/fixtures/tasks/mock_query_tasks_02_page1.json b/tests/fixtures/tasks/mock_query_tasks_02_page1.json new file mode 100644 index 00000000..42916b6b --- /dev/null +++ b/tests/fixtures/tasks/mock_query_tasks_02_page1.json @@ -0,0 +1,33 @@ +{ + "data": { + "InfrahubTask": { + "count": 2, + "edges": [ + { + "node": { + "created_at": "2025-01-18T22:12:23.104373Z", + "id": "a60f4431-6a43-451e-8f42-9ec5db9a9370", + "branch": "main", + "title": "Setup computed attributes for Python transforms in task-manager", + "updated_at": "2025-01-18T22:12:25.436646Z", + "state": "COMPLETED", + "progress": null, + "workflow": "computed-attribute-setup-python" + } + }, + { + "node": { + "created_at": "2025-01-18T22:12:19.440880Z", + "id": "6aad5002-cf46-4756-95de-c5f95191f70c", + "branch": "main", + "title": "Setup computed attributes for Python transforms in task-manager", + "updated_at": "2025-01-18T22:12:21.214943Z", + "state": "COMPLETED", + "progress": null, + "workflow": "computed-attribute-setup-python" + } + } + ] + } + } + } \ No newline at end of file diff --git a/tests/fixtures/tasks/mock_query_tasks_03_page1.json b/tests/fixtures/tasks/mock_query_tasks_03_page1.json new file mode 100644 index 00000000..216fedbc --- /dev/null +++ b/tests/fixtures/tasks/mock_query_tasks_03_page1.json @@ -0,0 +1,21 @@ +{ + "data": { + "InfrahubTask": { + "count": 2, + "edges": [ + { + "node": { + "created_at": "2025-01-18T22:12:23.104373Z", + "id": "a60f4431-6a43-451e-8f42-9ec5db9a9370", + "branch": "main", + "title": "Setup computed attributes for Python transforms in task-manager", + "updated_at": "2025-01-18T22:12:25.436646Z", + "state": "COMPLETED", + "progress": null, + "workflow": "computed-attribute-setup-python" + } + } + ] + } + } + } \ No newline at end of file diff --git a/tests/fixtures/tasks/mock_query_tasks_04_page1.json b/tests/fixtures/tasks/mock_query_tasks_04_page1.json new file mode 100644 index 00000000..ccc45a42 --- /dev/null +++ b/tests/fixtures/tasks/mock_query_tasks_04_page1.json @@ -0,0 +1,84 @@ +{ + "data": { + "InfrahubTask": { + "count": 8, + "edges": [ + { + "node": { + "created_at": "2025-02-21T04:05:53.804642Z", + "id": "b6752ef3-d51c-48c2-a8d7-17a061e1484b", + "branch": null, + "title": "Configuration webhook automation and populate cache", + "updated_at": "2025-02-21T04:06:46.647125Z", + "state": "COMPLETED", + "progress": null, + "workflow": "webhook-setup-automations", + "related_nodes": [], + "logs": { + "edges": [ + { + "node": { + "severity": "info", + "message": "Worker 'InfrahubEntWorkerAsync ebde081b-2cb6-4a3a-afc1-30b9074cb570' submitting flow run 'b6752ef3-d51c-48c2-a8d7-17a061e1484b'", + "timestamp": "2025-02-21T04:06:31.055559Z" + + } + } + ] + } + } + }, + { + "node": { + "created_at": "2025-02-10T13:40:40.699598Z", + "id": 
"06c14140-8404-47cf-a8b8-585dd7302653", + "branch": null, + "title": "Configuration webhook automation and populate cache", + "updated_at": "2025-02-10T13:41:21.228266Z", + "state": "COMPLETED", + "progress": null, + "workflow": "webhook-setup-automations", + "related_nodes": [], + "logs": { + "edges": [ + { + "node": { + "severity": "info", + "message": "Worker 'InfrahubEntWorkerAsync e6c576a1-5be3-4647-af78-e803b2fb4b5c' submitting flow run '06c14140-8404-47cf-a8b8-585dd7302653'", + "timestamp": "2025-02-10T13:41:15.444768Z" + + } + } + ] + } + } + }, + { + "node": { + "created_at": "2025-02-10T13:37:04.191653Z", + "id": "2210f266-2705-4581-ae45-4d82852cb811", + "branch": null, + "title": "Configuration webhook automation and populate cache", + "updated_at": "2025-02-10T13:37:50.591615Z", + "state": "COMPLETED", + "progress": null, + "workflow": "webhook-setup-automations", + "related_nodes": [], + "logs": { + "edges": [ + { + "node": { + "severity": "info", + "message": "Worker 'InfrahubEntWorkerAsync 647a77bf-5328-4f21-9841-63b3705ab64c' submitting flow run '2210f266-2705-4581-ae45-4d82852cb811'", + "timestamp": "2025-02-10T13:37:39.662456Z" + + } + } + ] + } + } + } + ] + } + } + } \ No newline at end of file diff --git a/tests/fixtures/tasks/mock_query_tasks_04_page2.json b/tests/fixtures/tasks/mock_query_tasks_04_page2.json new file mode 100644 index 00000000..d84632ff --- /dev/null +++ b/tests/fixtures/tasks/mock_query_tasks_04_page2.json @@ -0,0 +1,145 @@ +{ + "data": { + "InfrahubTask": { + "count": 8, + "edges": [ + { + "node": { + "created_at": "2025-01-18T22:12:23.104373Z", + "id": "a60f4431-6a43-451e-8f42-9ec5db9a9370", + "branch": "main", + "title": "Setup computed attributes for Python transforms in task-manager", + "updated_at": "2025-01-18T22:12:25.436646Z", + "state": "COMPLETED", + "progress": null, + "workflow": "computed-attribute-setup-python", + "related_nodes": [], + "logs": { + "edges": [ + { + "node": { + "severity": "info", + "message": "Worker 'InfrahubEntWorkerAsync 0cd915bf-686c-4809-8fc4-294188e46fee' submitting flow run 'a60f4431-6a43-451e-8f42-9ec5db9a9370'", + "timestamp": "2025-01-18T22:12:25.192381Z" + } + }, + { + "node": { + "severity": "info", + "message": "Schema converged after 0.2 seconds", + "timestamp": "2025-01-18T22:12:25.341294Z" + } + } + ] + } + } + }, + { + "node": { + "created_at": "2025-01-18T22:12:19.440880Z", + "id": "6aad5002-cf46-4756-95de-c5f95191f70c", + "branch": "main", + "title": "Setup computed attributes for Python transforms in task-manager", + "updated_at": "2025-01-18T22:12:21.214943Z", + "state": "COMPLETED", + "progress": null, + "workflow": "computed-attribute-setup-python", + "related_nodes": [], + "logs": { + "edges": [ + { + "node": { + "severity": "info", + "message": "Worker 'InfrahubEntWorkerAsync 0cd915bf-686c-4809-8fc4-294188e46fee' submitting flow run '6aad5002-cf46-4756-95de-c5f95191f70c'", + "timestamp": "2025-01-18T22:12:20.695955Z" + } + }, + { + "node": { + "severity": "info", + "message": "Schema converged after 0.2 seconds", + "timestamp": "2025-01-18T22:12:21.048654Z" + } + } + ] + } + } + }, + { + "node": { + "created_at": "2025-01-18T22:12:20.228112Z", + "id": "32116fcd-9071-43a7-9f14-777901020b5b", + "branch": "main", + "title": "Import Python file", + "updated_at": "2025-01-18T22:12:22.044921Z", + "state": "COMPLETED", + "progress": null, + "workflow": "import-python-files", + "related_nodes": [ + { + "id": "1808d478-e51e-7504-d0ef-c513f1cd69a5", + "kind": "CoreReadOnlyRepository" + } + ], + "logs": 
{ + "edges": [ + { + "node": { + "severity": "info", + "message": "Found 1 check definitions in the repository", + "timestamp": "2025-01-18T22:12:20.371699Z" + + } + }, + { + "node": { + "severity": "info", + "message": "Found 3 Python transforms in the repository", + "timestamp": "2025-01-18T22:12:20.603709Z" + + } + }, + { + "node": { + "severity": "info", + "message": "Found 4 generator definitions in the repository", + "timestamp": "2025-01-18T22:12:21.259186Z" + } + }, + { + "node": { + "severity": "info", + "message": "Processing generator update_upstream_interfaces_description (generators/upstream_interfaces.py)", + "timestamp": "2025-01-18T22:12:21.259692Z", + + } + }, + { + "node": { + "severity": "info", + "message": "Processing generator create_circuit_endpoints (generators/circuit_endpoints.py)", + "timestamp": "2025-01-18T22:12:21.261714Z" + } + }, + { + "node": { + "severity": "info", + "message": "Processing generator drained_circuit_bgp_sessions (generators/drained_circuit_bgp_sessions.py)", + "timestamp": "2025-01-18T22:12:21.263222Z" + } + }, + { + "node": { + "severity": "info", + "message": "Processing generator backbone_service (generators/backbone_service.py)", + "timestamp": "2025-01-18T22:12:21.264613Z" + } + } + ] + } + } + } + ] + } + } + } \ No newline at end of file diff --git a/tests/fixtures/tasks/mock_query_tasks_04_page3.json b/tests/fixtures/tasks/mock_query_tasks_04_page3.json new file mode 100644 index 00000000..1b250884 --- /dev/null +++ b/tests/fixtures/tasks/mock_query_tasks_04_page3.json @@ -0,0 +1,157 @@ +{ + "data": { + "InfrahubTask": { + "count": 8, + "edges": [ + { + "node": { + "created_at": "2025-01-18T22:12:19.512289Z", + "id": "40ff9289-18d1-4eba-9ff8-3c7eb744364e", + "branch": "main", + "title": "Import objects", + "updated_at": "2025-01-18T22:12:22.999826Z", + "state": "COMPLETED", + "progress": null, + "workflow": "import-object-from-file", + "related_nodes": [ + { + "id": "1808d478-e51e-7504-d0ef-c513f1cd69a5", + "kind": "CoreReadOnlyRepository" + } + ], + "logs": { + "edges": [ + { + "node": { + "severity": "info", + "message": "Successfully parsed .infrahub.yml", + "timestamp": "2025-01-18T22:12:19.899046Z", + "id": null + } + }, + { + "node": { + "severity": "info", + "message": "Created subflow run 'impressive-termite' for flow 'import-python-files'", + "timestamp": "2025-01-18T22:12:20.256939Z", + "id": null + } + }, + { + "node": { + "severity": "info", + "message": "Found 2 Jinja2 transforms in the repository", + "timestamp": "2025-01-18T22:12:22.203059Z", + "id": null + } + }, + { + "node": { + "severity": "info", + "message": "New version of the Jinja2 Transform 'clab_topology' found, updating", + "timestamp": "2025-01-18T22:12:22.391083Z", + "id": null + } + }, + { + "node": { + "severity": "info", + "message": "New version of the Jinja2 Transform 'device_startup' found, updating", + "timestamp": "2025-01-18T22:12:22.508961Z", + "id": null + } + }, + { + "node": { + "severity": "info", + "message": "Found 2 artifact definitions in the repository", + "timestamp": "2025-01-18T22:12:22.829151Z", + "id": null + } + } + ] + } + } + }, + { + "node": { + "created_at": "2025-01-18T22:12:16.755287Z", + "id": "4a7e52c0-c5b3-4e22-b401-62044ebb5e1f", + "branch": "main", + "title": "Import Python file", + "updated_at": "2025-01-18T22:12:18.437961Z", + "state": "COMPLETED", + "progress": null, + "workflow": "import-python-files", + "related_nodes": [ + { + "id": "1808d478-e51e-7504-d0ef-c513f1cd69a5", + "kind": "CoreReadOnlyRepository" + } + 
], + "logs": { + "edges": [ + { + "node": { + "severity": "info", + "message": "Found 1 check definitions in the repository", + "timestamp": "2025-01-18T22:12:16.902608Z", + "id": null + } + }, + { + "node": { + "severity": "info", + "message": "Found 3 Python transforms in the repository", + "timestamp": "2025-01-18T22:12:17.155336Z", + "id": null + } + }, + { + "node": { + "severity": "info", + "message": "Found 4 generator definitions in the repository", + "timestamp": "2025-01-18T22:12:17.581382Z", + "id": null + } + }, + { + "node": { + "severity": "info", + "message": "Processing generator update_upstream_interfaces_description (generators/upstream_interfaces.py)", + "timestamp": "2025-01-18T22:12:17.581995Z", + "id": null + } + }, + { + "node": { + "severity": "info", + "message": "Processing generator create_circuit_endpoints (generators/circuit_endpoints.py)", + "timestamp": "2025-01-18T22:12:17.583801Z", + "id": null + } + }, + { + "node": { + "severity": "info", + "message": "Processing generator drained_circuit_bgp_sessions (generators/drained_circuit_bgp_sessions.py)", + "timestamp": "2025-01-18T22:12:17.585317Z", + "id": null + } + }, + { + "node": { + "severity": "info", + "message": "Processing generator backbone_service (generators/backbone_service.py)", + "timestamp": "2025-01-18T22:12:17.586920Z", + "id": null + } + } + ] + } + } + } + ] + } + } + } \ No newline at end of file diff --git a/tests/fixtures/tasks/mock_query_tasks_05_page1.json b/tests/fixtures/tasks/mock_query_tasks_05_page1.json new file mode 100644 index 00000000..b418ecb0 --- /dev/null +++ b/tests/fixtures/tasks/mock_query_tasks_05_page1.json @@ -0,0 +1,63 @@ +{ + "data": { + "InfrahubTask": { + "count": 1, + "edges": [ + { + "node": { + "created_at": "2025-01-18T22:12:20.228112Z", + "id": "32116fcd-9071-43a7-9f14-777901020b5b", + "branch": "main", + "title": "Import Python file", + "updated_at": "2025-01-18T22:12:22.044921Z", + "state": "COMPLETED", + "progress": null, + "workflow": "import-python-files", + "related_nodes": [ + { + "id": "1808d478-e51e-7504-d0ef-c513f1cd69a5", + "kind": "CoreReadOnlyRepository" + }, + { + "id": "1808d478-e51e-7504-aaaa-c513f1cd69a5", + "kind": "TestMyKind" + } + ], + "logs": { + "edges": [ + { + "node": { + "severity": "info", + "message": "Found 1 check definitions in the repository", + "timestamp": "2025-01-18T22:12:20.371699Z" + } + }, + { + "node": { + "severity": "info", + "message": "Found 3 Python transforms in the repository", + "timestamp": "2025-01-18T22:12:20.603709Z" + } + }, + { + "node": { + "severity": "info", + "message": "Found 4 generator definitions in the repository", + "timestamp": "2025-01-18T22:12:21.259186Z" + } + }, + { + "node": { + "severity": "info", + "message": "Processing generator update_upstream_interfaces_description (generators/upstream_interfaces.py)", + "timestamp": "2025-01-18T22:12:21.259692Z" + } + } + ] + } + } + } + ] + } + } + } \ No newline at end of file diff --git a/tests/fixtures/tasks/mock_query_tasks_empty.json b/tests/fixtures/tasks/mock_query_tasks_empty.json new file mode 100644 index 00000000..66690b00 --- /dev/null +++ b/tests/fixtures/tasks/mock_query_tasks_empty.json @@ -0,0 +1,8 @@ +{ + "data": { + "InfrahubTask": { + "count": 0, + "edges": [] + } + } + } \ No newline at end of file diff --git a/tests/integration/test_infrahub_client.py b/tests/integration/test_infrahub_client.py index 20af4a35..375d4cfd 100644 --- a/tests/integration/test_infrahub_client.py +++ b/tests/integration/test_infrahub_client.py @@ -8,6 
+8,7 @@ from infrahub_sdk.exceptions import BranchNotFoundError, URLNotFoundError from infrahub_sdk.node import InfrahubNode from infrahub_sdk.schema import ProfileSchemaAPI +from infrahub_sdk.task.models import Task, TaskFilter, TaskLog, TaskState from infrahub_sdk.testing.docker import TestInfrahubDockerClient from infrahub_sdk.testing.schemas.animal import TESTING_ANIMAL, TESTING_CAT, TESTING_DOG, TESTING_PERSON, SchemaAnimal @@ -31,6 +32,13 @@ async def base_dataset( ): await client.branch.create(branch_name="branch01") + @pytest.fixture + async def set_pagination_size3(self, client: InfrahubClient): + original_pagination_size = client.pagination_size + client.pagination_size = 3 + yield + client.pagination_size = original_pagination_size + async def test_query_branches(self, client: InfrahubClient, base_dataset): branches = await client.branch.all() main = await client.branch.get(branch_name="main") @@ -154,14 +162,52 @@ async def test_create_generic_rel_with_hfid( self, client: InfrahubClient, base_dataset, cat_luna, person_sophia, schema_animal, schema_cat ): # See https://github.com/opsmill/infrahub-sdk-python/issues/277 - assert ( - schema_animal.human_friendly_id != schema_cat.human_friendly_id - ), "Inherited node schema should have a different hfid than generic one for this test to be relevant" + assert schema_animal.human_friendly_id != schema_cat.human_friendly_id, ( + "Inherited node schema should have a different hfid than generic one for this test to be relevant" + ) person_sophia.favorite_animal = {"hfid": cat_luna.hfid, "kind": TESTING_CAT} await person_sophia.save() person_sophia = await client.get(kind=TESTING_PERSON, id=person_sophia.id, prefetch_relationships=True) assert person_sophia.favorite_animal.id == cat_luna.id + async def test_task_query(self, client: InfrahubClient, base_dataset, set_pagination_size3): + nbr_tasks = await client.task.count() + assert nbr_tasks + + tasks = await client.task.filter(filter=TaskFilter(state=[TaskState.COMPLETED])) + assert tasks + task_ids = [task.id for task in tasks] + + # Query Tasks using Parallel mode + tasks_parallel = await client.task.filter(filter=TaskFilter(state=[TaskState.COMPLETED]), parallel=True) + assert tasks_parallel + assert len(tasks_parallel) == len(tasks) + + # Query Tasks by ID + tasks_parallel_filtered = await client.task.filter(filter=TaskFilter(ids=task_ids[:2]), parallel=True) + assert tasks_parallel_filtered + assert len(tasks_parallel_filtered) == 2 + + # Query individual Task + task = await client.task.get(id=tasks[0].id) + assert task + assert isinstance(task, Task) + assert task.logs == [] + + # Wait for Task completion + task = await client.task.wait_for_completion(id=tasks[0].id) + assert task + assert isinstance(task, Task) + + # Query Tasks with logs + tasks = await client.task.filter(filter=TaskFilter(state=[TaskState.COMPLETED]), include_logs=True) + all_logs = [log for task in tasks for log in task.logs] + assert all_logs + assert isinstance(all_logs[0], TaskLog) + assert all_logs[0].message + assert all_logs[0].timestamp + assert all_logs[0].severity + # async def test_get_generic_filter_source(self, client: InfrahubClient, base_dataset): # admin = await client.get(kind="CoreAccount", name__value="admin") diff --git a/tests/unit/sdk/conftest.py b/tests/unit/sdk/conftest.py index d3dc9dd8..afb9250c 100644 --- a/tests/unit/sdk/conftest.py +++ b/tests/unit/sdk/conftest.py @@ -2507,3 +2507,77 @@ async def mock_query_location_batch(httpx_mock: HTTPXMock, client: InfrahubClien 
match_headers={"X-Infrahub-Tracker": f"query-builtinlocation-page{i}"}, ) return httpx_mock + + +@pytest.fixture +async def mock_query_tasks_01(httpx_mock: HTTPXMock) -> HTTPXMock: + for i in [1, 2]: + filename = get_fixtures_dir() / "tasks" / f"mock_query_tasks_01_page{i}.json" + response_text = filename.read_text(encoding="UTF-8") + httpx_mock.add_response( + method="POST", + json=ujson.loads(response_text), + match_headers={"X-Infrahub-Tracker": f"query-tasks-page{i}"}, + ) + return httpx_mock + + +@pytest.fixture +async def mock_query_tasks_02_main(httpx_mock: HTTPXMock) -> HTTPXMock: + filename = get_fixtures_dir() / "tasks" / "mock_query_tasks_02_page1.json" + response_text = filename.read_text(encoding="UTF-8") + httpx_mock.add_response( + method="POST", + json=ujson.loads(response_text), + match_headers={"X-Infrahub-Tracker": "query-tasks-page1"}, + ) + return httpx_mock + + +@pytest.fixture +async def mock_query_tasks_empty(httpx_mock: HTTPXMock) -> HTTPXMock: + filename = get_fixtures_dir() / "tasks" / "mock_query_tasks_empty.json" + response_text = filename.read_text(encoding="UTF-8") + httpx_mock.add_response( + method="POST", + json=ujson.loads(response_text), + match_headers={"X-Infrahub-Tracker": "query-tasks-page1"}, + ) + return httpx_mock + + +@pytest.fixture +async def mock_query_tasks_03(httpx_mock: HTTPXMock) -> HTTPXMock: + filename = get_fixtures_dir() / "tasks" / "mock_query_tasks_03_page1.json" + response_text = filename.read_text(encoding="UTF-8") + httpx_mock.add_response( + method="POST", + json=ujson.loads(response_text), + match_headers={"X-Infrahub-Tracker": "query-tasks-page1"}, + ) + return httpx_mock + + +@pytest.fixture +async def mock_query_tasks_04_full(httpx_mock: HTTPXMock) -> HTTPXMock: + for i in [1, 2, 3]: + filename = get_fixtures_dir() / "tasks" / f"mock_query_tasks_04_page{i}.json" + response_text = filename.read_text(encoding="UTF-8") + httpx_mock.add_response( + method="POST", + json=ujson.loads(response_text), + match_headers={"X-Infrahub-Tracker": f"query-tasks-page{i}"}, + ) + return httpx_mock + + +@pytest.fixture +async def mock_query_tasks_05(httpx_mock: HTTPXMock) -> HTTPXMock: + filename = get_fixtures_dir() / "tasks" / "mock_query_tasks_05_page1.json" + response_text = filename.read_text(encoding="UTF-8") + httpx_mock.add_response( + method="POST", + json=ujson.loads(response_text), + match_headers={"X-Infrahub-Tracker": "query-tasks-page1"}, + ) + return httpx_mock diff --git a/tests/unit/sdk/test_client.py b/tests/unit/sdk/test_client.py index 4d849221..9f0f4e33 100644 --- a/tests/unit/sdk/test_client.py +++ b/tests/unit/sdk/test_client.py @@ -7,8 +7,14 @@ from infrahub_sdk.exceptions import NodeNotFoundError from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync -async_client_methods = [method for method in dir(InfrahubClient) if not method.startswith("_")] -sync_client_methods = [method for method in dir(InfrahubClientSync) if not method.startswith("_")] +excluded_methods = ["request_context"] + +async_client_methods = [ + method for method in dir(InfrahubClient) if not method.startswith("_") and method not in excluded_methods +] +sync_client_methods = [ + method for method in dir(InfrahubClientSync) if not method.startswith("_") and method not in excluded_methods +] batch_client_types = [ ("standard", False), diff --git a/tests/unit/sdk/test_graphql.py b/tests/unit/sdk/test_graphql.py index de16fe02..06b23022 100644 --- a/tests/unit/sdk/test_graphql.py +++ b/tests/unit/sdk/test_graphql.py @@ -1,8 +1,20 @@ +from enum import 
diff --git a/tests/unit/sdk/test_graphql.py b/tests/unit/sdk/test_graphql.py
index de16fe02..06b23022 100644
--- a/tests/unit/sdk/test_graphql.py
+++ b/tests/unit/sdk/test_graphql.py
@@ -1,8 +1,20 @@
+from enum import Enum
+
 import pytest
+from whenever import Instant
 
 from infrahub_sdk.graphql import Mutation, Query, render_input_block, render_query_block
 
 
+class MyStrEnum(str, Enum):
+    VALUE1 = "value1"
+    VALUE2 = "value2"
+
+
+class MyIntEnum(int, Enum):
+    VALUE1 = 12
+    VALUE2 = 24
+
+
 @pytest.fixture
 def query_data_no_filter():
     data = {
@@ -78,10 +90,10 @@ def query_data_filters_01():
 def query_data_filters_02():
     data = {
         "device": {
-            "@filters": {"name__value": "myname", "integer__value": 44},
+            "@filters": {"name__value": "myname", "integer__value": 44, "enumstr__value": MyStrEnum.VALUE2},
             "name": {"value": None},
             "interfaces": {
-                "@filters": {"enabled__value": True},
+                "@filters": {"enabled__value": True, "enumint__value": MyIntEnum.VALUE1},
                 "name": {"value": None},
             },
         }
     }
@@ -324,11 +336,11 @@ def test_query_rendering_with_filters(query_data_filters_02):
 
     expected_query = """
 query {
-    device(name__value: "myname", integer__value: 44) {
+    device(name__value: "myname", integer__value: 44, enumstr__value: VALUE2) {
         name {
             value
         }
-        interfaces(enabled__value: true) {
+        interfaces(enabled__value: true, enumint__value: VALUE1) {
             name {
                 value
             }
@@ -339,6 +351,26 @@ def test_query_rendering_with_filters(query_data_filters_02):
     assert query.render() == expected_query
 
 
+def test_query_rendering_with_filters_convert_enum(query_data_filters_02):
+    query = Query(query=query_data_filters_02)
+
+    expected_query = """
+query {
+    device(name__value: "myname", integer__value: 44, enumstr__value: "value2") {
+        name {
+            value
+        }
+        interfaces(enabled__value: true, enumint__value: 12) {
+            name {
+                value
+            }
+        }
+    }
+}
+"""
+    assert query.render(convert_enum=True) == expected_query
+
+
 def test_mutation_rendering_no_vars(input_data_01):
     query_data = {"ok": None, "object": {"id": None}}
 
@@ -425,6 +457,40 @@ def test_mutation_rendering_many_relationships():
     assert query.render() == expected_query
 
 
+def test_mutation_rendering_enum():
+    query_data = {"ok": None, "object": {"id": None}}
+    input_data = {
+        "data": {
+            "description": {"value": MyStrEnum.VALUE1},
+            "size": {"value": MyIntEnum.VALUE2},
+        }
+    }
+
+    query = Mutation(mutation="myobject", query=query_data, input_data=input_data)
+
+    expected_query = """
+mutation {
+    myobject(
+        data: {
+            description: {
+                value: VALUE1
+            }
+            size: {
+                value: VALUE2
+            }
+        }
+    ){
+        ok
+        object {
+            id
+        }
+    }
+}
+"""
+    assert query.render_first_line() == "mutation {"
+    assert query.render() == expected_query
+
+
 def test_mutation_rendering_with_vars(input_data_01):
     query_data = {"ok": None, "object": {"id": None}}
     variables = {"name": str, "description": str, "number": int}
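# --- Illustrative note (not part of the patch) ---
# The two rendering modes exercised above differ only in how Python Enum
# members are serialized into the GraphQL document: render() emits the bare
# member name, render(convert_enum=True) emits the native value. A short
# usage sketch reusing the same MyStrEnum shape as the tests; the
# `status__value` filter name is a hypothetical example:
from enum import Enum

from infrahub_sdk.graphql import Query


class MyStrEnum(str, Enum):
    VALUE1 = "value1"
    VALUE2 = "value2"


data = {"device": {"@filters": {"status__value": MyStrEnum.VALUE1}, "name": {"value": None}}}
query = Query(query=data)
print(query.render())                   # member name:  status__value: VALUE1
print(query.render(convert_enum=True))  # native value: status__value: "value1"
# --- end illustrative note ---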
"standard": + tasks = await clients.standard.task.all(include_logs=True, include_related_nodes=True) + else: + tasks = clients.sync.task.all(include_logs=True, include_related_nodes=True) + + assert len(tasks) == 5 + assert isinstance(tasks[0], Task) + + +async def test_generate_count_query(): + query = InfraHubTaskManagerBase._generate_count_query() + assert query + assert ( + query.render() + == """ +query { + InfrahubTask { + count + } +} +""" + ) + + query2 = InfraHubTaskManagerBase._generate_count_query( + filters=TaskFilter(ids=["azerty", "qwerty"], state=[TaskState.COMPLETED]) + ) + assert query2 + assert ( + query2.render() + == """ +query { + InfrahubTask(ids: ["azerty", "qwerty"], state: [COMPLETED]) { + count + } +} +""" + ) + + +@pytest.mark.parametrize("client_type", client_types) +async def test_method_filters(clients, mock_query_tasks_02_main, client_type): + if client_type == "standard": + tasks = await clients.standard.task.filter(filter=TaskFilter(branch="main")) + else: + tasks = clients.sync.task.filter(filter=TaskFilter(branch="main")) + + assert len(tasks) == 2 + assert isinstance(tasks[0], Task) + + +@pytest.mark.parametrize("client_type", client_types) +async def test_method_get_too_many(clients, mock_query_tasks_02_main, client_type): + with pytest.raises(TooManyTasksError): + if client_type == "standard": + await clients.standard.task.get(id="a60f4431-6a43-451e-8f42-9ec5db9a9370") + else: + clients.sync.task.get(id="a60f4431-6a43-451e-8f42-9ec5db9a9370") + + +@pytest.mark.parametrize("client_type", client_types) +async def test_method_get_not_found(clients, mock_query_tasks_empty, client_type): + with pytest.raises(TaskNotFoundError): + if client_type == "standard": + await clients.standard.task.get(id="a60f4431-6a43-451e-8f42-9ec5db9a9370") + else: + clients.sync.task.get(id="a60f4431-6a43-451e-8f42-9ec5db9a9370") + + +@pytest.mark.parametrize("client_type", client_types) +async def test_method_get(clients, mock_query_tasks_03, client_type): + if client_type == "standard": + task = await clients.standard.task.get(id="a60f4431-6a43-451e-8f42-9ec5db9a9370") + else: + task = clients.sync.task.get(id="a60f4431-6a43-451e-8f42-9ec5db9a9370") + + assert task + assert task.id == "a60f4431-6a43-451e-8f42-9ec5db9a9370" + + +@pytest.mark.parametrize("client_type", client_types) +async def test_method_get_full(clients, mock_query_tasks_05, client_type): + if client_type == "standard": + task = await clients.standard.task.get(id="32116fcd-9071-43a7-9f14-777901020b5b") + else: + task = clients.sync.task.get(id="32116fcd-9071-43a7-9f14-777901020b5b") + + assert task + assert task.id == "32116fcd-9071-43a7-9f14-777901020b5b" + assert len(task.logs) == 4 + assert len(task.related_nodes) == 2 + assert task.model_dump() == { + "branch": "main", + "created_at": datetime(2025, 1, 18, 22, 12, 20, 228112, tzinfo=timezone.utc), + "id": "32116fcd-9071-43a7-9f14-777901020b5b", + "logs": [ + { + "message": "Found 1 check definitions in the repository", + "severity": "info", + "timestamp": datetime(2025, 1, 18, 22, 12, 20, 371699, tzinfo=timezone.utc), + }, + { + "message": "Found 3 Python transforms in the repository", + "severity": "info", + "timestamp": datetime(2025, 1, 18, 22, 12, 20, 603709, tzinfo=timezone.utc), + }, + { + "message": "Found 4 generator definitions in the repository", + "severity": "info", + "timestamp": datetime(2025, 1, 18, 22, 12, 21, 259186, tzinfo=timezone.utc), + }, + { + "message": "Processing generator update_upstream_interfaces_description 
diff --git a/tests/unit/sdk/test_timestamp.py b/tests/unit/sdk/test_timestamp.py
index bcdf18a7..c800e46a 100644
--- a/tests/unit/sdk/test_timestamp.py
+++ b/tests/unit/sdk/test_timestamp.py
@@ -1,17 +1,22 @@
-import pendulum
+from datetime import datetime, timezone
+
 import pytest
+from whenever import Instant
+
+from infrahub_sdk.exceptions import TimestampFormatError
+from infrahub_sdk.timestamp import Timestamp
 
-from infrahub_sdk.timestamp import Timestamp, TimestampFormatError
+UTC = timezone.utc  # Required for older versions of Python
 
 
 def test_init_empty():
     t1 = Timestamp()
     assert isinstance(t1, Timestamp)
-    assert t1.to_string() == t1.obj.to_iso8601_string()
+    assert t1.to_datetime() == t1._obj.py_datetime()
 
     t2 = Timestamp(None)
     assert isinstance(t2, Timestamp)
-    assert t2.to_string() == t2.obj.to_iso8601_string()
+    assert t2.to_datetime() == t2._obj.py_datetime()
 
 
 def test_init_timestamp():
@@ -19,22 +24,85 @@
     t2 = Timestamp(t1)
     assert t1.to_string() == t2.to_string()
     assert isinstance(t2, Timestamp)
-    assert t2.to_string() == t2.obj.to_iso8601_string()
+    assert t2.to_datetime() == t2._obj.py_datetime()
 
 
 def test_parse_string():
     REF = "2022-01-01T10:00:00.000000Z"
-    assert Timestamp._parse_string(REF) == pendulum.parse(REF)
+    assert Timestamp._parse_string(REF).instant() == Instant.parse_common_iso(REF)
 
     assert Timestamp._parse_string("5m")
     assert Timestamp._parse_string("10min")
     assert Timestamp._parse_string("2h")
     assert Timestamp._parse_string("10s")
-
-    with pytest.raises(ValueError):
+    assert Timestamp._parse_string("2025-01-02")
+    assert Timestamp._parse_string("2024-06-04T03:13:03.386270")
+    assert Timestamp._parse_string("2025-03-05T18:01:52+01:00")
+
+    with pytest.raises(TimestampFormatError):
         Timestamp._parse_string("notvalid")
 
 
+@pytest.mark.parametrize(
+    "input_str,expected_datetime",
+    [
+        pytest.param(
+            "2022-01-01T10:01:01.123000Z", datetime(2022, 1, 1, 10, 1, 1, 123000, tzinfo=UTC), id="milliseconds"
+        ),
+        pytest.param(
+            "2023-12-31T23:59:59.999999Z", datetime(2023, 12, 31, 23, 59, 59, 999999, tzinfo=UTC), id="microseconds"
+        ),
+        pytest.param(
+            "2025-02-25T05:58:54.524191Z",
+            datetime(2025, 2, 25, 5, 58, 54, 524191, tzinfo=UTC),
+            id="milliseconds_with_offset",
+        ),
+        pytest.param(
+            "2025-02-25T06:38:37.753389419Z",
+            datetime(2025, 2, 25, 6, 38, 37, 753389, tzinfo=UTC),
+            id="nanoseconds",
+        ),
+    ],
+)
+def test_to_datetime(input_str, expected_datetime):
+    assert isinstance(Timestamp(input_str).to_datetime(), datetime)
+    assert Timestamp(input_str).to_datetime() == expected_datetime
+
+
+@pytest.mark.parametrize(
+    "input_str,expected_str,expected_str_no_z",
+    [
+        pytest.param(
+            "2022-01-01T10:01:01.123000Z",
+            "2022-01-01T10:01:01.123000Z",
+            "2022-01-01T10:01:01.123000+00:00",
+            id="milliseconds",
+        ),
+        pytest.param(
+            "2023-12-31T23:59:59.999999Z",
+            "2023-12-31T23:59:59.999999Z",
+            "2023-12-31T23:59:59.999999+00:00",
+            id="microseconds",
+        ),
+    ],
+)
+def test_to_string_default(input_str, expected_str, expected_str_no_z):
+    assert isinstance(Timestamp(input_str).to_string(), str)
+    assert Timestamp(input_str).to_string() == expected_str
+    assert Timestamp(input_str).to_string(with_z=False) == expected_str_no_z
+
+
+def test_add():
+    t1 = Timestamp("2022-01-01T10:01:01.123Z")
+    t2 = t1.add(hours=1)
+    assert t2.to_string() == "2022-01-01T11:01:01.123000Z"
+
+
+def test_subtract():
+    t1 = Timestamp("2022-01-01T10:05:01.123Z")
+    t2 = t1.subtract(hours=1)
+    assert t2.to_string() == "2022-01-01T09:05:01.123000Z"
+
+
 def test_compare():
     time1 = "2022-01-01T11:00:00.000000Z"
     time2 = "2022-02-01T11:00:00.000000Z"
@@ -51,6 +119,15 @@
     assert t11 == t12
 
 
+def test_serialize():
+    time_no_z = "2022-01-01T11:00:00.000000+00:00"
+    time = "2022-01-01T11:00:00.000000Z"
+    timestamp = Timestamp(time)
+
+    assert timestamp.to_string(with_z=False) == time_no_z
+    assert timestamp.to_string() == time
+
+
 @pytest.mark.parametrize("invalid_str", ["blurple", "1122334455667788", "2023-45-99"])
 def test_invalid_raises_correct_error(invalid_str):
     with pytest.raises(TimestampFormatError):
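# --- Illustrative note (not part of the patch) ---
# The Timestamp tests above document the surface added in this release:
# to_datetime(), to_string(with_z=...), add(), and subtract() (see the 1.8.0
# changelog entry). A short usage sketch of the round trip they pin down:
from infrahub_sdk.timestamp import Timestamp

ts = Timestamp("2022-01-01T10:01:01.123Z")
later = ts.add(hours=1)                       # returns a new Timestamp
print(later.to_string())                      # 2022-01-01T11:01:01.123000Z
print(later.to_string(with_z=False))          # 2022-01-01T11:01:01.123000+00:00
print(later.subtract(hours=1).to_datetime())  # back to a tz-aware datetime
# --- end illustrative note ---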
diff --git a/tests/unit/sdk/test_utils.py b/tests/unit/sdk/test_utils.py
index df424d2a..7f628c62 100644
--- a/tests/unit/sdk/test_utils.py
+++ b/tests/unit/sdk/test_utils.py
@@ -4,6 +4,7 @@
 
 import pytest
 from graphql import parse
+from whenever import Instant
 
 from infrahub_sdk.node import InfrahubNode
 from infrahub_sdk.utils import (
@@ -11,6 +12,7 @@
     base16encode,
     base36decode,
     base36encode,
+    calculate_time_diff,
     compare_lists,
     deep_merge_dict,
     dict_hash,
@@ -207,3 +209,20 @@ def test_write_to_file():
     assert write_to_file(directory / "file.txt", {"key": "value"}) is True
 
     tmp_dir.cleanup()
+
+
+def test_calculate_time_diff():
+    time1 = Instant.now().subtract(seconds=98).format_common_iso()
+    assert calculate_time_diff(time1) == "1m and 38s ago"
+
+    time2 = Instant.now().subtract(hours=1, minutes=12, seconds=34).format_common_iso()
+    assert calculate_time_diff(time2) == "1h 12m and 34s ago"
+
+    time3 = Instant.now().format_common_iso()
+    assert calculate_time_diff(time3) == "now"
+
+    time4 = Instant.now().subtract(seconds=23).format_common_iso()
+    assert calculate_time_diff(time4) == "23s ago"
+
+    time5 = Instant.now().subtract(hours=77, minutes=12, seconds=34).format_common_iso()
+    assert calculate_time_diff(time5) == "3d and 5h ago"
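# --- Illustrative note (not part of the patch) ---
# calculate_time_diff (exercised above) turns an ISO timestamp string into a
# short human-readable age, presumably for display purposes. A minimal usage
# sketch; the expected output follows the format the test pins down:
from whenever import Instant

from infrahub_sdk.utils import calculate_time_diff

started = Instant.now().subtract(minutes=5, seconds=3).format_common_iso()
print(calculate_time_diff(started))  # expected per the test's format: "5m and 3s ago"
# --- end illustrative note ---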