diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6c91790..ce40b3e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -22,11 +22,28 @@ jobs: - name: Install requirements run: | + sudo add-apt-repository ppa:deadsnakes/ppa sudo apt-get update - sudo apt-get install python3.10 nodejs - pip install -r requirements/dev.txt + sudo apt-get install python3.10 python3-pip python3.10-dev python3.10-distutils nodejs + sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.8 1 + sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.10 2 + sudo update-alternatives --set python /usr/bin/python3.10 + sudo update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 1 + sudo update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.10 2 + sudo update-alternatives --set python3 /usr/bin/python3.10 + python3.10 -m pip install -r requirements/dev.txt npm install @semantic-release/changelog @semantic-release/git conventional-changelog-conventionalcommits + - name: Check python version + run: | + whereis python + whereis python3 + python --version + python3 --version + which pip + whereis pip + pip --version + - name: Package the app run: | pyinstaller idr_client.spec diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1be73fb..463d36a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -24,7 +24,7 @@ repos: rev: v2.37.3 hooks: - id: pyupgrade - args: [--py37-plus] + args: [--py38-plus] - repo: https://gitlab.com/pycqa/flake8 rev: 3.9.2 diff --git a/README.md b/README.md index 49a9db4..7cd6f57 100644 --- a/README.md +++ b/README.md @@ -81,7 +81,7 @@ pip install -r requirements/dev.txt And then create the binary using the following command:- ```bash -pyinstaller app/__main__.py --hidden-import apps/imp --collect-all app --name idr_client_temp -F +pyinstaller idr_client.spec ``` This will create an executable but the executable will still depend on the target system/computer having the correct system libraries.
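Since both the release workflow and the README now build from a spec file instead of raw CLI flags, here is a brief sketch of how the dropped flags (`--hidden-import`, `--collect-all`, `--name`, `-F`) typically map onto a PyInstaller spec. This is not the repository's actual `idr_client.spec` (the diff does not show it); the `app.imp` hidden import and the output name are assumptions carried over from the old command.

```python
# Illustrative sketch only -- the real idr_client.spec may differ. A .spec file
# is plain Python executed via `pyinstaller idr_client.spec`; Analysis, PYZ and
# EXE are injected into the spec's namespace by PyInstaller itself.
from PyInstaller.utils.hooks import collect_all

# Equivalent of the old `--collect-all app` flag.
datas, binaries, hiddenimports = collect_all("app")

a = Analysis(
    ["app/__main__.py"],
    datas=datas,
    binaries=binaries,
    hiddenimports=hiddenimports + ["app.imp"],  # assumed; mirrors the old --hidden-import
)
pyz = PYZ(a.pure)
exe = EXE(  # folding binaries/zipfiles/datas into EXE yields a one-file build, like the old -F
    pyz,
    a.scripts,
    a.binaries,
    a.zipfiles,
    a.datas,
    name="idr_client",  # assumed output name
    console=True,
)
```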
More details on this diff --git a/app/__init__.py b/app/__init__.py index 3820e39..422ae33 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -1,15 +1,6 @@ +from collections.abc import Mapping, Sequence from logging.config import dictConfig -from typing import ( - Any, - Dict, - Final, - List, - Mapping, - Optional, - Sequence, - Type, - cast, -) +from typing import Any, Final, Optional, cast import yaml from yaml import Loader @@ -39,7 +30,7 @@ str ] = "SUPPORTED_DATA_SOURCE_TYPES" # noqa -_DEFAULT_CONFIG: Final[Dict[str, Any]] = { +_DEFAULT_CONFIG: Final[dict[str, Any]] = { _LOGGING_CONFIG_KEY: { "version": 1, "disable_existing_loggers": False, @@ -101,10 +92,10 @@ def _load_config_file( def _load_settings_initializers( initializers_dotted_paths: Sequence[str], ) -> Sequence[SettingInitializer]: - initializers: List[SettingInitializer] = list() + initializers: list[SettingInitializer] = list() for _initializer_dotted_path in initializers_dotted_paths: try: - initializer_klass: Type[SettingInitializer] + initializer_klass: type[SettingInitializer] initializer_klass = import_string_as_klass( _initializer_dotted_path, SettingInitializer ) @@ -172,7 +163,7 @@ def setting(self) -> str: return _LOGGING_CONFIG_KEY def execute(self, an_input: Optional[Mapping[str, Any]]) -> Any: - logging_config: Dict[str, Any] = dict( + logging_config: dict[str, Any] = dict( an_input or _DEFAULT_CONFIG[self.setting] ) dictConfig(logging_config) @@ -207,9 +198,9 @@ def execute(self, an_input: Optional[Sequence[str]]) -> Any: @staticmethod def _dotted_path_to_data_source_type_klass( dotted_path: str, - ) -> Type[DataSourceType]: + ) -> type[DataSourceType]: try: - data_source_type_klass: Type[DataSourceType] + data_source_type_klass: type[DataSourceType] data_source_type_klass = import_string_as_klass( dotted_path, DataSourceType ) @@ -252,13 +243,13 @@ def setup( registry = AppRegistry() # type: ignore # Load the application settings - _settings_dict: Dict[str, Any] = dict(initial_settings or _DEFAULT_CONFIG) + _settings_dict: dict[str, Any] = dict(initial_settings or _DEFAULT_CONFIG) # Load config from a file when provided if config_file_path: # pragma: no branch _settings_dict.update(_load_config_file(config_file_path)) # Load initializers - _initializers: List[Any] = list(settings_initializers or []) + _initializers: list[Any] = list(settings_initializers or []) _initializers.extend( _load_settings_initializers( _settings_dict.get(_SETTINGS_INITIALIZERS_CONFIG_KEY, tuple()) diff --git a/app/__main__.py b/app/__main__.py index 3798425..9967b48 100644 --- a/app/__main__.py +++ b/app/__main__.py @@ -1,5 +1,6 @@ from argparse import ArgumentParser -from typing import Optional, Sequence +from collections.abc import Sequence +from typing import Optional import app from app.__version__ import __title__, __version__ diff --git a/app/core/domain.py b/app/core/domain.py index 9b713af..4c6c8b2 100644 --- a/app/core/domain.py +++ b/app/core/domain.py @@ -1,6 +1,7 @@ from abc import ABCMeta, abstractmethod +from collections.abc import Mapping, Sequence from functools import lru_cache -from typing import Any, Generic, Mapping, Optional, Sequence, Type, TypeVar +from typing import Any, Generic, Optional, TypeVar from typing_inspect import is_optional_type @@ -23,7 +24,7 @@ @lru_cache(maxsize=None) -def _get_available_annotations(do_klass: Type[_ADO]) -> Mapping[str, Any]: +def _get_available_annotations(do_klass: type[_ADO]) -> Mapping[str, Any]: """Extract all annotations available on a domain object class. 
This includes all annotations defined on the class's ancestors. @@ -45,7 +46,7 @@ def _get_available_annotations(do_klass: Type[_ADO]) -> Mapping[str, Any]: @lru_cache(maxsize=None) -def _get_required_fields_names(do_klass: Type[_ADO]) -> Sequence[str]: +def _get_required_fields_names(do_klass: type[_ADO]) -> Sequence[str]: """Determine and return the required fields of a domain object class. A required field in the context of this method is defined as one whose @@ -444,7 +445,7 @@ def __str__(self) -> str: @classmethod @abstractmethod - def imp_data_source_klass(cls) -> Type[DataSource]: + def imp_data_source_klass(cls) -> type[DataSource]: """ Return the :class:`DataSource` concrete implementation class for this data source type. @@ -455,7 +456,7 @@ def imp_data_source_klass(cls) -> Type[DataSource]: @classmethod @abstractmethod - def imp_extract_metadata_klass(cls) -> Type[ExtractMetadata]: + def imp_extract_metadata_klass(cls) -> type[ExtractMetadata]: """ Return the :class:`ExtractMetadata` concrete implementation class for this dats source type. @@ -467,7 +468,7 @@ def imp_extract_metadata_klass(cls) -> Type[ExtractMetadata]: @classmethod @abstractmethod - def imp_upload_chunk_klass(cls) -> Type[UploadChunk]: + def imp_upload_chunk_klass(cls) -> type[UploadChunk]: """ Return the :class:`UploadChunk` concrete implementation class for this data source type. @@ -478,7 +479,7 @@ def imp_upload_chunk_klass(cls) -> Type[UploadChunk]: @classmethod @abstractmethod - def imp_upload_metadata_klass(cls) -> Type[UploadMetadata]: + def imp_upload_metadata_klass(cls) -> type[UploadMetadata]: """ Return the :class:`UploadMetadata` concrete implementation class for this data source type. diff --git a/app/core/mixins.py b/app/core/mixins.py index 6b40e17..d3ee757 100644 --- a/app/core/mixins.py +++ b/app/core/mixins.py @@ -1,14 +1,7 @@ from abc import ABCMeta, abstractmethod +from collections.abc import Mapping from types import TracebackType -from typing import ( - Any, - ContextManager, - Generic, - Mapping, - Optional, - Type, - TypeVar, -) +from typing import Any, ContextManager, Generic, Optional, TypeVar from .task import Task @@ -30,7 +23,7 @@ class Disposable(ContextManager, metaclass=ABCMeta): def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> Optional[bool]: diff --git a/app/core/transport.py b/app/core/transport.py index ad2889e..be32483 100644 --- a/app/core/transport.py +++ b/app/core/transport.py @@ -1,5 +1,6 @@ from abc import ABCMeta, abstractmethod -from typing import Any, Mapping, Optional, Sequence +from collections.abc import Mapping, Sequence +from typing import Any, Optional from .domain import ( DataSource, diff --git a/app/imp/sql_data/domain.py b/app/imp/sql_data/domain.py index 6ac29ff..9b58e8e 100644 --- a/app/imp/sql_data/domain.py +++ b/app/imp/sql_data/domain.py @@ -1,7 +1,8 @@ import io +from collections.abc import Mapping, Sequence from enum import Enum from logging import getLogger -from typing import Any, Dict, Final, Mapping, Optional, Sequence, Type +from typing import Any, Final, Optional import pandas as pd import pyarrow as pa @@ -89,7 +90,7 @@ def __init__(self, **kwargs): data_source_type: SQLDataSourceType = kwargs.pop("data_source_type") super().__init__(**kwargs) self._data_source_type: SQLDataSourceType = data_source_type - self._extract_metadata: Dict[str, "SQLExtractMetadata"] = dict() + self._extract_metadata: dict[str, 
"SQLExtractMetadata"] = dict() self._engine: Optional[Engine] = None def __enter__(self) -> "SQLDataSource": @@ -228,7 +229,7 @@ def __init__(self, **kwargs): "description", "Represents SQL databases as a source type." ) super().__init__(**kwargs) - self._data_sources: Dict[str, SQLDataSource] = dict() + self._data_sources: dict[str, SQLDataSource] = dict() @property def code(self) -> str: @@ -243,19 +244,19 @@ def data_sources(self, data_sources: Mapping[str, SQLDataSource]) -> None: self._data_sources = dict(**data_sources) @classmethod - def imp_data_source_klass(cls) -> Type[DataSource]: + def imp_data_source_klass(cls) -> type[DataSource]: return SQLDataSource @classmethod - def imp_extract_metadata_klass(cls) -> Type[ExtractMetadata]: + def imp_extract_metadata_klass(cls) -> type[ExtractMetadata]: return SQLExtractMetadata @classmethod - def imp_upload_chunk_klass(cls) -> Type[UploadChunk]: + def imp_upload_chunk_klass(cls) -> type[UploadChunk]: return SQLUploadChunk @classmethod - def imp_upload_metadata_klass(cls) -> Type[UploadMetadata]: + def imp_upload_metadata_klass(cls) -> type[UploadMetadata]: return SQLUploadMetadata diff --git a/app/lib/app_registry.py b/app/lib/app_registry.py index 1719e0c..8a4c54d 100644 --- a/app/lib/app_registry.py +++ b/app/lib/app_registry.py @@ -1,4 +1,5 @@ -from typing import Callable, Dict, Mapping, Optional +from collections.abc import Mapping +from typing import Callable, Optional from app.core import DataSourceType, Transport @@ -24,7 +25,7 @@ class AppRegistry: """ def __init__(self): - self._data_source_types: Dict[str, DataSourceType] = dict() + self._data_source_types: dict[str, DataSourceType] = dict() self._default_transport_factory: Optional[ DefaultTransportFactory ] = None diff --git a/app/lib/checkers.py b/app/lib/checkers.py index 1d8b00c..90eb31d 100644 --- a/app/lib/checkers.py +++ b/app/lib/checkers.py @@ -1,5 +1,6 @@ from abc import abstractmethod -from typing import Optional, Protocol, Sized, SupportsFloat, TypeVar +from collections.abc import Sized +from typing import Optional, Protocol, SupportsFloat, TypeVar # ============================================================================= # TYPES diff --git a/app/lib/config/config.py b/app/lib/config/config.py index 1aab120..ed9e487 100644 --- a/app/lib/config/config.py +++ b/app/lib/config/config.py @@ -1,5 +1,6 @@ import logging -from typing import Any, Dict, List, Mapping, Optional, Sequence +from collections.abc import Mapping, Sequence +from typing import Any, Optional from .exceptions import MissingSettingError from .setting_initializer import SettingInitializer @@ -55,7 +56,7 @@ def __init__( :param settings_initializers: Optional initializers to perform post initialization tasks. 
""" - self._settings: Dict[str, Any] = dict(settings or {}) + self._settings: dict[str, Any] = dict(settings or {}) self._initializers: Mapping[ str, Sequence[SettingInitializer] ] = self._group_related_initializers(settings_initializers or tuple()) @@ -111,7 +112,7 @@ def _run_initializers(self) -> None: def _group_related_initializers( initializers: Sequence[SettingInitializer], ) -> Mapping[str, Sequence[SettingInitializer]]: - grouped_initializers: Dict[str, List[SettingInitializer]] = dict() + grouped_initializers: dict[str, list[SettingInitializer]] = dict() for _initializer in initializers: grouped_initializers.setdefault(_initializer.setting, []).append( _initializer diff --git a/app/lib/module_loading.py b/app/lib/module_loading.py index 4b3c09a..6868027 100644 --- a/app/lib/module_loading.py +++ b/app/lib/module_loading.py @@ -3,7 +3,7 @@ import sys from importlib import import_module from types import ModuleType -from typing import Type, TypeVar, cast +from typing import TypeVar, cast # ============================================================================= # TYPES @@ -62,8 +62,8 @@ def import_string(dotted_path: str) -> ModuleType: def import_string_as_klass( - dotted_path: str, target_klass: Type[_T] -) -> Type[_T]: + dotted_path: str, target_klass: type[_T] +) -> type[_T]: """ Import a dotted module as the given class type. Raise ``ImportError`` if the import failed and a ``TypeError`` if the imported module is not of the @@ -88,4 +88,4 @@ def import_string_as_klass( % (dotted_path, target_klass.__qualname__) ) - return cast(Type[target_klass], _module) + return cast(type[target_klass], _module) diff --git a/app/lib/retry/retry.py b/app/lib/retry/retry.py index 6809c34..b394078 100644 --- a/app/lib/retry/retry.py +++ b/app/lib/retry/retry.py @@ -1,18 +1,10 @@ import random import time +from collections.abc import Generator, Mapping from datetime import datetime, timedelta from functools import partial from logging import Logger, getLogger -from typing import ( - Any, - Callable, - Final, - Generator, - Mapping, - Optional, - Tuple, - Type, -) +from typing import Any, Callable, Final, Optional import wrapt @@ -49,7 +41,7 @@ def _enable_retries() -> bool: ) -def if_exception_type_factory(*exp_types: Type[BaseException]) -> Predicate: +def if_exception_type_factory(*exp_types: type[BaseException]) -> Predicate: """Create a retry predicate for the given exception types. :param exp_types: The exception types to check for. @@ -129,7 +121,7 @@ def __call__( # Types and default values are included on the rest of the arguments to # quiet pyright. instance: Any = None, - args: Tuple[Any, ...] = tuple(), + args: tuple[Any, ...] 
= tuple(), kwargs: Optional[Mapping[str, Any]] = None, ) -> Any: self.load_config() diff --git a/app/lib/tasks/common.py b/app/lib/tasks/common.py index 08544b0..a28e43e 100644 --- a/app/lib/tasks/common.py +++ b/app/lib/tasks/common.py @@ -1,5 +1,6 @@ +from collections.abc import Sequence from functools import reduce -from typing import Any, Callable, Generic, Sequence, TypeVar, cast +from typing import Any, Callable, Generic, TypeVar, cast from app.core import Task diff --git a/app/lib/tasks/concurrent.py b/app/lib/tasks/concurrent.py index 73d2dd2..31664c3 100644 --- a/app/lib/tasks/concurrent.py +++ b/app/lib/tasks/concurrent.py @@ -1,15 +1,8 @@ -import sys +from collections.abc import MutableSequence, Sequence from concurrent.futures import Executor, Future, ThreadPoolExecutor from functools import reduce from logging import getLogger -from typing import ( - Callable, - Generic, - MutableSequence, - Optional, - Sequence, - TypeVar, -) +from typing import Any, Callable, Generic, Optional, TypeVar from app.core import Disposable, IDRClientException, Task @@ -22,15 +15,10 @@ _RT = TypeVar("_RT") -if sys.version_info >= (3, 9): # pragma: no branch - Accumulator = Callable[ - [MutableSequence[Future[_RT]], Future[_RT]], - MutableSequence[Future[_RT]], - ] -else: # pragma: no branch - Accumulator = Callable[ - [MutableSequence[Future], Future], MutableSequence[Future] - ] +Accumulator = Callable[ + [MutableSequence[Future[_RT]], Future[_RT]], + MutableSequence[Future[_RT]], +] # ============================================================================= @@ -46,7 +34,7 @@ # ============================================================================= -def completed_successfully(future: Future) -> bool: +def completed_successfully(future: Future[Any]) -> bool: """ Checks if a :class:`future ` completed successfully and returns ``True`` if so and ``False`` otherwise. 
In this context a *future* is @@ -106,7 +94,7 @@ def __init__( self, *tasks: Task[_IN, _RT], accumulator: Optional[Accumulator] = None, - initial_value: Optional[MutableSequence["Future[_RT]"]] = None, + initial_value: Optional[MutableSequence[Future[_RT]]] = None, executor: Optional[Executor] = None, ): """ @@ -129,7 +117,7 @@ def __init__( self._accumulator: Accumulator = ( accumulator or self._default_accumulator ) - self._initial_value: MutableSequence["Future[_RT]"] + self._initial_value: MutableSequence[Future[_RT]] self._initial_value = initial_value or list() self._executor: Executor = executor or ThreadPoolExecutor() self._is_disposed: bool = False @@ -150,7 +138,7 @@ def dispose(self) -> None: self._executor.shutdown(wait=True) self._is_disposed = True - def execute(self, an_input: _IN) -> MutableSequence["Future[_RT]"]: + def execute(self, an_input: _IN) -> MutableSequence[Future[_RT]]: self._ensure_not_disposed() return reduce( lambda _partial, _tsk: self._accumulator( @@ -167,9 +155,9 @@ def _ensure_not_disposed(self) -> None: @staticmethod def _default_accumulator( - partial_results: MutableSequence["Future[_RT]"], - task_output: "Future[_RT]", - ) -> MutableSequence["Future[_RT]"]: + partial_results: MutableSequence[Future[_RT]], + task_output: Future[_RT], + ) -> MutableSequence[Future[_RT]]: partial_results.append(task_output) return partial_results diff --git a/app/lib/tasks/pandas.py b/app/lib/tasks/pandas.py index 4ef41f1..e095d78 100644 --- a/app/lib/tasks/pandas.py +++ b/app/lib/tasks/pandas.py @@ -1,4 +1,4 @@ -from typing import Sequence +from collections.abc import Sequence import pandas as pd diff --git a/app/lib/transports/http/__init__.py b/app/lib/transports/http/__init__.py index e39b3bd..6ccfbbf 100644 --- a/app/lib/transports/http/__init__.py +++ b/app/lib/transports/http/__init__.py @@ -1,4 +1,5 @@ -from typing import Any, Callable, Final, Mapping, Optional, cast +from collections.abc import Mapping +from typing import Any, Callable, Final, Optional, cast from .api_v1_dialect import IDRServerAPIv1, idr_server_api_v1_dialect_factory from .http_api_dialect import HTTPAPIDialect diff --git a/app/lib/transports/http/api_v1_dialect.py b/app/lib/transports/http/api_v1_dialect.py index be9198d..f3f25ee 100644 --- a/app/lib/transports/http/api_v1_dialect.py +++ b/app/lib/transports/http/api_v1_dialect.py @@ -1,6 +1,7 @@ import json from base64 import b64encode -from typing import Any, Final, Mapping, Optional, Sequence +from collections.abc import Mapping, Sequence +from typing import Any, Final, Optional from app.core import ( DataSource, diff --git a/app/lib/transports/http/http_api_dialect.py b/app/lib/transports/http/http_api_dialect.py index 6a6e509..7a2c2a5 100644 --- a/app/lib/transports/http/http_api_dialect.py +++ b/app/lib/transports/http/http_api_dialect.py @@ -1,5 +1,6 @@ from abc import ABCMeta, abstractmethod -from typing import Any, Mapping, Optional, Sequence +from collections.abc import Mapping, Sequence +from typing import Any, Optional from app.core import ( DataSource, diff --git a/app/lib/transports/http/http_transport.py b/app/lib/transports/http/http_transport.py index e3df74c..fbcd699 100644 --- a/app/lib/transports/http/http_transport.py +++ b/app/lib/transports/http/http_transport.py @@ -1,6 +1,7 @@ import logging +from collections.abc import Mapping, Sequence from threading import RLock -from typing import Any, Mapping, Optional, Sequence +from typing import Any, Optional from requests.auth import AuthBase from requests.exceptions import 
RequestException diff --git a/app/lib/transports/http/types.py b/app/lib/transports/http/types.py index 06cfb9a..8298828 100644 --- a/app/lib/transports/http/types.py +++ b/app/lib/transports/http/types.py @@ -1,20 +1,12 @@ -from typing import ( - Any, - Iterable, - Mapping, - Optional, - Sequence, - Tuple, - TypedDict, - Union, -) +from collections.abc import Iterable, Mapping, Sequence +from typing import Any, Optional, TypedDict, Union -_FileSpec = Tuple[ +_FileSpec = tuple[ str, bytes, str # File name # File content # File content type ] -_Files = Union[Mapping[str, _FileSpec], Iterable[Tuple[str, _FileSpec]]] +_Files = Union[Mapping[str, _FileSpec], Iterable[tuple[str, _FileSpec]]] class _OptionalAdapterRequestParams(TypedDict, total=False): diff --git a/app/use_cases/fetch_metadata.py b/app/use_cases/fetch_metadata.py index dd17cd1..3e92170 100644 --- a/app/use_cases/fetch_metadata.py +++ b/app/use_cases/fetch_metadata.py @@ -1,7 +1,7 @@ +from collections.abc import Iterable, Mapping, Sequence from concurrent.futures import Future, as_completed from itertools import chain from logging import getLogger -from typing import Iterable, Mapping, Sequence from app.core import ( DataSource, diff --git a/app/use_cases/main_pipeline.py b/app/use_cases/main_pipeline.py index 6341c4c..fc0ad57 100644 --- a/app/use_cases/main_pipeline.py +++ b/app/use_cases/main_pipeline.py @@ -1,4 +1,5 @@ -from typing import Any, Sequence +from collections.abc import Sequence +from typing import Any from app.core import DataSourceType, ExtractMetadata, Transport from app.lib import Pipeline diff --git a/app/use_cases/run_extraction.py b/app/use_cases/run_extraction.py index 4109ddc..86f37d2 100644 --- a/app/use_cases/run_extraction.py +++ b/app/use_cases/run_extraction.py @@ -1,7 +1,8 @@ +from collections.abc import Sequence from concurrent.futures import Future, as_completed from itertools import chain, groupby from logging import getLogger -from typing import Any, Sequence, Tuple +from typing import Any from app.core import ( DataSource, @@ -29,7 +30,7 @@ # TYPES # ============================================================================= -_GroupedSiblingExtracts = Tuple[DataSource, Sequence[ExtractMetadata]] +_GroupedSiblingExtracts = tuple[DataSource, Sequence[ExtractMetadata]] # ============================================================================= diff --git a/app/use_cases/types.py b/app/use_cases/types.py index f3bc180..e12e005 100644 --- a/app/use_cases/types.py +++ b/app/use_cases/types.py @@ -1,7 +1,8 @@ -from typing import Any, Sequence, Tuple +from collections.abc import Sequence +from typing import Any from app.core import ExtractMetadata, UploadChunk, UploadMetadata -RunExtractionResult = Tuple[ExtractMetadata, Any] +RunExtractionResult = tuple[ExtractMetadata, Any] -UploadExtractResult = Tuple[UploadMetadata, Sequence[UploadChunk]] +UploadExtractResult = tuple[UploadMetadata, Sequence[UploadChunk]] diff --git a/app/use_cases/upload_extracts.py b/app/use_cases/upload_extracts.py index 4df9f63..c3e8a57 100644 --- a/app/use_cases/upload_extracts.py +++ b/app/use_cases/upload_extracts.py @@ -1,6 +1,7 @@ +from collections.abc import Iterable, Sequence from concurrent.futures import Future, as_completed from logging import getLogger -from typing import Any, Iterable, Sequence, Tuple, Type +from typing import Any import app from app.core import ( @@ -34,9 +35,9 @@ # TYPES # ============================================================================= -_PostedUpload = Tuple[UploadMetadata, Any] 
+_PostedUpload = tuple[UploadMetadata, Any] -_PreparedChunks = Tuple[UploadMetadata, Sequence[bytes]] +_PreparedChunks = tuple[UploadMetadata, Sequence[bytes]] # ============================================================================= @@ -57,7 +58,7 @@ def execute(self, an_input: Transport) -> _PostedUpload: extract_meta: ExtractMetadata = self._extract[0] parent_ds: DataSource = extract_meta.data_source parent_dst: DataSourceType = parent_ds.data_source_type - upload_meta_klass: Type[ + upload_meta_klass: type[ UploadMetadata ] = parent_dst.imp_upload_metadata_klass() content_type: str = upload_meta_klass.get_content_type() diff --git a/tests/core/factories.py b/tests/core/factories.py index 30714ad..40a6320 100644 --- a/tests/core/factories.py +++ b/tests/core/factories.py @@ -1,5 +1,6 @@ import uuid -from typing import Any, Mapping, Optional, Sequence, Type +from collections.abc import Mapping, Sequence +from typing import Any, Optional import factory @@ -74,19 +75,19 @@ def data_sources(self, data_sources: Mapping[str, FakeDataSource]): self._data_sources = data_sources @classmethod - def imp_data_source_klass(cls) -> Type[DataSource]: + def imp_data_source_klass(cls) -> type[DataSource]: return FakeDataSource @classmethod - def imp_extract_metadata_klass(cls) -> Type[ExtractMetadata]: + def imp_extract_metadata_klass(cls) -> type[ExtractMetadata]: return FakeExtractMetadata @classmethod - def imp_upload_chunk_klass(cls) -> Type[UploadChunk]: + def imp_upload_chunk_klass(cls) -> type[UploadChunk]: return FakeUploadChunk @classmethod - def imp_upload_metadata_klass(cls) -> Type[UploadMetadata]: + def imp_upload_metadata_klass(cls) -> type[UploadMetadata]: return FakeUploadMetadata diff --git a/tests/factories.py b/tests/factories.py index c201911..acc7c3c 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -1,7 +1,7 @@ -from typing import Any, Dict +from typing import Any -def config_factory() -> Dict[str, Any]: +def config_factory() -> dict[str, Any]: """ A factory that returns a configuration object that can be used for testing. 
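Stepping back to the `app/lib/tasks/concurrent.py` hunk above: the `sys.version_info >= (3, 9)` branch around the `Accumulator` alias could be dropped because `concurrent.futures.Future` became subscriptable at runtime in Python 3.9. A minimal, self-contained sketch of the simplified alias in use, with hypothetical task logic rather than the project's classes:

```python
# Runs on Python 3.9+, where Future[...] is valid at runtime without guards.
from collections.abc import MutableSequence
from concurrent.futures import Future, ThreadPoolExecutor
from typing import Callable, TypeVar

_RT = TypeVar("_RT")

# Same shape as the simplified alias in the diff: fold one future into a
# mutable sequence of futures and return the sequence.
Accumulator = Callable[
    [MutableSequence[Future[_RT]], Future[_RT]],
    MutableSequence[Future[_RT]],
]


def default_accumulator(
    partial_results: MutableSequence[Future[_RT]], task_output: Future[_RT]
) -> MutableSequence[Future[_RT]]:
    partial_results.append(task_output)
    return partial_results


if __name__ == "__main__":
    futures: MutableSequence[Future[int]] = []
    with ThreadPoolExecutor() as executor:
        for n in range(3):
            default_accumulator(futures, executor.submit(lambda x=n: x * x))
    print([f.result() for f in futures])  # -> [0, 1, 4]
```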
diff --git a/tests/imp/sql_data/factories.py b/tests/imp/sql_data/factories.py index 2999305..59c1262 100644 --- a/tests/imp/sql_data/factories.py +++ b/tests/imp/sql_data/factories.py @@ -1,4 +1,5 @@ -from typing import Any, Generator, Optional +from collections.abc import Generator +from typing import Any, Optional import factory diff --git a/tests/imp/sql_data/test_domain.py b/tests/imp/sql_data/test_domain.py index bf4ce53..0be27ce 100644 --- a/tests/imp/sql_data/test_domain.py +++ b/tests/imp/sql_data/test_domain.py @@ -1,5 +1,6 @@ import os -from typing import Any, Mapping, Sequence +from collections.abc import Mapping, Sequence +from typing import Any from unittest import TestCase from unittest.mock import patch diff --git a/tests/lib/config/test_config.py b/tests/lib/config/test_config.py index 80cf352..67445c3 100644 --- a/tests/lib/config/test_config.py +++ b/tests/lib/config/test_config.py @@ -1,4 +1,5 @@ -from typing import Any, Mapping, Sequence +from collections.abc import Mapping, Sequence +from typing import Any from unittest import TestCase import pytest diff --git a/tests/lib/tasks/test_concurrent.py b/tests/lib/tasks/test_concurrent.py index 1354d53..31358ca 100644 --- a/tests/lib/tasks/test_concurrent.py +++ b/tests/lib/tasks/test_concurrent.py @@ -1,5 +1,5 @@ +from collections.abc import Sequence from concurrent.futures import Executor, ThreadPoolExecutor, wait -from typing import Sequence from unittest import TestCase import pytest diff --git a/tests/lib/test_app_registry.py b/tests/lib/test_app_registry.py index aede269..84d43cf 100644 --- a/tests/lib/test_app_registry.py +++ b/tests/lib/test_app_registry.py @@ -1,4 +1,4 @@ -from typing import Dict, Mapping +from collections.abc import Mapping from unittest import TestCase import pytest @@ -24,7 +24,7 @@ def test_immutability_of_data_source_types_property_content(self) -> None: Assert that once set, the contents of the ``data_source_types`` property cannot be modified by modifying the original mapping. """ - data_source_types: Dict[str, DataSourceType] = { + data_source_types: dict[str, DataSourceType] = { **self._data_source_types } self._app_registry.data_source_types = data_source_types diff --git a/tests/lib/test_checkers.py b/tests/lib/test_checkers.py index 1cb66d4..b941fb6 100644 --- a/tests/lib/test_checkers.py +++ b/tests/lib/test_checkers.py @@ -1,4 +1,5 @@ -from typing import Iterable, Sequence, SupportsFloat, Tuple +from collections.abc import Iterable, Sequence +from typing import SupportsFloat import pytest @@ -28,7 +29,7 @@ def test_ensure_fails_on_invalid_input() -> None: is not greater than the given ``base_value``. 
""" - inputs: Iterable[Tuple[SupportsFloat, SupportsFloat]] = ( + inputs: Iterable[tuple[SupportsFloat, SupportsFloat]] = ( (0, 1), (-1, 0), (-1.0, -0.0), diff --git a/tests/lib/transports/http/factories.py b/tests/lib/transports/http/factories.py index 316d52b..c5f1db0 100644 --- a/tests/lib/transports/http/factories.py +++ b/tests/lib/transports/http/factories.py @@ -1,4 +1,5 @@ -from typing import Any, Mapping, Optional, Sequence +from collections.abc import Mapping, Sequence +from typing import Any, Optional import factory diff --git a/tests/lib/transports/http/test_api_v1_dialect.py b/tests/lib/transports/http/test_api_v1_dialect.py index 1428f03..c371cbf 100644 --- a/tests/lib/transports/http/test_api_v1_dialect.py +++ b/tests/lib/transports/http/test_api_v1_dialect.py @@ -1,5 +1,6 @@ import json -from typing import Any, Mapping +from collections.abc import Mapping +from typing import Any from unittest import TestCase from unittest.mock import patch diff --git a/tests/lib/transports/http/test_http.py b/tests/lib/transports/http/test_http.py index b3ce66f..1a2a882 100644 --- a/tests/lib/transports/http/test_http.py +++ b/tests/lib/transports/http/test_http.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any from unittest import TestCase from unittest.mock import patch @@ -15,10 +15,10 @@ class TestHTTPModule(TestCase): def setUp(self) -> None: super().setUp() - self._app_config: Dict[str, Any] = config_factory() + self._app_config: dict[str, Any] = config_factory() self._api_dialect_config_key: str = "default_http_api_dialect_factory" self._http_config_key: str = "HTTP_TRANSPORT" - self._http_config: Dict[str, Any] = { + self._http_config: dict[str, Any] = { self._api_dialect_config_key: ( "tests.lib.transports.http.factories.FakeHTTPAPIDialectFactory" ), @@ -42,31 +42,31 @@ def test_http_transport_factory_with_invalid_settings_fails(self) -> None: Assert that an invalid config results in the expected errors being raised. 
""" - config1: Dict[str, Any] = dict(self._app_config) + config1: dict[str, Any] = dict(self._app_config) del config1[self._http_config_key] with patch("app.settings", config1): with pytest.raises(ImproperlyConfiguredError): http_transport_factory() - config2: Dict[str, Any] = dict(self._app_config) + config2: dict[str, Any] = dict(self._app_config) config2[self._http_config_key] = 3 with patch("app.settings", config2): with pytest.raises(ImproperlyConfiguredError): http_transport_factory() - config3: Dict[str, Any] = dict(self._app_config) + config3: dict[str, Any] = dict(self._app_config) del config3[self._http_config_key][self._api_dialect_config_key] with patch("app.settings", config3): with pytest.raises(ImproperlyConfiguredError): http_transport_factory() - config4: Dict[str, Any] = dict(self._app_config) + config4: dict[str, Any] = dict(self._app_config) config4[self._http_config_key][self._api_dialect_config_key] = None with patch("app.settings", config4): with pytest.raises(ImproperlyConfiguredError): http_transport_factory() - config5: Dict[str, Any] = dict(self._app_config) + config5: dict[str, Any] = dict(self._app_config) config5[self._http_config_key][self._api_dialect_config_key] = "12345" with patch("app.settings", config5): with pytest.raises(ImproperlyConfiguredError): diff --git a/tests/test_app.py b/tests/test_app.py index 9757d09..6c6ac69 100644 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any from unittest import TestCase import pytest @@ -45,7 +45,7 @@ class TestAppModule(TestCase): def setUp(self) -> None: super().setUp() - self._default_config: Dict[str, Any] = dict() + self._default_config: dict[str, Any] = dict() self._some_state: int = 0 def test_valid_config_is_successful(self) -> None: @@ -59,8 +59,8 @@ def test_invalid_settings_initializers_config_causes_error(self) -> None: :config:`setting initializers ` on the *SETTINGS_INITIALIZERS* setting results in errors. """ - config1: Dict[str, Any] = dict(self._default_config) - config2: Dict[str, Any] = dict(self._default_config) + config1: dict[str, Any] = dict(self._default_config) + config2: dict[str, Any] = dict(self._default_config) config1["SETTINGS_INITIALIZERS"] = ["invalid_dotted_path"] config2["SETTINGS_INITIALIZERS"] = ["app.core.Task"] @@ -77,8 +77,8 @@ def test_invalid_data_source_types_config_causes_error(self) -> None: :config:`data source types ` on the *SUPPORTED_DATA_SOURCE_TYPES* setting result in errors. """ - config1: Dict[str, Any] = dict(self._default_config) - config2: Dict[str, Any] = dict(self._default_config) + config1: dict[str, Any] = dict(self._default_config) + config2: dict[str, Any] = dict(self._default_config) config1["SUPPORTED_DATA_SOURCE_TYPES"] = ["invalid_dotted_path"] config2["SUPPORTED_DATA_SOURCE_TYPES"] = ["app.core.Task"] @@ -94,7 +94,7 @@ def test_missing_default_transport_factory_setting_is_ok(self) -> None: Assert that a missing setting for the default transport factory is allowed. """ - config: Dict[str, Any] = dict() + config: dict[str, Any] = dict() app.setup(initial_settings=config) assert app.settings.get("DEFAULT_TRANSPORT_FACTORY") is None @@ -104,8 +104,8 @@ def test_that_invalid_default_transport_setting_causes_error(self) -> None: Assert that invalid values for the default transport factory setting cause expected errors. 
""" - config1: Dict[str, Any] = {"DEFAULT_TRANSPORT_FACTORY": 4} - config2: Dict[str, Any] = {"DEFAULT_TRANSPORT_FACTORY": "invalid_path"} + config1: dict[str, Any] = {"DEFAULT_TRANSPORT_FACTORY": 4} + config2: dict[str, Any] = {"DEFAULT_TRANSPORT_FACTORY": "invalid_path"} # The setting value must be a string with pytest.raises(ImproperlyConfiguredError): diff --git a/tests/use_cases/test_fetch_metadata.py b/tests/use_cases/test_fetch_metadata.py index 1165dc6..0e6a333 100644 --- a/tests/use_cases/test_fetch_metadata.py +++ b/tests/use_cases/test_fetch_metadata.py @@ -1,4 +1,4 @@ -from typing import Sequence +from collections.abc import Sequence from unittest import TestCase from app.core import DataSource, DataSourceType, Transport diff --git a/tests/use_cases/test_run_extraction.py b/tests/use_cases/test_run_extraction.py index dde9ec6..bf48171 100644 --- a/tests/use_cases/test_run_extraction.py +++ b/tests/use_cases/test_run_extraction.py @@ -1,4 +1,4 @@ -from typing import Sequence, Tuple +from collections.abc import Sequence from unittest import TestCase from app.core import DataSource, ExtractMetadata @@ -70,7 +70,7 @@ class TestRunDataSourceExtracts(TestGroupSiblingExtracts): def setUp(self) -> None: super().setUp() self._grouped_extracts: Sequence[ - Tuple[DataSource, Sequence[ExtractMetadata]] + tuple[DataSource, Sequence[ExtractMetadata]] ] = GroupSiblingExtracts().execute(self._all_extracts) self._instance: RunDataSourceExtracts = RunDataSourceExtracts() diff --git a/tests/use_cases/test_upload_extracts.py b/tests/use_cases/test_upload_extracts.py index f368d86..76c64dc 100644 --- a/tests/use_cases/test_upload_extracts.py +++ b/tests/use_cases/test_upload_extracts.py @@ -1,4 +1,5 @@ -from typing import Any, Dict, Sequence, Tuple +from collections.abc import Sequence +from typing import Any from unittest import TestCase from unittest.mock import patch @@ -81,7 +82,7 @@ def setUp(self) -> None: def test_execute_return_value(self) -> None: """Assert that the ``execute()`` method returns the expected value.""" - config: Dict[str, Any] = { + config: dict[str, Any] = { "ORG_UNIT_CODE": self._org_unit_code, "ORG_UNIT_NAME": self._org_unit_name, } @@ -119,7 +120,7 @@ def setUp(self) -> None: def test_execute_return_value(self) -> None: """Assert that the ``execute()`` method returns the expected value.""" - config: Dict[str, Any] = { + config: dict[str, Any] = { "ORG_UNIT_CODE": self._org_unit_code, "ORG_UNIT_NAME": self._org_unit_name, } @@ -150,7 +151,7 @@ def setUp(self) -> None: size=self._max_items, chunk_count=self._chunk_count, ) - self._posted_uploads: Sequence[Tuple[UploadMetadata, Any]] = tuple( + self._posted_uploads: Sequence[tuple[UploadMetadata, Any]] = tuple( (_upload, _data) for _upload, _data in zip(self._upload_metas, self._extract_data) ) @@ -169,7 +170,7 @@ class TestPostUploadChunks(TestPrepareUploadChunks): def setUp(self) -> None: super().setUp() - self._prepared_chunks: Sequence[Tuple[UploadMetadata, Sequence[bytes]]] + self._prepared_chunks: Sequence[tuple[UploadMetadata, Sequence[bytes]]] self._prepared_chunks = PrepareUploadChunks().execute( self._posted_uploads )