diff --git a/.gitignore b/.gitignore index 41a41ed..b1834a2 100644 --- a/.gitignore +++ b/.gitignore @@ -291,6 +291,9 @@ GitHub.sublime-settings # Session Session.vim +# Ruff cache +.ruff_cache + # Temporary .netrwhist .~lock.* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a7e5137..92d075b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,6 +9,12 @@ repos: - id: check-yaml - id: check-added-large-files + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: "v0.0.261" + hooks: + - id: ruff + args: [ --fix, --exit-non-zero-on-fix ] + - repo: https://github.com/psf/black rev: 23.3.0 hooks: @@ -24,14 +30,7 @@ repos: rev: v3.3.1 hooks: - id: pyupgrade - args: [--py310-plus] - - - repo: https://github.com/pycqa/flake8 - rev: 6.0.0 - hooks: - - id: flake8 - args: ["--config=tox.ini"] - additional_dependencies: [flake8-isort] + args: ["--py310-plus"] # sets up .pre-commit-ci.yaml to ensure pre-commit dependencies stay up to date ci: diff --git a/app/__init__.py b/app/__init__.py index ced4e9f..4ceb5d3 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -3,7 +3,6 @@ from typing import Any, Final, cast import yaml -from yaml import Loader from app.core import DataSourceType from app.lib import ( @@ -28,7 +27,7 @@ _SUPPORTED_DATA_SOURCE_TYPES_CONFIG_KEY: Final[ str -] = "SUPPORTED_DATA_SOURCE_TYPES" # noqa +] = "SUPPORTED_DATA_SOURCE_TYPES" _DEFAULT_CONFIG: Final[dict[str, Any]] = { _LOGGING_CONFIG_KEY: { @@ -39,15 +38,15 @@ "format": ( "%(levelname)s: %(asctime)s %(module)s " "%(process)d %(thread)d %(message)s" - ) - } + ), + }, }, "handlers": { "console": { "level": "DEBUG", "class": "logging.StreamHandler", "formatter": "verbose", - } + }, }, "root": {"level": "INFO", "handlers": ["console"]}, }, @@ -86,24 +85,25 @@ def _load_config_file( # TODO: Ensure that a valid config file path was given and if not raise an # appropriate Exception. with open(config_file_path, "rb") as config_file: - return yaml.load(config_file, Loader=Loader) + return yaml.safe_load(config_file) def _load_settings_initializers( initializers_dotted_paths: Sequence[str], ) -> Sequence[SettingInitializer]: - initializers: list[SettingInitializer] = list() + initializers: list[SettingInitializer] = [] for _initializer_dotted_path in initializers_dotted_paths: try: initializer_klass: type[SettingInitializer] initializer_klass = import_string_as_klass( - _initializer_dotted_path, SettingInitializer + _initializer_dotted_path, + SettingInitializer, ) initializers.append(initializer_klass()) # type: ignore except ImportError as exp: raise ImproperlyConfiguredError( message='"%s" does not seem to be a valid path.' - % _initializer_dotted_path + % _initializer_dotted_path, ) from exp except TypeError as exp: raise ImproperlyConfiguredError( @@ -111,7 +111,7 @@ def _load_settings_initializers( 'Invalid value, "%s" is either not class or is not a ' 'subclass of "app.lib.SettingInitializer".' % _initializer_dotted_path - ) + ), ) from exp return initializers @@ -127,7 +127,7 @@ class _DefaultTransportFactoryInitializer(SettingInitializer): def setting(self) -> str: return _DEFAULT_TRANSPORT_FACTORY_CONFIG_KEY - def execute(self, an_input: str | None) -> Any: + def execute(self, an_input: str | None) -> str | None: # If the default transport setting has not been provided or is empty, # do nothing. 
if not an_input: @@ -136,20 +136,21 @@ def execute(self, an_input: str | None) -> Any: if type(an_input) is not str: raise ImproperlyConfiguredError( message='The value of the "%s" setting must be a string' - % _DEFAULT_TRANSPORT_FACTORY_CONFIG_KEY + % _DEFAULT_TRANSPORT_FACTORY_CONFIG_KEY, ) default_transport_factory: DefaultTransportFactory try: default_transport_factory = cast( - DefaultTransportFactory, import_string(an_input) + DefaultTransportFactory, + import_string(an_input), ) global registry registry.default_transport_factory = default_transport_factory except (ImportError, TypeError) as exp: raise ImproperlyConfiguredError( message="Unable to import the default transport factory at " - '"%s". Ensure a valid path was given.' % an_input + '"%s". Ensure a valid path was given.' % an_input, ) from exp return an_input @@ -162,9 +163,9 @@ class _LoggingInitializer(SettingInitializer): def setting(self) -> str: return _LOGGING_CONFIG_KEY - def execute(self, an_input: Mapping[str, Any] | None) -> Any: + def execute(self, an_input: Mapping[str, Any] | None) -> Mapping[str, Any]: logging_config: dict[str, Any] = dict( - an_input or _DEFAULT_CONFIG[self.setting] + an_input or _DEFAULT_CONFIG[self.setting], ) dictConfig(logging_config) return logging_config @@ -180,17 +181,17 @@ class _SupportedDataSourceTypesInitializer(SettingInitializer): def setting(self) -> str: return _SUPPORTED_DATA_SOURCE_TYPES_CONFIG_KEY - def execute(self, an_input: Sequence[str] | None) -> Any: + def execute(self, an_input: Sequence[str] | None) -> Sequence[str]: supported_dst: Sequence[str] = ( an_input or _DEFAULT_CONFIG[self.setting] ) global registry - _dst: DataSourceType # noqa: F842 + _dst: DataSourceType registry.data_source_types = { _dst.code: _dst - for _dst in map( - lambda _s: self._dotted_path_to_data_source_type_klass(_s)(), - supported_dst, + for _dst in ( + self._dotted_path_to_data_source_type_klass(_s)() + for _s in supported_dst ) } return supported_dst @@ -202,19 +203,20 @@ def _dotted_path_to_data_source_type_klass( try: data_source_type_klass: type[DataSourceType] data_source_type_klass = import_string_as_klass( - dotted_path, DataSourceType + dotted_path, + DataSourceType, ) return data_source_type_klass except ImportError as exp: raise ImproperlyConfiguredError( - message='"%s" does not seem to be a valid path.' % dotted_path + message='"%s" does not seem to be a valid path.' % dotted_path, ) from exp except TypeError as exp: raise ImproperlyConfiguredError( message=( 'Invalid value, "%s" is either not class or is not a ' 'subclass of "app.core.DataSourceType".' 
% dotted_path - ) + ), ) from exp @@ -252,8 +254,8 @@ def setup( _initializers: list[Any] = list(settings_initializers or []) _initializers.extend( _load_settings_initializers( - _settings_dict.get(_SETTINGS_INITIALIZERS_CONFIG_KEY, tuple()) - ) + _settings_dict.get(_SETTINGS_INITIALIZERS_CONFIG_KEY, ()), + ), ) _initializers.insert(0, _LoggingInitializer()) _initializers.insert(1, _SupportedDataSourceTypesInitializer()) @@ -261,5 +263,6 @@ def setup( global settings settings = Config( # type: ignore - settings=_settings_dict, settings_initializers=_initializers + settings=_settings_dict, + settings_initializers=_initializers, ) diff --git a/app/__main__.py b/app/__main__.py index 88d29f8..37e0ecc 100644 --- a/app/__main__.py +++ b/app/__main__.py @@ -68,7 +68,9 @@ def argparse_factory(prog_name: str = __title__) -> ArgumentParser: ), ) parser.add_argument( - "--version", action="version", version="%(prog)s " + __version__ + "--version", + action="version", + version="%(prog)s " + __version__, ) return parser @@ -94,9 +96,9 @@ def main_pipeline_factory( _transport: Transport _transport = ( transport - or app.registry.get_default_transport_factory_or_raise( # noqa + or app.registry.get_default_transport_factory_or_raise( error_message="The default transport factory is required by the " - "main application pipeline." + "main application pipeline.", )() ) return Pipeline( @@ -125,10 +127,11 @@ def main() -> None: # pragma: no cover transport_factory = app.registry.get_default_transport_factory_or_raise() with transport_factory() as transport: main_pipeline: Pipeline[ - Sequence[DataSourceType], Sequence[UploadExtractResult] + Sequence[DataSourceType], + Sequence[UploadExtractResult], ] = main_pipeline_factory(transport=transport) main_pipeline.execute(tuple(app.registry.data_source_types.values())) - print("Done ...") + print("Done ...") # noqa: T201 if __name__ == "__main__": # pragma: no cover diff --git a/app/core/domain.py b/app/core/domain.py index 3a78203..56261df 100644 --- a/app/core/domain.py +++ b/app/core/domain.py @@ -1,6 +1,6 @@ from abc import ABCMeta, abstractmethod from collections.abc import Mapping, Sequence -from functools import lru_cache +from functools import cache from typing import Any, Generic, TypeVar from typing_inspect import is_optional_type @@ -23,7 +23,7 @@ # ============================================================================= -@lru_cache(maxsize=None) +@cache def _get_available_annotations(do_klass: type[_ADO]) -> Mapping[str, Any]: """Extract all annotations available on a domain object class. @@ -39,19 +39,20 @@ def _get_available_annotations(do_klass: type[_ADO]) -> Mapping[str, Any]: return { field_name: field_type for klass in filter( - lambda _klass: hasattr(_klass, "__annotations__"), do_klass.mro() + lambda _klass: hasattr(_klass, "__annotations__"), + do_klass.mro(), ) for field_name, field_type in klass.__annotations__.items() } -@lru_cache(maxsize=None) +@cache def _get_required_fields_names(do_klass: type[_ADO]) -> Sequence[str]: """Determine and return the required fields of a domain object class. A required field in the context of this method is defined as one whose type is not ``NoneType`` or direct union with it such as - ``typing.Optional``. These includes all the fields defined on the class's + ``typing.Optional``. These include all the fields defined on the class's ancestors. .. 
note:: @@ -62,7 +63,7 @@ def _get_required_fields_names(do_klass: type[_ADO]) -> Sequence[str]: :return: A sequence of the required field names of a domain object class. """ available_annotations: Mapping[str, Any] = _get_available_annotations( - do_klass=do_klass + do_klass=do_klass, ) return tuple( field_name @@ -76,10 +77,10 @@ def _get_required_fields_names(do_klass: type[_ADO]) -> Sequence[str]: # ============================================================================= -class AbstractDomainObject(metaclass=ABCMeta): +class AbstractDomainObject(metaclass=ABCMeta): # noqa: B024 """The base class for all domain objects in the app.""" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any): # noqa: ANN401 """ Initialize a domain object and set the object's fields using the provided kwargs. Note that fields without annotations are going to be @@ -97,7 +98,7 @@ def __init__(self, **kwargs): if any(set(required_fields).difference(set(kwargs.keys()))): raise ValueError( "The following values are required: %s" - % ", ".join(required_fields) + % ", ".join(required_fields), ) for valid_field in _get_available_annotations(self.__class__): @@ -120,7 +121,7 @@ def get_required_fields(cls) -> Sequence[str]: class IdentifiableDomainObject(AbstractDomainObject, metaclass=ABCMeta): """Describes a domain object that has an id property.""" - id: str + id: str # noqa: A003 class ExtractMetadata( @@ -150,7 +151,8 @@ def data_source(self) -> "DataSource": """ ... - def get_upload_meta_extra_init_kwargs( # noqa + @abstractmethod + def get_upload_meta_extra_init_kwargs( self, ) -> Mapping[str, Any] | None: """ @@ -204,7 +206,7 @@ class DataSource( ): """An interface representing an entity that contains data of interest.""" - id: str + id: str # noqa: A003 name: str description: str | None @@ -234,7 +236,8 @@ def extract_metadata(self) -> Mapping[str, ExtractMetadata[_RT, Any]]: @extract_metadata.setter @abstractmethod def extract_metadata( - self, extract_metadata: Mapping[str, ExtractMetadata[_RT, Any]] + self, + extract_metadata: Mapping[str, ExtractMetadata[_RT, Any]], ) -> None: """Set the extract metadata instances that belong to this data source. @@ -284,7 +287,9 @@ def of_mapping(cls, mapping: Mapping[str, Any]) -> "DataSource": class UploadChunk( - IdentifiableDomainObject, InitFromMapping, metaclass=ABCMeta + IdentifiableDomainObject, + InitFromMapping, + metaclass=ABCMeta, ): """An interface that represents part of an upload's content.""" @@ -320,7 +325,6 @@ class UploadMetadata( org_unit_code: str org_unit_name: str content_type: str - # chunks: Sequence[UploadChunk] @property @abstractmethod @@ -334,7 +338,7 @@ def extract_metadata(self) -> ExtractMetadata[Any, _RT]: """ ... - def get_upload_chunk_extra_init_kwargs( # noqa + def get_upload_chunk_extra_init_kwargs( self, ) -> Mapping[str, Any] | None: """ diff --git a/app/core/exceptions.py b/app/core/exceptions.py index 763c77d..bad6ad0 100644 --- a/app/core/exceptions.py +++ b/app/core/exceptions.py @@ -1,4 +1,4 @@ -class IDRClientException(Exception): +class IDRClientException(Exception): # noqa: N818 """Base exception for most explicit exceptions raised by this app.""" def __init__(self, message: str | None = None, *args): @@ -37,7 +37,9 @@ class DataSourceDisposedError(ExtractionOperationError): """ def __init__( - self, message: str | None = "Data source is disposed.", *args + self, + message: str | None = "Data source is disposed.", + *args, ): """Initialize an ``DataSourceDisposedError`` with the given parameters. 
@@ -61,7 +63,7 @@ class TransportClosedError(TransportError): """ def __init__(self, message: str | None = "Transport closed.", *args): - """Initialize an ``TransportClosedError`` with the given parameters. + """Initialize a ``TransportClosedError`` with the given parameters. :param message: An optional error message. :param args: args to pass to forward to the base exception. diff --git a/app/core/mixins.py b/app/core/mixins.py index 4c01e47..8df8e81 100644 --- a/app/core/mixins.py +++ b/app/core/mixins.py @@ -1,7 +1,8 @@ from abc import ABCMeta, abstractmethod from collections.abc import Mapping +from contextlib import AbstractContextManager from types import TracebackType -from typing import Any, ContextManager, Generic, TypeVar +from typing import Any, Generic, TypeVar from .task import Task @@ -18,7 +19,7 @@ # ============================================================================= -class Disposable(ContextManager, metaclass=ABCMeta): +class Disposable(AbstractContextManager, metaclass=ABCMeta): """Represents an entity that uses resources that need to be cleaned up.""" def __exit__( diff --git a/app/core/transport.py b/app/core/transport.py index 9a7d936..f8229d0 100644 --- a/app/core/transport.py +++ b/app/core/transport.py @@ -58,7 +58,9 @@ def fetch_data_source_extracts( @abstractmethod def fetch_data_sources( - self, data_source_type: DataSourceType, **options: TransportOptions + self, + data_source_type: DataSourceType, + **options: TransportOptions, ) -> Sequence[DataSource]: """ Fetch and return :class:`data sources ` of the given @@ -77,7 +79,9 @@ def fetch_data_sources( @abstractmethod def mark_upload_as_complete( - self, upload_metadata: UploadMetadata, **options: TransportOptions + self, + upload_metadata: UploadMetadata, + **options: TransportOptions, ) -> None: """ Mark the given :class:`upload metadata instance ` as diff --git a/app/imp/__init__.py b/app/imp/__init__.py index 23c3aa5..aaafcb1 100644 --- a/app/imp/__init__.py +++ b/app/imp/__init__.py @@ -1,4 +1,4 @@ -from .sql_data import * # noqa: F401,F403 +from .sql_data import * # noqa: F403 from .sql_data import __all__ as _all_sql_data __all__ = [] diff --git a/app/imp/sql_data/domain.py b/app/imp/sql_data/domain.py index 000eb77..3128ecf 100644 --- a/app/imp/sql_data/domain.py +++ b/app/imp/sql_data/domain.py @@ -49,7 +49,7 @@ class _DataFrameChunksToUploadChunks( - Task[Sequence[pd.DataFrame], Sequence[bytes]] + Task[Sequence[pd.DataFrame], Sequence[bytes]], ): def execute(self, an_input: Sequence[pd.DataFrame]) -> Sequence[bytes]: return tuple( @@ -60,7 +60,7 @@ def execute(self, an_input: Sequence[pd.DataFrame]) -> Sequence[bytes]: def data_frame_as_bytes(data_frame: pd.DataFrame) -> bytes: with io.BytesIO() as stream: pq.write_table( - pa.Table.from_pandas(df=data_frame), # noqa + pa.Table.from_pandas(df=data_frame), where=stream, compression="gzip", ) @@ -77,7 +77,6 @@ class SupportedDBVendors(Enum): MYSQL = "MySQL" SQLITE_MEM = "SqLite in-memory" - # POSTGRES_SQL = "Postgres SQL" class SQLDataSource(DataSource[Connection]): @@ -90,17 +89,18 @@ def __init__(self, **kwargs): data_source_type: SQLDataSourceType = kwargs.pop("data_source_type") super().__init__(**kwargs) self._data_source_type: SQLDataSourceType = data_source_type - self._extract_metadata: dict[str, "SQLExtractMetadata"] = dict() + self._extract_metadata: dict[str, "SQLExtractMetadata"] = {} self._engine: Engine | None = None def __enter__(self) -> "SQLDataSource": if self._engine is not None: # TODO: Consider moving this check on the 
"connect_to_db" method # instead. - raise SQLDataError( + err_msg: str = ( 'Incorrect usage of "SQLDataSource". Nesting of context ' "managers not allowed." ) + raise SQLDataError(err_msg) self.connect_to_db() return self @@ -114,7 +114,8 @@ def extract_metadata(self) -> Mapping[str, "ExtractMetadata"]: @extract_metadata.setter def extract_metadata( - self, extract_metadata: Mapping[str, "SQLExtractMetadata"] + self, + extract_metadata: Mapping[str, "SQLExtractMetadata"], ) -> None: self._extract_metadata = dict(**extract_metadata) @@ -142,7 +143,7 @@ def connect_to_db(self) -> None: else: # pragma: no cover raise SQLDataError( message='Unsupported db vendor "%s"' - % self.database_vendor.value + % self.database_vendor.value, ) def dispose(self) -> None: @@ -181,24 +182,26 @@ def _load_mysql_config(self) -> Engine: ) mysql_conf: Mapping[str, Any] = app.settings.get(_MYSQL_CONFIG_KEY) if mysql_conf is None or not isinstance(mysql_conf, dict): + err_msg: str = 'The setting "%s" is missing or is not valid.' raise ImproperlyConfiguredError( - 'The setting "%s" is missing or is not valid.' + err_msg, ) for setting in ("host", "port", "username", "password"): # TODO: Revisit this, confirm if username and password are a must. if setting not in mysql_conf: raise ImproperlyConfiguredError( 'The setting "%s" is missing in "%s".' - % (setting, _MYSQL_CONFIG_KEY) + % (setting, _MYSQL_CONFIG_KEY), ) try: port = int(mysql_conf["port"]) if port < 0 or port > 65535: - raise ValueError("Invalid port") + err_msg: str = "Invalid port" + raise ValueError(err_msg) except ValueError: raise ImproperlyConfiguredError( - '"%s" is not a valid port.' % mysql_conf["port"] - ) + '"%s" is not a valid port.' % mysql_conf["port"], + ) from None return create_engine( "mysql+pymysql://%s:%s@%s:%s/%s" @@ -208,10 +211,10 @@ def _load_mysql_config(self) -> Engine: mysql_conf["host"], mysql_conf["port"], self.database_name, - ) + ), ) - def _load_sqlite_in_memory_config(self) -> Engine: # noqa + def _load_sqlite_in_memory_config(self) -> Engine: _LOGGER.debug("Loading SqLite in memory database.") return create_engine("sqlite+pysqlite:///:memory:") @@ -226,10 +229,11 @@ class SQLDataSourceType(DataSourceType): def __init__(self, **kwargs): kwargs["name"] = "SQL Data Source Type" kwargs.setdefault( - "description", "Represents SQL databases as a source type." + "description", + "Represents SQL databases as a source type.", ) super().__init__(**kwargs) - self._data_sources: dict[str, SQLDataSource] = dict() + self._data_sources: dict[str, SQLDataSource] = {} @property def code(self) -> str: @@ -267,12 +271,27 @@ class SQLExtractMetadata(ExtractMetadata[Connection, Any]): def __init__(self, **kwargs): data_source: SQLDataSource = kwargs.pop("data_source") super().__init__(**kwargs) - self._data_source = data_source + self._data_source: SQLDataSource = data_source + self._upload_meta_init_kwargs: Mapping[str, Any] | None = None @property def data_source(self) -> SQLDataSource: return self._data_source + def get_upload_meta_extra_init_kwargs( + self, + ) -> Mapping[str, Any] | None: + """ + Return an optional mapping of extra keyword arguments to be used when + initializing the :class:`upload metadata ` instance + associated with this extract. + + :return: An optional mapping of extra keyword arguments to use when + initializing the upload metadata instance associated with this + extract. 
+ """ + return self._upload_meta_init_kwargs + def to_task(self) -> SimpleSQLSelect: return SimpleSQLSelect(self.sql_query) @@ -291,7 +310,7 @@ def __init__(self, **kwargs): def extract_metadata(self) -> SQLExtractMetadata: return self._extract_metadata - def to_task(self) -> Pipeline[pd.DataFrame, Sequence[bytes]]: # noqa + def to_task(self) -> Pipeline[pd.DataFrame, Sequence[bytes]]: return Pipeline(ChunkDataFrame(), _DataFrameChunksToUploadChunks()) @classmethod diff --git a/app/imp/sql_data/exceptions.py b/app/imp/sql_data/exceptions.py index 02e847e..4da72d8 100644 --- a/app/imp/sql_data/exceptions.py +++ b/app/imp/sql_data/exceptions.py @@ -19,7 +19,8 @@ class SQLDataExtractionOperationError(SQLDataError, ExtractionOperationError): class SQLDataSourceDisposedError( - SQLDataExtractionOperationError, DataSourceDisposedError + SQLDataExtractionOperationError, + DataSourceDisposedError, ): """ An exception indicating that a forbidden operation was attempted on a diff --git a/app/lib/__init__.py b/app/lib/__init__.py index 56c72f5..2d69ca5 100644 --- a/app/lib/__init__.py +++ b/app/lib/__init__.py @@ -4,14 +4,14 @@ ensure_not_none, ensure_not_none_nor_empty, ) -from .config import * # noqa: F401,F403 +from .config import * # noqa: F403 from .config import __all__ as _all_config from .module_loading import import_string, import_string_as_klass -from .retry import * # noqa: F401,F403 +from .retry import * # noqa: F403 from .retry import __all__ as _all_retry -from .tasks import * # noqa: F401,F403 +from .tasks import * # noqa: F403 from .tasks import __all__ as _all_tasks -from .transports import * # noqa: F401,F403 +from .transports import * # noqa: F403 from .transports import __all__ as _all_transports __all__ = [ diff --git a/app/lib/app_registry.py b/app/lib/app_registry.py index 2727168..febfe28 100644 --- a/app/lib/app_registry.py +++ b/app/lib/app_registry.py @@ -24,7 +24,7 @@ class AppRegistry: """ def __init__(self): - self._data_source_types: dict[str, DataSourceType] = dict() + self._data_source_types: dict[str, DataSourceType] = {} self._default_transport_factory: None | ( DefaultTransportFactory ) = None @@ -42,7 +42,8 @@ def data_source_types(self) -> Mapping[str, DataSourceType]: @data_source_types.setter def data_source_types( - self, data_source_types: Mapping[str, DataSourceType] + self, + data_source_types: Mapping[str, DataSourceType], ) -> None: """Set the data sources supported by the app. @@ -64,7 +65,8 @@ def default_transport_factory(self) -> DefaultTransportFactory | None: @default_transport_factory.setter def default_transport_factory( - self, transport_factory: DefaultTransportFactory + self, + transport_factory: DefaultTransportFactory, ) -> None: """Set the default transport factory for the app. 
@@ -76,7 +78,8 @@ def default_transport_factory( self._default_transport_factory = transport_factory def get_default_transport_factory_or_raise( - self, error_message: str | None = None + self, + error_message: str | None = None, ) -> DefaultTransportFactory: """ Returns the default transport factory if set or raise an @@ -94,6 +97,6 @@ if not self.default_transport_factory: raise ImproperlyConfiguredError( message=error_message - or ("The default transport factor has not been set.") + or ("The default transport factory has not been set."), ) return self.default_transport_factory diff --git a/app/lib/checkers.py b/app/lib/checkers.py index 802dc29..67b0ba4 100644 --- a/app/lib/checkers.py +++ b/app/lib/checkers.py @@ -50,7 +50,8 @@ def ensure_greater_than( def ensure_not_none( - value: _T | None, message: str = '"value" cannot be None.' + value: _T | None, + message: str = '"value" cannot be None.', ) -> _T: """Check that a given value is not ``None``. @@ -70,7 +71,8 @@ def ensure_not_none( def ensure_not_none_nor_empty( - value: _S, message: str = '"value" cannot be None or empty.' + value: _S, + message: str = '"value" cannot be None or empty.', ) -> _S: """ Check that a sized value is not ``None`` or empty(has a size of zero). 
This method can @@ -100,7 +101,7 @@ def _run_initializers(self) -> None: for _setting, _initializers in self._initializers.items(): raw_setting_val: Any = self._settings.get(_setting) initializer_pipeline: Pipeline = Pipeline(*_initializers) - setting_val: Any = initializer_pipeline(raw_setting_val) # noqa + setting_val: Any = initializer_pipeline(raw_setting_val) _LOGGER.debug( 'Ran initializer for the setting "%s" with raw value "%s".', str(_setting), @@ -112,9 +113,9 @@ def _run_initializers(self) -> None: def _group_related_initializers( initializers: Sequence[SettingInitializer], ) -> Mapping[str, Sequence[SettingInitializer]]: - grouped_initializers: dict[str, list[SettingInitializer]] = dict() + grouped_initializers: dict[str, list[SettingInitializer]] = {} for _initializer in initializers: grouped_initializers.setdefault(_initializer.setting, []).append( - _initializer + _initializer, ) return grouped_initializers diff --git a/app/lib/module_loading.py b/app/lib/module_loading.py index 6868027..4f535e4 100644 --- a/app/lib/module_loading.py +++ b/app/lib/module_loading.py @@ -49,7 +49,7 @@ def import_string(dotted_path: str) -> ModuleType: module_path, class_name = dotted_path.rsplit(".", 1) except ValueError as err: raise ImportError( - "%s doesn't look like a module path" % dotted_path + "%s doesn't look like a module path" % dotted_path, ) from err try: @@ -57,12 +57,13 @@ def import_string(dotted_path: str) -> ModuleType: except AttributeError as err: raise ImportError( 'Module "%s" does not define a "%s" attribute/class' - % (module_path, class_name) + % (module_path, class_name), ) from err def import_string_as_klass( - dotted_path: str, target_klass: type[_T] + dotted_path: str, + target_klass: type[_T], ) -> type[_T]: """ Import a dotted module as the given class type. Raise ``ImportError`` if @@ -80,12 +81,10 @@ def import_string_as_klass( derived from the given class. """ _module = import_string(dotted_path) - if not inspect.isclass(_module) or not issubclass( # noqa - _module, target_klass - ): + if not inspect.isclass(_module) or not issubclass(_module, target_klass): raise TypeError( 'Invalid value, "%s" is either not a class or a subclass of "%s".' - % (dotted_path, target_klass.__qualname__) + % (dotted_path, target_klass.__qualname__), ) return cast(type[target_klass], _module) diff --git a/app/lib/retry/exceptions.py b/app/lib/retry/exceptions.py index 704a282..82e6bca 100644 --- a/app/lib/retry/exceptions.py +++ b/app/lib/retry/exceptions.py @@ -4,6 +4,10 @@ class RetryError(IDRClientException): """An exception used to indicate that a retry failed.""" - def __init__(self, exp: BaseException, message="Deadline exceeded."): + def __init__( + self, + exp: BaseException, + message: str = "Deadline exceeded.", + ): self._exp: BaseException = exp super().__init__(message, self._exp) diff --git a/app/lib/retry/retry.py b/app/lib/retry/retry.py index b73dd70..8bf2869 100644 --- a/app/lib/retry/retry.py +++ b/app/lib/retry/retry.py @@ -37,7 +37,8 @@ def _enable_retries() -> bool: from app import settings return settings.get("RETRY", DEFAULT_RETRY_CONFIG).get( - "enable_retries", True + "enable_retries", + True, ) @@ -49,7 +50,7 @@ def if_exception_type_factory(*exp_types: type[BaseException]) -> Predicate: :return: A callable that takes an exception and returns ``True`` if the provided exception is of the given types. 
""" - _exp: BaseException # noqa: F842 + _exp: BaseException return lambda _exp: isinstance(_exp, exp_types) @@ -120,12 +121,12 @@ def __call__( wrapped: Callable[..., Any], # Types and default values are included on the rest of the arguments to # quiet pyright. - instance: Any = None, - args: tuple[Any, ...] = tuple(), + instance: Any = None, # noqa: ANN401 + args: tuple[Any, ...] = (), kwargs: Mapping[str, Any] | None = None, - ) -> Any: + ) -> Any: # noqa: ANN401 self.load_config() - kwargs = kwargs or dict() + kwargs = kwargs or {} return self.do_retry(partial(wrapped, *args, **kwargs)) def calculate_deadline_time(self) -> datetime | None: @@ -182,7 +183,7 @@ def deliberate_next_retry( remaining_time = (deadline_time - now).total_seconds() return min(remaining_time, next_delay_duration) - def do_retry(self, wrapped: Callable[[], Any]) -> Any: # noqa + def do_retry(self, wrapped: Callable[[], Any]) -> Any: # noqa: ANN401 """Implement the actual retry algorithm. Take a callable and retry it until a successful call or the set @@ -203,7 +204,7 @@ def do_retry(self, wrapped: Callable[[], Any]) -> Any: # noqa for sleep in self.exponential_delay_generator(): # pragma: no branch try: return wrapped() - except Exception as exp: + except Exception as exp: # noqa: BLE001 if not self._predicate(exp): raise last_exp = exp @@ -216,11 +217,17 @@ def do_retry(self, wrapped: Callable[[], Any]) -> Any: # noqa ) _LOGGER.debug( 'Retrying due to "{}", sleeping for {:.1f}s ...'.format( - last_exp, sleep - ) + last_exp, + sleep, + ), ) time.sleep(sleep) + # This should never be reached. This method should either exit by + # returning the wrapped callable's result or by raising an exception. + err_msg: str = "The program entered an invalid state. Exiting." # pragma: no cover # noqa: E501 + raise AssertionError(err_msg) + def exponential_delay_generator(self) -> Generator[float, None, None]: """ Return a generator that yields successive delay intervals based on the @@ -231,7 +238,10 @@ def exponential_delay_generator(self) -> Generator[float, None, None]: delay: float = self._initial_delay while True: # Introduce jitter by yielding a random delay. 
- yield min(random.uniform(0.0, delay * 2.0), self._maximum_delay) + yield min( + random.uniform(0.0, delay * 2.0), # noqa: S311 + self._maximum_delay, + ) delay *= self._multiplicative_factor def load_config(self) -> None: @@ -275,11 +285,13 @@ class is used as a decorator on functions and methods which leads self._multiplicative_factor: float = ( # type: ignore self._multiplicative_factor or retry_config.get( - "default_multiplicative_factor", DEFAULT_MULTIPLICATIVE_FACTOR + "default_multiplicative_factor", + DEFAULT_MULTIPLICATIVE_FACTOR, ) ) self._deadline: float | None = self._kwargs.get( # type: ignore - "deadline", retry_config.get("default_deadline", DEFAULT_DEADLINE) + "deadline", + retry_config.get("default_deadline", DEFAULT_DEADLINE), ) self._check_invariants() @@ -299,7 +311,8 @@ def _check_invariants(self) -> None: message=( 'The maximum delay ("{:.2f}") must be greater than or ' 'equal to the initial value ("{:.2f}").'.format( - self._maximum_delay, self._initial_delay + self._maximum_delay, + self._initial_delay, ) ), ) diff --git a/app/lib/retry/setting_initializers.py b/app/lib/retry/setting_initializers.py index 30ef71e..30fa44e 100644 --- a/app/lib/retry/setting_initializers.py +++ b/app/lib/retry/setting_initializers.py @@ -24,43 +24,49 @@ def execute(self, an_input: RetryConfig | None) -> RetryConfig: def _sanitize_and_load_config(self, config: RetryConfig) -> None: if not isinstance(config, dict): - raise ImproperlyConfiguredError( - f'The setting "{self.setting}" is invalid.' - ) + err_msg: str = f'The setting "{self.setting}" is invalid.' + raise ImproperlyConfiguredError(message=err_msg) if "default_deadline" in config and config["default_deadline"]: self._ensure_value_is_float_and_greater_than_zero( - config=config, setting="default_deadline" + config=config, + setting="default_deadline", ) if "default_initial_delay" in config: self._ensure_value_is_float_and_greater_than_zero( - config=config, setting="default_initial_delay" + config=config, + setting="default_initial_delay", ) if "default_maximum_delay" in config: self._ensure_value_is_float_and_greater_than_zero( - config=config, setting="default_maximum_delay" + config=config, + setting="default_maximum_delay", ) if "default_multiplicative_factor" in config: self._ensure_value_is_float_and_greater_than_zero( - config=config, setting="default_multiplicative_factor" + config=config, + setting="default_multiplicative_factor", ) enable_retries = config.get("enable_retries", True) config["enable_retries"] = bool( - enable_retries is True or enable_retries == "true" + enable_retries is True or enable_retries == "true", ) def _ensure_value_is_float_and_greater_than_zero( - self, config: RetryConfig, setting: str + self, + config: RetryConfig, + setting: str, ) -> None: - value = config[setting] # noqa + value = config[setting] if not (self._is_float(value) and float(value) > 0.0): - raise ImproperlyConfiguredError( - 'The setting "{}" must be a subtype of float and greater ' - "than zero".format(setting) + err_msg: str = ( + 'The setting "{}" must be a subtype of float and greater than ' "zero".format(setting) ) - config[setting] = float(value) # noqa + raise ImproperlyConfiguredError(message=err_msg) + config[setting] = float(value) @staticmethod - def _is_float(value: Any) -> bool: + def _is_float(value: Any) -> bool: # noqa: ANN401 try: float(value) return True diff --git a/app/lib/tasks/common.py b/app/lib/tasks/common.py index 181c29d..135c2fb 100644 --- a/app/lib/tasks/common.py +++ 
b/app/lib/tasks/common.py @@ -19,7 +19,8 @@ class Chainable( - Generic[_IN, _RT], Task[Callable[[_IN], _RT], "Chainable[_RT, Any]"] + Generic[_IN, _RT], + Task[Callable[[_IN], _RT], "Chainable[_RT, Any]"], ): def __init__(self, value: _IN): self._value: _IN = value @@ -59,8 +60,8 @@ def tasks(self) -> Sequence[Task[Any, Any]]: return self._tasks def execute(self, an_input: _IN) -> _RT: - _acc: Any # noqa: F842 - _tsk: Task[Any, Any] # noqa: F842 + _acc: Any + _tsk: Task[Any, Any] return cast( _RT, reduce( diff --git a/app/lib/tasks/concurrent.py b/app/lib/tasks/concurrent.py index 2d86a78..11dd962 100644 --- a/app/lib/tasks/concurrent.py +++ b/app/lib/tasks/concurrent.py @@ -36,7 +36,7 @@ def completed_successfully(future: Future[Any]) -> bool: """ - Checks if a :class:`future ` completed successfully and returns + Check if a :class:`future ` completed successfully and return ``True`` if so and ``False`` otherwise. In this context a *future* is considered to have completed successfully if it wasn't cancelled and no exception was raised on it's callee. @@ -46,7 +46,9 @@ def completed_successfully(future: Future[Any]) -> bool: :return: ``True` if the future completed successfully, ``False`` otherwise. """ return bool( - future.done() and not future.cancelled() and future.exception() is None + future.done() + and not future.cancelled() + and future.exception() is None, ) @@ -66,7 +68,9 @@ def __init__(self, message: str | None = "ConcurrentExecutor disposed."): class ConcurrentExecutor( - Generic[_IN, _RT], Task[_IN, MutableSequence["Future[_RT]"]], Disposable + Generic[_IN, _RT], + Task[_IN, MutableSequence["Future[_RT]"]], + Disposable, ): """ A :class:`task ` that takes multiple tasks with a common input and @@ -116,7 +120,7 @@ def __init__( accumulator or self._default_accumulator ) self._initial_value: MutableSequence[Future[_RT]] - self._initial_value = initial_value or list() + self._initial_value = initial_value or [] self._executor: Executor = executor or ThreadPoolExecutor() self._is_disposed: bool = False diff --git a/app/lib/transports/__init__.py b/app/lib/transports/__init__.py index 76beedd..a7c3d7a 100644 --- a/app/lib/transports/__init__.py +++ b/app/lib/transports/__init__.py @@ -1,4 +1,4 @@ -from .http import * # noqa: F401,F403 +from .http import * # noqa: F403 from .http import __all__ as _all_http __all__ = [] diff --git a/app/lib/transports/http/__init__.py b/app/lib/transports/http/__init__.py index 86434ae..0d5814e 100644 --- a/app/lib/transports/http/__init__.py +++ b/app/lib/transports/http/__init__.py @@ -18,7 +18,7 @@ _DEFAULT_API_DIALECT_FACTORY_CONF_KEY: Final[ str -] = "default_http_api_dialect_factory" # noqa +] = "default_http_api_dialect_factory" _HTTP_TRANSPORT_CONFIG_KEY: Final[str] = "HTTP_TRANSPORT" @@ -37,32 +37,33 @@ def http_transport_factory() -> HTTPTransport: from app.lib import ImproperlyConfiguredError, import_string http_transport_conf: Mapping[str, Any] | None = app.settings.get( - _HTTP_TRANSPORT_CONFIG_KEY + _HTTP_TRANSPORT_CONFIG_KEY, ) if not (http_transport_conf and isinstance(http_transport_conf, dict)): raise ImproperlyConfiguredError( message='The "%s" setting is missing, empty or not valid.' 
- % _HTTP_TRANSPORT_CONFIG_KEY + % _HTTP_TRANSPORT_CONFIG_KEY, ) api_dialect_factory_path: str | None = http_transport_conf.get( - _DEFAULT_API_DIALECT_FACTORY_CONF_KEY + _DEFAULT_API_DIALECT_FACTORY_CONF_KEY, ) if not api_dialect_factory_path: raise ImproperlyConfiguredError( message='The setting "%s" MUST be provided as part of the http ' - "transport config." % _DEFAULT_API_DIALECT_FACTORY_CONF_KEY + "transport config." % _DEFAULT_API_DIALECT_FACTORY_CONF_KEY, ) api_dialect_factory: _HTTPAPIDialectFactory try: api_dialect_factory = cast( - _HTTPAPIDialectFactory, import_string(api_dialect_factory_path) + _HTTPAPIDialectFactory, + import_string(api_dialect_factory_path), ) except (ImportError, TypeError) as exp: raise ImproperlyConfiguredError( message='Unable to import the http api dialect factory at "%s". ' - "Ensure a valid path was given." % api_dialect_factory_path + "Ensure a valid path was given." % api_dialect_factory_path, ) from exp return HTTPTransport( diff --git a/app/lib/transports/http/api_v1_dialect.py b/app/lib/transports/http/api_v1_dialect.py index 458f222..aebd58b 100644 --- a/app/lib/transports/http/api_v1_dialect.py +++ b/app/lib/transports/http/api_v1_dialect.py @@ -43,18 +43,18 @@ def idr_server_api_v1_dialect_factory() -> "IDRServerAPIv1": from app.lib import ImproperlyConfiguredError remote_server_conf: Mapping[str, str] | None = app.settings.get( - _REMOTE_SERVER_CONFIG_KEY + _REMOTE_SERVER_CONFIG_KEY, ) if not remote_server_conf or type(remote_server_conf) is not dict: raise ImproperlyConfiguredError( message='The "%s" setting is missing, empty or not valid.' - % _REMOTE_SERVER_CONFIG_KEY + % _REMOTE_SERVER_CONFIG_KEY, ) for _conf_key in ("host", "password", "username"): if _conf_key not in remote_server_conf: raise ImproperlyConfiguredError( message='The setting "%s" MUST be provided as part of the ' - "remote server config." % _conf_key + "remote server config." % _conf_key, ) return IDRServerAPIv1( @@ -79,13 +79,16 @@ def __init__(self, server_url: str, username: str, password: str): from app.lib import ensure_not_none, ensure_not_none_nor_empty self._server_url: str = ensure_not_none_nor_empty( - value=server_url, message='A valid "server_url" MUST be provided.' + value=server_url, + message='A valid "server_url" MUST be provided.', ) self._username: str = ensure_not_none_nor_empty( - value=username, message='"username" MUST not be none or empty.' + value=username, + message='"username" MUST not be none or empty.', ) self._password: str = ensure_not_none( - value=password, message='"password" MUST not be none.' + value=password, + message='"password" MUST not be none.', ) self._base_url: str = "%s/api" % self._server_url @@ -107,7 +110,9 @@ def authenticate(self, **options: TransportOptions) -> HTTPRequestParams: } def response_to_auth( - self, response_content: bytes, **options: TransportOptions + self, + response_content: bytes, + **options: TransportOptions, ) -> Mapping[str, str]: token: str = json.loads(response_content).get("token", "") return {"Authorization": "Token %s" % token} @@ -141,8 +146,8 @@ def response_to_data_source_extracts( dst: DataSourceType = data_source_type results: Sequence[Mapping[str, Any]] = json.loads( - response_content - ).get("results", tuple()) + response_content, + ).get("results", ()) return tuple( Chainable(_result). 
# Process/clean the response content in preparation for data @@ -150,13 +155,13 @@ def response_to_data_source_extracts( execute( lambda _r: { **_r, - "applicable_source_version": tuple(), + "applicable_source_version": (), "data_source": data_source, - } + }, ). # Initialize the data source. execute( - lambda _r: (dst.imp_extract_metadata_klass().of_mapping(_r)) + lambda _r: (dst.imp_extract_metadata_klass().of_mapping(_r)), ).value for _result in results ) @@ -164,7 +169,9 @@ def response_to_data_source_extracts( # DATA SOURCES RETRIEVAL # ------------------------------------------------------------------------- def fetch_data_sources( - self, data_source_type: DataSourceType, **options: TransportOptions + self, + data_source_type: DataSourceType, + **options: TransportOptions, ) -> HTTPRequestParams: return { "headers": {"Accept": "application/json"}, @@ -184,8 +191,8 @@ def response_to_data_sources( from app.lib import Chainable results: Sequence[Mapping[str, Any]] = json.loads( - response_content - ).get("results", tuple()) + response_content, + ).get("results", ()) return tuple( Chainable(_result). # Process/clean the response content in preparation for data @@ -195,13 +202,13 @@ def response_to_data_sources( **_r, "database_vendor": SupportedDBVendors.MYSQL, "data_source_type": data_source_type, - } + }, ). # Initialize the data source. execute( lambda _r: ( data_source_type.imp_data_source_klass().of_mapping(_r) - ) + ), ).value for _result in results ) @@ -209,7 +216,9 @@ def response_to_data_sources( # MARK UPLOAD COMPLETION # ------------------------------------------------------------------------- def mark_upload_as_complete( - self, upload_metadata: UploadMetadata, **options: TransportOptions + self, + upload_metadata: UploadMetadata, + **options: TransportOptions, ) -> HTTPRequestParams: parent_ds: DataSource = upload_metadata.extract_metadata.data_source parent_dst: DataSourceType = parent_ds.data_source_type @@ -252,7 +261,7 @@ def post_upload_chunk( "%d_%s" % (chunk_index, upload_metadata.id), chunk_content, upload_metadata.content_type, - ) + ), }, } @@ -297,7 +306,7 @@ def post_upload_metadata( "org_unit_name": org_unit_name, "content_type": content_type, "extract_metadata": extract_metadata.id, - } + }, ), } @@ -320,8 +329,8 @@ def response_to_upload_metadata( .execute(lambda _r: {**_r, "extract_metadata": extract_metadata}) .execute( lambda _r: parent_dst.imp_upload_metadata_klass().of_mapping( - _r - ) + _r, + ), ) .value ) @@ -335,6 +344,6 @@ def _b64encode_credentials(self) -> str: ( self._username.encode("latin1"), self._password.encode("latin1"), - ) - ) + ), + ), ).decode("ascii") diff --git a/app/lib/transports/http/http_api_dialect.py b/app/lib/transports/http/http_api_dialect.py index 8136349..e1f5236 100644 --- a/app/lib/transports/http/http_api_dialect.py +++ b/app/lib/transports/http/http_api_dialect.py @@ -51,7 +51,9 @@ def authenticate(self, **options: TransportOptions) -> HTTPRequestParams: @abstractmethod def response_to_auth( - self, response_content: bytes, **options: TransportOptions + self, + response_content: bytes, + **options: TransportOptions, ) -> Mapping[str, str]: """ Process the contents of an authentication response and return a mapping @@ -117,7 +119,9 @@ def response_to_data_source_extracts( # ------------------------------------------------------------------------- @abstractmethod def fetch_data_sources( - self, data_source_type: DataSourceType, **options: TransportOptions + self, + data_source_type: DataSourceType, + **options: 
TransportOptions, ) -> HTTPRequestParams: """ Construct and return a request object to fetch all @@ -157,7 +161,9 @@ def response_to_data_sources( # ------------------------------------------------------------------------- @abstractmethod def mark_upload_as_complete( - self, upload_metadata: UploadMetadata, **options: TransportOptions + self, + upload_metadata: UploadMetadata, + **options: TransportOptions, ) -> HTTPRequestParams: """ Construct and return a request object used to mark an @@ -181,7 +187,7 @@ def post_upload_chunk( self, upload_metadata: UploadMetadata, chunk_index: int, - chunk_content: Any, + chunk_content: Any, # noqa: ANN401 extra_init_kwargs: Mapping[str, Any] | None = None, **options: TransportOptions, ) -> HTTPRequestParams: diff --git a/app/lib/transports/http/http_transport.py b/app/lib/transports/http/http_transport.py index d0af0e1..d91e242 100644 --- a/app/lib/transports/http/http_transport.py +++ b/app/lib/transports/http/http_transport.py @@ -37,7 +37,9 @@ def _log_response( - response: Response, *args, **kwargs + response: Response, + *args, + **kwargs, ) -> None: # pragma: no cover request_message: str = "HTTP Request ({} | {})".format( response.request.method, @@ -78,7 +80,8 @@ def __init__( from app.lib import ensure_not_none self._api_dialect: HTTPAPIDialect = ensure_not_none( - api_dialect, '"api_dialect" MUST be provided and not none.' + api_dialect, + '"api_dialect" MUST be provided and not none.', ) self._timeout = ( (connect_timeout, read_timeout) @@ -90,7 +93,7 @@ def __init__( { "Accept": "*/*", "User-Agent": f"{__title__}/{__version__}", - } + }, ) self._session.hooks["response"].append(_log_response) self._lock: RLock = RLock() @@ -121,8 +124,10 @@ def fetch_data_source_extracts( self._ensure_not_closed() response: Response = self._make_request( self._api_dialect.fetch_data_source_extracts( - data_source_type, data_source, **options - ) + data_source_type, + data_source, + **options, + ), ) return self._api_dialect.response_to_data_source_extracts( response_content=response.content, @@ -134,11 +139,13 @@ def fetch_data_source_extracts( # FETCH DATA SOURCES # ------------------------------------------------------------------------- def fetch_data_sources( - self, data_source_type: DataSourceType, **options: TransportOptions + self, + data_source_type: DataSourceType, + **options: TransportOptions, ) -> Sequence[DataSource]: self._ensure_not_closed() response: Response = self._make_request( - self._api_dialect.fetch_data_sources(data_source_type, **options) + self._api_dialect.fetch_data_sources(data_source_type, **options), ) return self._api_dialect.response_to_data_sources( response_content=response.content, @@ -149,15 +156,18 @@ def fetch_data_sources( # MARK UPLOAD COMPLETION # ------------------------------------------------------------------------- def mark_upload_as_complete( - self, upload_metadata: UploadMetadata, **options: TransportOptions + self, + upload_metadata: UploadMetadata, + **options: TransportOptions, ) -> None: self._ensure_not_closed() self._make_request( self._api_dialect.mark_upload_as_complete( - upload_metadata, **options - ) + upload_metadata, + **options, + ), ) - return None + return # UPLOAD CHUNK POSTAGE # ------------------------------------------------------------------------- @@ -177,7 +187,7 @@ def post_upload_chunk( chunk_content=chunk_content, extra_init_kwargs=extra_init_kwargs, **options, - ) + ), ) return self._api_dialect.response_to_upload_chunk( response_content=response.content, @@ -205,7 +215,7 @@ def 
post_upload_metadata( org_unit_name=org_unit_name, extra_init_kwargs=extra_init_kwargs, **options, - ) + ), ) return self._api_dialect.response_to_upload_metadata( response_content=response.content, @@ -230,7 +240,7 @@ def _authenticate(self) -> AuthBase: ) except RequestException as exp: error_message: str = "Error authenticating the client: %s." % str( - exp + exp, ) _LOGGER.exception(error_message) raise TransportError(message=error_message) from exp @@ -246,8 +256,8 @@ def _authenticate(self) -> AuthBase: raise TransportError(error_message) return _HTTPTransportAuth( auth_headers=self._api_dialect.response_to_auth( - response_content=response.content - ) + response_content=response.content, + ), ) def _ensure_not_closed(self) -> None: @@ -297,7 +307,7 @@ def _make_request(self, request: HTTPRequestParams) -> Response: ) self._auth = self._authenticate() _LOGGER.debug( - "Re-authentication successful, retrying the request." + "Re-authentication successful, retrying the request.", ) # FIXME: This could lead into a stack overflow, revisit this. return self._make_request(request) @@ -332,7 +342,10 @@ def __init__(self, auth_headers: Mapping[str, str]): self._auth_headers = auth_headers def __call__( - self, r: PreparedRequest, *args, **kwargs + self, + r: PreparedRequest, + *args, + **kwargs, ) -> PreparedRequest: # pragma: no cover r.headers.update(self._auth_headers) return r @@ -344,6 +357,9 @@ class _NoAuth(AuthBase): """ def __call__( - self, r: PreparedRequest, *args, **kwargs + self, + r: PreparedRequest, + *args, + **kwargs, ) -> PreparedRequest: # pragma: no cover return r diff --git a/app/lib/transports/http/types.py b/app/lib/transports/http/types.py index 8fd38da..82cbc9d 100644 --- a/app/lib/transports/http/types.py +++ b/app/lib/transports/http/types.py @@ -2,7 +2,9 @@ from typing import Any, TypedDict, Union _FileSpec = tuple[ - str, bytes, str # File name # File content # File content type + str, # File name + bytes, # File content + str, # File content type ] diff --git a/app/use_cases/fetch_metadata.py b/app/use_cases/fetch_metadata.py index 3e92170..25b0037 100644 --- a/app/use_cases/fetch_metadata.py +++ b/app/use_cases/fetch_metadata.py @@ -44,7 +44,7 @@ def execute(self, an_input: Transport) -> Sequence[DataSource]: str(self._data_source_type), ) data_sources: Sequence[DataSource] = an_input.fetch_data_sources( - self._data_source_type + self._data_source_type, ) data_source_type_sources: Mapping[str, DataSource] = { _data_source.id: _data_source for _data_source in data_sources @@ -92,28 +92,29 @@ def __init__(self, transport: Transport): self._transport: Transport = transport def execute( - self, an_input: Sequence[DataSourceType] + self, + an_input: Sequence[DataSourceType], ) -> Sequence[DataSource]: _LOGGER.info("Fetching data sources.") executor: ConcurrentExecutor[Transport, Sequence[DataSource]] executor = ConcurrentExecutor( - *self._data_source_types_to_tasks(an_input) + *self._data_source_types_to_tasks(an_input), ) with executor: futures: Sequence[Future[Sequence[DataSource]]] - futures = executor(self._transport) # noqa + futures = executor.execute(self._transport) # Focus on completed tasks and ignore the ones that failed. 
completed_futures = as_completed(futures) return tuple( chain.from_iterable( - map( + map( # noqa: C417 lambda _f: _f.result(), filter( lambda _f: completed_successfully(_f), completed_futures, ), - ) - ) + ), + ), ) @staticmethod @@ -127,7 +128,7 @@ def _data_source_types_to_tasks( class FetchExtractMetadata( - Task[Sequence[DataSource], Sequence[ExtractMetadata]] + Task[Sequence[DataSource], Sequence[ExtractMetadata]], ): """ Fetch all :class:`extract metadata ` for the given @@ -139,26 +140,27 @@ def __init__(self, transport: Transport): super().__init__() def execute( - self, an_input: Sequence[DataSource] + self, + an_input: Sequence[DataSource], ) -> Sequence[ExtractMetadata]: _LOGGER.info("Fetching extract metadata.") executor: ConcurrentExecutor[Transport, Sequence[ExtractMetadata]] executor = ConcurrentExecutor(*self._data_sources_to_tasks(an_input)) with executor: futures: Sequence[Future[Sequence[ExtractMetadata]]] - futures = executor(self._transport) # noqa + futures = executor.execute(self._transport) # Focus on completed tasks and ignore the ones that failed. completed_futures = as_completed(futures) return tuple( chain.from_iterable( - map( + map( # noqa: C417 lambda _f: _f.result(), filter( lambda _f: completed_successfully(_f), completed_futures, ), - ) - ) + ), + ), ) @staticmethod diff --git a/app/use_cases/main_pipeline.py b/app/use_cases/main_pipeline.py index fc0ad57..12fcbbe 100644 --- a/app/use_cases/main_pipeline.py +++ b/app/use_cases/main_pipeline.py @@ -16,7 +16,7 @@ class FetchMetadata( - Pipeline[Sequence[DataSourceType], Sequence[ExtractMetadata]] + Pipeline[Sequence[DataSourceType], Sequence[ExtractMetadata]], ): """Connect to the remote server and fetch metadata.""" @@ -28,7 +28,7 @@ def __init__(self, transport: Transport): class RunExtraction( - Pipeline[Sequence[ExtractMetadata], Sequence[RunExtractionResult]] + Pipeline[Sequence[ExtractMetadata], Sequence[RunExtractionResult]], ): """ Run each extracts against their parent data source and return the results. diff --git a/app/use_cases/run_extraction.py b/app/use_cases/run_extraction.py index 86f37d2..771b75c 100644 --- a/app/use_cases/run_extraction.py +++ b/app/use_cases/run_extraction.py @@ -67,7 +67,7 @@ def execute(self, an_input: DataSource) -> RunExtractionResult: class GroupSiblingExtracts( - Task[Sequence[ExtractMetadata], Sequence[_GroupedSiblingExtracts]] + Task[Sequence[ExtractMetadata], Sequence[_GroupedSiblingExtracts]], ): """ Group extracts owned by the same :class:`data source ` together @@ -75,12 +75,14 @@ class GroupSiblingExtracts( """ def execute( - self, an_input: Sequence[ExtractMetadata] + self, + an_input: Sequence[ExtractMetadata], ) -> Sequence[_GroupedSiblingExtracts]: _LOGGER.debug("Grouping extracts.") # Sort the given extracts by their parent data source's id. extracts: Sequence[ExtractMetadata] = sorted( - an_input, key=lambda _e: _e.data_source.id + an_input, + key=lambda _e: _e.data_source.id, ) # Group extracts by their parent data source. 
Note unlike the previous # statement, the key function in this statement compares data source @@ -94,29 +96,33 @@ def execute( class RunDataSourceExtracts( - Task[Sequence[_GroupedSiblingExtracts], Sequence[RunExtractionResult]] + Task[Sequence[_GroupedSiblingExtracts], Sequence[RunExtractionResult]], ): """Run extracts for each data source and return the results.""" def execute( - self, an_input: Sequence[_GroupedSiblingExtracts] + self, + an_input: Sequence[_GroupedSiblingExtracts], ) -> Sequence[RunExtractionResult]: _LOGGER.debug("Running extraction for all data sources.") return tuple( chain.from_iterable( self.run_data_source_extracts( - _grouped_extract[0], _grouped_extract[1] + _grouped_extract[0], + _grouped_extract[1], ) for _grouped_extract in an_input - ) + ), ) @staticmethod def run_data_source_extracts( - data_source: DataSource, extracts: Sequence[ExtractMetadata] + data_source: DataSource, + extracts: Sequence[ExtractMetadata], ) -> Sequence[RunExtractionResult]: _LOGGER.info( - 'Running extraction for data source="%s"', str(data_source) + 'Running extraction for data source="%s"', + str(data_source), ) with data_source: executor: ConcurrentExecutor[DataSource, RunExtractionResult] @@ -125,15 +131,15 @@ def run_data_source_extracts( ) with executor: futures: Sequence[Future[RunExtractionResult]] - futures = executor(data_source) # noqa + futures = executor(data_source) # Focus on completed tasks and ignore the ones that failed. completed_futures = as_completed(futures) return tuple( - map( + map( # noqa: C417 lambda _f: _f.result(), filter( lambda _f: completed_successfully(_f), completed_futures, ), - ) + ), ) diff --git a/app/use_cases/upload_extracts.py b/app/use_cases/upload_extracts.py index c3e8a57..576abc4 100644 --- a/app/use_cases/upload_extracts.py +++ b/app/use_cases/upload_extracts.py @@ -75,7 +75,10 @@ def execute(self, an_input: Transport) -> _PostedUpload: class DoPostChunk(Task[Transport, UploadChunk]): def __init__( - self, upload: UploadMetadata, chunk_index: int, chunk_content: bytes + self, + upload: UploadMetadata, + chunk_index: int, + chunk_content: bytes, ): self._upload: UploadMetadata = upload self._chunk_index: int = chunk_index @@ -114,31 +117,33 @@ def execute(self, an_input: Transport) -> UploadMetadata: class PostUploads( - Task[Sequence[RunExtractionResult], Sequence[_PostedUpload]] + Task[Sequence[RunExtractionResult], Sequence[_PostedUpload]], ): def __init__(self, transport: Transport): self._transport: Transport = transport def execute( - self, an_input: Sequence[RunExtractionResult] + self, + an_input: Sequence[RunExtractionResult], ) -> Sequence[_PostedUpload]: _LOGGER.info("Posting uploads.") executor: ConcurrentExecutor[Transport, _PostedUpload] executor = ConcurrentExecutor( - *self._extraction_results_to_tasks(an_input) + *self._extraction_results_to_tasks(an_input), ) with executor: futures: Sequence[Future[_PostedUpload]] - futures = executor(self._transport) # noqa + futures = executor(self._transport) # Focus on completed tasks and ignore the ones that failed. 
completed_futures = as_completed(futures) return tuple( - map( + map( # noqa: C417 lambda _f: _f.result(), filter( - lambda _f: completed_successfully(_f), completed_futures + lambda _f: completed_successfully(_f), + completed_futures, ), - ) + ), ) @staticmethod @@ -149,10 +154,11 @@ def _extraction_results_to_tasks( class PrepareUploadChunks( - Task[Sequence[_PostedUpload], Sequence[_PreparedChunks]] + Task[Sequence[_PostedUpload], Sequence[_PreparedChunks]], ): def execute( - self, an_input: Sequence[_PostedUpload] + self, + an_input: Sequence[_PostedUpload], ) -> Sequence[_PreparedChunks]: _LOGGER.info("Preparing chunks.") return tuple( @@ -165,7 +171,8 @@ def _prepare_chunks_for_upload( posted_upload: _PostedUpload, ) -> _PreparedChunks: _LOGGER.info( - 'Preparing chunks for upload metadata="%s".', str(posted_upload[0]) + 'Preparing chunks for upload metadata="%s".', + str(posted_upload[0]), ) upload: UploadMetadata = posted_upload[0] extract_data: Any = posted_upload[1] @@ -174,13 +181,14 @@ def _prepare_chunks_for_upload( class PostUploadChunks( - Task[Sequence[_PreparedChunks], Sequence[UploadExtractResult]] + Task[Sequence[_PreparedChunks], Sequence[UploadExtractResult]], ): def __init__(self, transport: Transport): self._transport: Transport = transport def execute( - self, an_input: Sequence[_PreparedChunks] + self, + an_input: Sequence[_PreparedChunks], ) -> Sequence[UploadExtractResult]: _LOGGER.info("Posting upload chunks.") return tuple( @@ -194,29 +202,34 @@ def execute( @staticmethod def _post_upload_chunks( - upload: UploadMetadata, chunks: Sequence[bytes], transport: Transport + upload: UploadMetadata, + chunks: Sequence[bytes], + transport: Transport, ) -> UploadExtractResult: executor: ConcurrentExecutor[Transport, UploadChunk] executor = ConcurrentExecutor( *( DoPostChunk( - upload=upload, chunk_index=_index, chunk_content=_chunk + upload=upload, + chunk_index=_index, + chunk_content=_chunk, ) for _index, _chunk in enumerate(chunks) - ) + ), ) with executor: futures: Sequence[Future[UploadChunk]] - futures = executor(transport) # noqa + futures = executor(transport) # Focus on completed tasks and ignore the ones that failed. completed_futures = as_completed(futures) uploaded_chunks: Sequence[UploadChunk] = tuple( - map( + map( # noqa: C417 lambda _f: _f.result(), filter( - lambda _f: completed_successfully(_f), completed_futures + lambda _f: completed_successfully(_f), + completed_futures, ), - ) + ), ) return upload, uploaded_chunks @@ -227,7 +240,8 @@ def __init__(self, transport: Transport): self._transport: Transport = transport def _mark_uploads_as_complete( - self, posted_uploads: Sequence[UploadExtractResult] + self, + posted_uploads: Sequence[UploadExtractResult], ) -> None: _LOGGER.info("Marking completed upload as so.") executor: ConcurrentExecutor[Transport, Any] @@ -235,9 +249,9 @@ def _mark_uploads_as_complete( *( DoMarkUploadAsComplete(upload=_posted_upload[0]) for _posted_upload in posted_uploads - ) + ), ) with executor: futures: Sequence[Future[Any]] - futures = executor(an_input=self._transport) # noqa + futures = executor(an_input=self._transport) as_completed(futures) diff --git a/pyproject.toml b/pyproject.toml index 289a0f7..5458924 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,6 +2,9 @@ requires = ["setuptools>=62.4.0", "setuptools-scm"] build-backend = "setuptools.build_meta" +[project] +requires-python = ">=3.10" # Support Python 3.10+. 
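The recurring "# noqa: C417" markers above silence ruff's flake8-comprehensions rule C417 ("unnecessary map usage"), which the pyproject.toml hunk below enables through the "C4" selector. A tiny runnable illustration of what the rule flags and the comprehension form it would rewrite to:

    values = ["1", "2", "3"]

    # The style C417 flags: map() over a lambda ...
    as_ints = tuple(map(lambda v: int(v), values))

    # ... and the equivalent comprehension it prefers. The hunks above
    # keep their map/filter chains and opt out per line instead.
    as_ints_from_comprehension = tuple(int(v) for v in values)

    assert as_ints == as_ints_from_comprehension == (1, 2, 3)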
+ [tool.black] line-length = 79 target-version = ["py37"] @@ -33,6 +36,7 @@ source = ["app"] [tool.isort] extend_skip = "docs" extend_skip_glob = ["*/migrations/*", "*/static/CACHE/*"] +known_first_party = ["app", "tests"] line_length = 79 multi_line_output = 3 profile = "black" @@ -62,3 +66,142 @@ log_cli_format = "%(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(linen log_cli_level = "ERROR" python_files = ["*test.py", "test_*.py"] norecursedirs = "node_modules venv build env bin .cache .tox" + +[tool.ruff] +exclude = [ + ".eggs", + ".git", + ".mypy_cache", + ".nox", + ".pytype", + ".ruff_cache", + ".tox", + ".venv", + "__pycache__", + "__pypackages__", + "_build", + "build", + "dist", + "docs", + "node_modules", + "venv", +] +ignore = [ + "ANN002", + "ANN003", + "ANN101", + "ANN102", + "ANN204", + "S101" +] +line-length = 79 +select = [ + "A", # flake8-builtins + "ANN", # flake8-annotations + "B", # flake8-bugbear + "BLE", # flake8-blind-except + "C4", # flake8-comprehensions + "C90", # mccabe + "COM", # flake8-commas + # "D", # pydocstyle + "E", # pycodestyle Error + "EM", # flake8-errmsg + "ERA", # eradicate + "F", # pyflakes + "G", # flake8-logging-format + "I", # isort + "ISC", # flake8-implicit-str-concat + "N", # pep8 Naming + "PD", # pandas-vet + "PT", # flake8-pytest-style + "PYI", # flake8-pyi + "Q", # flake8-quotes + "RET", # flake8-return + "RUF", # Ruff-specific rules + "S", # flake8-bandit + "SIM", # flake8-simplify + "T10", # flake8-debugger + "T20", # flake8-print + "TCH", # flake8-type-checking + "UP", # pyupgrade + "W", # pycodestyle Warning + "YTT", # flake8-2020 +] +src = ["app", "tests"] +target-version = "py310" + +[tool.ruff.flake8-quotes] +inline-quotes = "double" +docstring-quotes = "double" + +[tool.ruff.isort] +known-first-party = ["app", "tests"] + +[tool.ruff.mccabe] +max-complexity = 10 + +[tool.tox] +legacy_tox_ini = """ + [tox] + env_list = {py310, py311}, coveralls, package + isolated_build = true + no_package = true + requires = + tox>4 + skip_missing_interpreters = true + + + [gh-actions] + python = + 3.10: py310 + 3.11: py311, coveralls + + + [testenv] + commands = + python -m app --version + pyright . + ruff . + coverage erase + pytest --cov=app -n auto --durations=100 {posargs} + coverage html + deps = + -r{toxinidir}{/}requirements{/}test.txt + description = test and lint the project + download = true + pass_env = + MYSQL_TEST_DB_HOST + MYSQL_TEST_DB_NAME + MYSQL_TEST_DB_PASSWORD + MYSQL_TEST_DB_PORT + MYSQL_TEST_DB_USERNAME + set_env = + PYTHONPATH = {toxinidir} + + + ;If running outside GitHub, ensure that the `COVERALLS_REPO_TOKEN` + ;environment variable is set.
+ [testenv:coveralls] + commands = + coveralls --service=github + description = submit coverage results to coveralls.io + pass_env = + COVERALLS_REPO_TOKEN + GITHUB_* + MYSQL_TEST_DB_HOST + MYSQL_TEST_DB_NAME + MYSQL_TEST_DB_PASSWORD + MYSQL_TEST_DB_PORT + MYSQL_TEST_DB_USERNAME + + + [testenv:package] + allowlist_externals = {envdir}{/}idr_client + commands = + pyinstaller idr_client.spec + staticx dist/idr_client_temp {envdir}{/}idr_client + {envdir}{/}idr_client --version + deps = + -r{toxinidir}{/}requirements{/}build.txt + description = build an executable binary of the project +""" diff --git a/requirements/build.txt b/requirements/build.txt index 83c67a7..c02f50f 100644 --- a/requirements/build.txt +++ b/requirements/build.txt @@ -1,4 +1,4 @@ -r base.txt -pyinstaller~=5.5 +pyinstaller~=5.9.0 staticx~=0.13.8 diff --git a/requirements/test.txt b/requirements/test.txt index 4ea79df..cccf7ed 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -3,15 +3,15 @@ coverage~=6.5.0 coveralls~=3.3.1 factory-boy~=3.2.1 -flake8~=6.0.0 isort~=5.12.0 pandas-stubs~=1.5.3.230304 pyright~=1.1.302 -pytest~=7.2.2 +pytest~=7.3.0 pytest-cov~=4.0.0 pytest-forked~=1.6.0 -pytest-sugar~=0.9.5 +pytest-sugar~=0.9.7 pytest-xdist~=3.2.1 +ruff~=0.0.261 Sphinx~=6.1.3 tox~=4.4.11 tox-gh-actions~=3.1.0 diff --git a/tests/core/factories.py b/tests/core/factories.py index 56b7452..61a6687 100644 --- a/tests/core/factories.py +++ b/tests/core/factories.py @@ -27,7 +27,7 @@ def __init__(self, **kwargs): data_source_type: DataSourceType = kwargs.pop("data_source_type") super().__init__(**kwargs) self._data_source_type: DataSourceType = data_source_type - self._extract_metadata: Mapping[str, ExtractMetadata] = dict() + self._extract_metadata: Mapping[str, ExtractMetadata] = {} self._is_disposed: bool = False @property @@ -40,7 +40,8 @@ def extract_metadata(self) -> Mapping[str, "ExtractMetadata"]: @extract_metadata.setter def extract_metadata( - self, extract_metadata: Mapping[str, ExtractMetadata] + self, + extract_metadata: Mapping[str, ExtractMetadata], ) -> None: self._extract_metadata = extract_metadata @@ -51,7 +52,7 @@ def is_disposed(self) -> bool: def dispose(self) -> None: self._is_disposed = True - def get_extract_task_args(self) -> Any: + def get_extract_task_args(self) -> Any: # noqa: ANN401 return 0 @@ -60,7 +61,7 @@ class FakeDataSourceType(DataSourceType): def __init__(self, **kwargs): super().__init__(**kwargs) - self._data_sources: Mapping[str, FakeDataSource] = dict() + self._data_sources: Mapping[str, FakeDataSource] = {} @property def code(self) -> str: @@ -71,7 +72,7 @@ def data_sources(self) -> Mapping[str, FakeDataSource]: return self._data_sources @data_sources.setter - def data_sources(self, data_sources: Mapping[str, FakeDataSource]): + def data_sources(self, data_sources: Mapping[str, FakeDataSource]) -> None: self._data_sources = data_sources @classmethod @@ -103,13 +104,16 @@ def __init__(self, **kwargs): def data_source(self) -> DataSource: return self._data_source + def get_upload_meta_extra_init_kwargs(self) -> Any: # noqa: ANN401 + return None + def to_task(self) -> Task[Any, Any]: return self._FakeExtractTask() class _FakeExtractTask(Task[Any, Any]): """A fake task that doesn't do anything.""" - def execute(self, an_input: Any) -> Any: + def execute(self, an_input: Any) -> Any: # noqa: ANN401 return 0 @@ -141,22 +145,27 @@ def fetch_data_source_extracts( ) -> Sequence[ExtractMetadata]: return tuple( FakeExtractMetadataFactory.create_batch( - size=self._extracts_count,
data_source=data_source - ) + size=self._extracts_count, + data_source=data_source, + ), ) def fetch_data_sources( - self, data_source_type: DataSourceType, **options: TransportOptions + self, + data_source_type: DataSourceType, + **options: TransportOptions, ) -> Sequence[DataSource]: return tuple( FakeDataSourceFactory.create_batch( size=self._data_sources_count, data_source_type=data_source_type, - ) + ), ) def mark_upload_as_complete( - self, upload_metadata: UploadMetadata, **options: TransportOptions + self, + upload_metadata: UploadMetadata, + **options: TransportOptions, ) -> None: return @@ -169,7 +178,8 @@ def post_upload_chunk( **options: TransportOptions, ) -> UploadChunk: return FakeUploadChunkFactory( - chunk_index=chunk_index, chunk_content=chunk_content + chunk_index=chunk_index, + chunk_content=chunk_content, ) def post_upload_metadata( @@ -221,7 +231,7 @@ class _FakeUploadTask(Task[Any, Sequence[bytes]]): def __init__(self, chunk_count: int): self._chunk_count: int = chunk_count - def execute(self, an_input: Any) -> Sequence[bytes]: + def execute(self, an_input: Any) -> Sequence[bytes]: # noqa: ANN401 return tuple( f"Bla bla bla {_index} ...".encode() for _index in range(self._chunk_count) @@ -243,7 +253,7 @@ class Meta: class IdentifiableDomainObjectFactory(AbstractDomainObjectFactory): """A base factory for ``IdentifiableDomainObject`` implementations.""" - id = factory.LazyFunction(lambda: str(uuid.uuid4())) + id = factory.LazyFunction(lambda: str(uuid.uuid4())) # noqa: A003 class Meta: abstract = True @@ -274,7 +284,7 @@ class ExtractMetadataFactory(IdentifiableDomainObjectFactory): name = factory.Sequence(lambda _n: "Extract Metadata %d" % _n) description = factory.Faker("sentence") preferred_uploads_name = factory.LazyAttribute( - lambda _o: "%s" % _o.name.lower().replace(" ", "_") + lambda _o: "%s" % _o.name.lower().replace(" ", "_"), ) class Meta: @@ -312,7 +322,7 @@ class FakeDataSourceFactory(DataSourceFactory): name = factory.Sequence(lambda _n: "Fake Data Source %d" % _n) data_source_type = factory.SubFactory( - "tests.core.factories.FakeDataSourceTypeFactory" + "tests.core.factories.FakeDataSourceTypeFactory", ) class Meta: @@ -335,7 +345,7 @@ class FakeExtractMetadataFactory(ExtractMetadataFactory): name = factory.Sequence(lambda _n: "Fake Extract Metadata %d" % _n) description = factory.Faker("sentence") preferred_uploads_name = factory.LazyAttribute( - lambda _o: "%s" % _o.name.lower().replace(" ", "_") + lambda _o: "%s" % _o.name.lower().replace(" ", "_"), ) data_source = factory.SubFactory(FakeDataSourceFactory) diff --git a/tests/core/test_domain.py b/tests/core/test_domain.py index 902f15b..19aea99 100644 --- a/tests/core/test_domain.py +++ b/tests/core/test_domain.py @@ -47,10 +47,13 @@ def test_domain_object_initialization_with_all_params(self) -> None: """ domain_object1 = _SimpleDomainObject( - first_name="Juha", last_name="Kalulu" + first_name="Juha", + last_name="Kalulu", ) domain_object2 = _SimpleDomainObject( - first_name="Juha", last_name="Kalulu", middle_name="Kijiko" + first_name="Juha", + last_name="Kalulu", + middle_name="Kijiko", ) assert domain_object1 is not None @@ -82,13 +85,11 @@ def test_get_required_fields_class_method(self) -> None: returns the expected value. 
""" - self.assertListEqual( - list(_SimpleDomainObject.get_required_fields()), - ["first_name", "last_name"], - ) - self.assertListEqual( - list(_SomeDomainObject.get_required_fields()), ["name", "id"] - ) + assert list(_SimpleDomainObject.get_required_fields()) == [ + "first_name", + "last_name", + ] + assert list(_SomeDomainObject.get_required_fields()) == ["name", "id"] class TestDataSourceInterface(TestCase): @@ -106,7 +107,7 @@ def test_of_mapping_class_method(self) -> None: "name": "Some data source", "description": "A very good description.", "data_source_type": FakeDataSourceTypeFactory(), - } + }, ) data_source2 = FakeDataSource.of_mapping( { @@ -114,7 +115,7 @@ def test_of_mapping_class_method(self) -> None: "name": "Some other data source", "preferred_uploads_name": "some_data", "data_source_type": FakeDataSourceTypeFactory(), - } + }, ) assert data_source1 is not None @@ -157,7 +158,9 @@ class TestExtractMetadataInterface(TestCase): def setUp(self) -> None: super().setUp() self._extract = FakeExtractMetadata( - id="1", name="Some data", data_source=FakeDataSourceFactory() + id="1", + name="Some data", + data_source=FakeDataSourceFactory(), ) def test_get_upload_meta_extra_init_kwargs(self) -> None: @@ -181,7 +184,7 @@ def test_of_mapping_class_method(self) -> None: "name": "Some data", "description": "A very good description.", "data_source": FakeDataSourceFactory(), - } + }, ) extract2 = FakeExtractMetadata.of_mapping( { @@ -189,7 +192,7 @@ def test_of_mapping_class_method(self) -> None: "name": "Some other data", "preferred_uploads_name": "some_data", "data_source": FakeDataSourceFactory(), - } + }, ) assert extract1 is not None @@ -217,7 +220,9 @@ class TestUploadChunkInterface(TestCase): def setUp(self) -> None: super().setUp() self._upload_chunk = FakeUploadChunk( - id="1", chunk_index=0, chunk_content=b"Bla bla bla ..." + id="1", + chunk_index=0, + chunk_content=b"Bla bla bla ...", ) def test_of_mapping_class_method(self) -> None: @@ -227,14 +232,14 @@ def test_of_mapping_class_method(self) -> None: """ upload_chunk1 = FakeUploadChunk.of_mapping( - {"id": "1", "chunk_index": 0, "chunk_content": b"Bla bla bla ..."} + {"id": "1", "chunk_index": 0, "chunk_content": b"Bla bla bla ..."}, ) upload_chunk2 = FakeUploadChunk.of_mapping( { "id": "2", "chunk_index": 1, "chunk_content": b"Bla bla bla bla ...", - } + }, ) assert upload_chunk1 is not None @@ -294,7 +299,7 @@ def test_of_mapping_class_method(self) -> None: "org_unit_name": org_unit_name, "content_type": content_type, "extract_metadata": self._extract_metadata, - } + }, ) assert upload_metadata is not None @@ -310,5 +315,6 @@ def test_string_representation(self) -> None: returns the expected value. 
""" assert str(self._upload_metadata) == "Upload {} for extract {}".format( - self._upload_metadata.id, str(self._extract_metadata) + self._upload_metadata.id, + str(self._extract_metadata), ) diff --git a/tests/factories.py b/tests/factories.py index acc7c3c..8429192 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -16,7 +16,7 @@ def config_factory() -> dict[str, Any]: "default_maximum_delay": 10.0, # 10 seconds }, "SETTINGS_INITIALIZERS": [ - "tests.test_app.FakeDataSourceTypesConfigInitializer" + "tests.test_app.FakeDataSourceTypesConfigInitializer", ], "SUPPORTED_DATA_SOURCE_TYPES": ["app.imp.sql_data.SQLDataSourceType"], } diff --git a/tests/imp/sql_data/factories.py b/tests/imp/sql_data/factories.py index 54f733e..7dc8c17 100644 --- a/tests/imp/sql_data/factories.py +++ b/tests/imp/sql_data/factories.py @@ -1,5 +1,4 @@ -from collections.abc import Generator -from typing import Any +from typing import TYPE_CHECKING, Any import factory @@ -19,6 +18,9 @@ UploadMetadataFactory, ) +if TYPE_CHECKING: + from collections.abc import Generator + class SQLDataSourceFactory(DataSourceFactory): """A factory for ``SQLDataSource`` instances.""" @@ -26,7 +28,7 @@ class SQLDataSourceFactory(DataSourceFactory): database_name = factory.Sequence(lambda _n: "Database %d" % _n) database_vendor = SupportedDBVendors.SQLITE_MEM data_source_type = factory.SubFactory( - "tests.imp.sql_data.factories.SQLDataSourceTypeFactory" + "tests.imp.sql_data.factories.SQLDataSourceTypeFactory", ) @factory.post_generation @@ -37,13 +39,14 @@ def extract_metadata( **kwargs, ) -> None: extract_metadata_count: int = kwargs.setdefault( - "extract_metadata_count", 5 + "extract_metadata_count", + 5, ) extract_metadata: Generator[SQLExtractMetadata, Any, Any] = ( SQLExtractMetadataFactory(data_source=obj) for _ in range(extract_metadata_count) ) - obj.extract_metadata = { # noqa + obj.extract_metadata = { _extract_meta.id: _extract_meta for _extract_meta in extract_metadata } @@ -70,7 +73,7 @@ def data_sources( SQLDataSourceFactory(data_source_type=obj) for _ in range(data_sources_count) ) - obj.data_sources = { # noqa + obj.data_sources = { _data_source.id: _data_source for _data_source in data_sources } @@ -85,7 +88,7 @@ class SQLExtractMetadataFactory(ExtractMetadataFactory): """A factory for ``SQLExtractMetadata`` instances.""" sql_query = "select 'hello world'" - applicable_source_versions = tuple() + applicable_source_versions = () data_source = factory.SubFactory(SQLDataSourceFactory) class Meta: diff --git a/tests/imp/sql_data/test_domain.py b/tests/imp/sql_data/test_domain.py index 0be27ce..d89d58e 100644 --- a/tests/imp/sql_data/test_domain.py +++ b/tests/imp/sql_data/test_domain.py @@ -1,12 +1,9 @@ import os -from collections.abc import Mapping, Sequence -from typing import Any +from typing import TYPE_CHECKING, Any from unittest import TestCase from unittest.mock import patch import pytest -from pandas import DataFrame -from sqlalchemy.engine import Connection from sqlalchemy.exc import DisconnectionError from app.imp.sql_data import ( @@ -29,6 +26,12 @@ SQLUploadMetadataFactory, ) +if TYPE_CHECKING: + from collections.abc import Mapping, Sequence + + from pandas import DataFrame + from sqlalchemy.engine import Connection + class TestSQLDataSource(TestCase): """Tests for the :class:`SQLDataSource` class.""" @@ -38,7 +41,7 @@ def setUp(self) -> None: self._data_source: SQLDataSource = SQLDataSourceFactory.build() self._extract_meta_1: SQLExtractMetadata = SQLExtractMetadataFactory() self._extract_meta_2: 
SQLExtractMetadata = SQLExtractMetadataFactory( - sql_query="select 'Bonjour le monde'" + sql_query="select 'Bonjour le monde'", ) self._data_source.extract_metadata = { "1": self._extract_meta_1, @@ -55,17 +58,18 @@ def test_a_disposed_data_source_raises_expected_errors(self) -> None: ``SQLDataSourceDisposedError`` on attempted usage. """ with pytest.raises( - SQLDataSourceDisposedError, match="Data source is disposed." + SQLDataSourceDisposedError, + match="Data source is disposed.", ): self._data_source.get_extract_task_args() def test_accessors(self) -> None: """Assert that accessors return the expected values.""" - self.assertDictEqual( - self._data_source.extract_metadata, - {"1": self._extract_meta_1, "2": self._extract_meta_2}, - ) + assert self._data_source.extract_metadata == { + "1": self._extract_meta_1, + "2": self._extract_meta_2, + } # This should remain true until after `self.connect_to_db()` is called. assert self._data_source.is_disposed assert self._data_source.data_source_type is not None @@ -85,9 +89,11 @@ def test_get_extract_task_args_errors(self) -> None: """ with patch("sqlalchemy.engine.Engine.connect", autospec=True) as c: c.side_effect = DisconnectionError - with pytest.raises(SQLDataExtractionOperationError) as exc_info: - with self._data_source as ds: - ds.get_extract_task_args() + with ( + pytest.raises(SQLDataExtractionOperationError) as exc_info, + self._data_source as ds, + ): + ds.get_extract_task_args() assert isinstance(exc_info.value.__cause__, DisconnectionError) @@ -165,29 +171,29 @@ def test_load_mysql_config_with_invalid_config(self) -> None: } self._data_source.database_vendor = SupportedDBVendors.MYSQL - with patch("app.settings", config1): - with pytest.raises( - ImproperlyConfiguredError, match="is missing or is not valid." - ): - self._data_source.connect_to_db() + with patch("app.settings", config1), pytest.raises( + ImproperlyConfiguredError, + match="is missing or is not valid.", + ): + self._data_source.connect_to_db() - with patch("app.settings", config2): - with pytest.raises( - ImproperlyConfiguredError, match="is missing or is not valid." - ): - self._data_source.connect_to_db() + with patch("app.settings", config2), pytest.raises( + ImproperlyConfiguredError, + match="is missing or is not valid.", + ): + self._data_source.connect_to_db() - with patch("app.settings", config3): - with pytest.raises( - ImproperlyConfiguredError, match="is missing in" + with patch("app.settings", config3), pytest.raises( + ImproperlyConfiguredError, + match="is missing in", + ): + self._data_source.connect_to_db() + for _conf in (config4, config5, config6): + with patch("app.settings", _conf), pytest.raises( + ImproperlyConfiguredError, + match="is not a valid port.", ): self._data_source.connect_to_db() - for _conf in (config4, config5, config6): - with patch("app.settings", _conf): - with pytest.raises( - ImproperlyConfiguredError, match="is not a valid port." 
- ): - self._data_source.connect_to_db() def test_object_initialization_from_a_mapping(self) -> None: """ @@ -204,7 +210,7 @@ def test_object_initialization_from_a_mapping(self) -> None: } data_source: SQLDataSource = SQLDataSource.of_mapping(mapping) - self.assertDictEqual(data_source.extract_metadata, {}) + assert data_source.extract_metadata == {} assert data_source is not None assert data_source.id == mapping["id"] assert data_source.name == mapping["name"] @@ -215,24 +221,23 @@ def test_sql_data_source_as_a_context_manager(self) -> None: """Assert that ``SQLDataSource`` can be used as a context manager.""" config: Config = Config(settings={"RETRY": {"enable_retries": False}}) - with patch("app.settings", config): - with self._data_source: - # This should work without raising any errors as using an - # SQLDataSource as a context manager should automatically - # result connect_to_db() being called. - with self._data_source.get_extract_task_args() as connection: - assert connection is not None + with patch("app.settings", config), self._data_source: # noqa: SIM117 + # This should work without raising any errors as using an + # SQLDataSource as a context manager should automatically + # result connect_to_db() being called. + with self._data_source.get_extract_task_args() as connection: + assert connection is not None def test_sql_data_source_context_manager_nesting_is_disallowed( self, - ) -> None: # noqa + ) -> None: """ Assert that nesting of ``SQLDataSource`` as a context manager is a programming error. """ - with self._data_source: - with pytest.raises(SQLDataError, match="Incorrect usage"): + with self._data_source: # noqa: SIM117 + with pytest.raises(SQLDataError, match="Incorrect usage"): # noqa with self._data_source: ... @@ -253,7 +258,7 @@ def test_accessors(self) -> None: assert ( self._data_source_type.imp_extract_metadata_klass() == SQLExtractMetadata - ) # noqa + ) assert ( self._data_source_type.imp_upload_chunk_klass() == SQLUploadChunk ) @@ -276,6 +281,7 @@ def test_accessors(self) -> None: task = self._extract_meta.to_task() assert task is not None assert self._extract_meta.data_source is not None + assert self._extract_meta.get_upload_meta_extra_init_kwargs() is None class TestSQLUploadMetadata(TestCase): @@ -291,7 +297,8 @@ def test_accessors(self) -> None: content_type = "application/vnd.apache-parquet" assert self._upload_meta.extract_metadata is not None assert isinstance( - self._upload_meta.extract_metadata, SQLExtractMetadata + self._upload_meta.extract_metadata, + SQLExtractMetadata, ) assert self._upload_meta.get_content_type() == content_type assert self._upload_meta.to_task() is not None @@ -306,12 +313,12 @@ def test_to_task(self) -> None: data_source: SQLDataSource = extract_meta.data_source with data_source: extracted_data: DataFrame = extract_meta.to_task().execute( - data_source.get_extract_task_args() + data_source.get_extract_task_args(), ) upload_task = self._upload_meta.to_task() processed_extract: Sequence[bytes] = upload_task.execute( - extracted_data + extracted_data, ) assert len(processed_extract) > 0 diff --git a/tests/lib/config/test_config.py b/tests/lib/config/test_config.py index 67445c3..f435981 100644 --- a/tests/lib/config/test_config.py +++ b/tests/lib/config/test_config.py @@ -1,11 +1,14 @@ -from collections.abc import Mapping, Sequence -from typing import Any +from typing import TYPE_CHECKING, Any from unittest import TestCase import pytest from app.lib import Config, MissingSettingError, SettingInitializer +if TYPE_CHECKING: + from 
collections.abc import Mapping, Sequence + + # ============================================================================= # HELPERS # ============================================================================= @@ -21,7 +24,7 @@ class _Setting1Initializer(SettingInitializer): def setting(self) -> str: return "SETTING_1" - def execute(self, an_input: Any) -> str: + def execute(self, an_input: Any) -> str: # noqa: ANN401 return str(an_input) @@ -70,7 +73,8 @@ def test_initializers_are_run(self) -> None: Assert that initializers are run and the settings value are updated. """ config = Config( - settings=self._settings, settings_initializers=self._initializers + settings=self._settings, + settings_initializers=self._initializers, ) assert config.SETTING_1 == "0" @@ -84,7 +88,8 @@ def test_initializers_can_set_default_values(self) -> None: """ self._settings = {"SETTING_1": self._setting_1_value} config = Config( - settings=self._settings, settings_initializers=self._initializers + settings=self._settings, + settings_initializers=self._initializers, ) assert config.SETTING_1 == "0" @@ -111,7 +116,8 @@ def test_settings_retrieval_using_get_method(self) -> None: "weird::setting::name": "some value", } config = Config( - settings=self._settings, settings_initializers=self._initializers + settings=self._settings, + settings_initializers=self._initializers, ) assert config.get("SETTING_1") == "0" @@ -127,7 +133,8 @@ def test_settings_without_initializers_are_not_modified(self) -> None: setting_3_value: str = "Do not modify!" self._settings = {**self._settings, "SETTING_3": setting_3_value} config = Config( - settings=self._settings, settings_initializers=self._initializers + settings=self._settings, + settings_initializers=self._initializers, ) - assert config.SETTING_3 == setting_3_value + assert setting_3_value == config.SETTING_3 diff --git a/tests/lib/retry/test_retry.py b/tests/lib/retry/test_retry.py index 14b2903..67e1106 100644 --- a/tests/lib/retry/test_retry.py +++ b/tests/lib/retry/test_retry.py @@ -46,7 +46,7 @@ def setUp(self) -> None: "RETRY": { "default_deadline": self._deadline, "enable_retries": False, - } + }, }, settings_initializers=(RetryInitializer(),), ) @@ -99,7 +99,7 @@ def test_config_is_auto_loaded_when_used_as_a_decorator(self) -> None: "default_maximum_delay": 1.0, "default_multiplicative_factor": 5, "enable_retries": True, - } + }, }, settings_initializers=(RetryInitializer(),), ) @@ -187,7 +187,7 @@ def test_do_retry_with_failing_callable_and_a_retryable_exception( a_callable1 = MagicMock(side_effect=AttributeError) # Callable that fails initially but eventually succeeds. a_callable2 = MagicMock( - side_effect=[ValueError, ValueError, AttributeError, 10] + side_effect=[ValueError, ValueError, AttributeError, 10], ) instance: Retry = Retry( @@ -203,8 +203,8 @@ def test_do_retry_with_failing_callable_and_a_retryable_exception( # This should continue until the deadline is exceeded and the # error is re-raised. instance.do_retry(a_callable1) - assert isinstance(exec_info.value.__cause__, AttributeError) + assert isinstance(exec_info.value.__cause__, AttributeError) # This should continue until a successful call. assert instance.do_retry(a_callable2) == 10 assert a_callable2.call_count == 4 @@ -219,7 +219,7 @@ def test_do_retry_with_failing_callable_and_non_retryable_exception( ``Retry`` instance. 
""" instance: Retry = Retry( - predicate=if_exception_type_factory(ValueError) + predicate=if_exception_type_factory(ValueError), ) instance.load_config() diff --git a/tests/lib/retry/test_setting_initializers.py b/tests/lib/retry/test_setting_initializers.py index 540bfe7..2442725 100644 --- a/tests/lib/retry/test_setting_initializers.py +++ b/tests/lib/retry/test_setting_initializers.py @@ -18,10 +18,7 @@ def test_execute_return_value_when_no_config_is_provided(self) -> None: Assert that the ``execute`` method returns a default configuration when one isn't provided. """ - self.assertDictEqual( - self._instance.execute(an_input=None), # noqa - DEFAULT_RETRY_CONFIG, # noqa - ) + assert self._instance.execute(an_input=None) == DEFAULT_RETRY_CONFIG def test_execute_when_invalid_config_is_provided(self) -> None: """ diff --git a/tests/lib/tasks/test_common.py b/tests/lib/tasks/test_common.py index 01a165a..c2457d9 100644 --- a/tests/lib/tasks/test_common.py +++ b/tests/lib/tasks/test_common.py @@ -1,4 +1,4 @@ -from collections.abc import Callable +from typing import TYPE_CHECKING from unittest import TestCase import pytest @@ -6,6 +6,10 @@ from app.core import Task from app.lib import Chainable, Consumer, Pipeline +if TYPE_CHECKING: + from collections.abc import Callable + + # ============================================================================= # HELPERS # ============================================================================= @@ -63,7 +67,7 @@ def test_execution_input_must_not_be_none(self) -> None: Assert that the input to :meth:`~Chainable.execute()` must not be ``None``. """ - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="cannot be None"): self._chainable.execute(None) # type: ignore @@ -76,7 +80,7 @@ def setUp(self) -> None: self._consumer: Consumer[int] = Consumer(consume=self._consume) def test_execution(self) -> None: - out: int = self._consumer(5) # noqa + out: int = self._consumer(5) assert self._global_state == 10 assert out == 5 @@ -86,7 +90,7 @@ def test_consume_constructor_arg_must_not_be_none(self) -> None: Assert that the ``consume`` argument to the constructor must not be ``None``. """ - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="cannot be None"): Consumer(consume=None) # type: ignore def _consume(self, an_input: int) -> None: @@ -114,7 +118,7 @@ def setUp(self) -> None: ) def test_execution(self) -> None: - val: str = self._pipeline(self._an_input) # noqa + val: str = self._pipeline(self._an_input) assert val == "5" assert self._global_state == 12 @@ -123,7 +127,7 @@ def test_that_a_pipeline_must_contains_at_least_one_task(self) -> None: """ Assert that a pipeline must contain one or more tasks to be valid. 
""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="cannot be None or empty"): Pipeline() def _consume(self, an_input: int) -> None: diff --git a/tests/lib/tasks/test_concurrent.py b/tests/lib/tasks/test_concurrent.py index 31358ca..3950d36 100644 --- a/tests/lib/tasks/test_concurrent.py +++ b/tests/lib/tasks/test_concurrent.py @@ -1,5 +1,5 @@ -from collections.abc import Sequence from concurrent.futures import Executor, ThreadPoolExecutor, wait +from typing import TYPE_CHECKING from unittest import TestCase import pytest @@ -8,6 +8,10 @@ from app.lib import ConcurrentExecutor, completed_successfully from app.lib.tasks.concurrent import ConcurrentExecutorDisposedError +if TYPE_CHECKING: + from collections.abc import Sequence + + # ============================================================================= # HELPERS # ============================================================================= @@ -54,7 +58,7 @@ def setUp(self) -> None: super().setUp() self._tasks: Sequence[_AddOne] = tuple(_AddOne() for _ in range(10)) self._instance: ConcurrentExecutor[int, int] = ConcurrentExecutor( - *self._tasks + *self._tasks, ) def tearDown(self) -> None: @@ -80,7 +84,9 @@ def test_dispose_method_side_effects(self) -> None: """ executor_service: Executor = ThreadPoolExecutor() instance: ConcurrentExecutor[int, str] = ConcurrentExecutor( - _IntToString(), _IntToString(), executor=executor_service + _IntToString(), + _IntToString(), + executor=executor_service, ) instance.dispose() @@ -96,10 +102,11 @@ def test_execute_method_return_value(self) -> None: """ results1 = self._instance.execute(0) results2 = ConcurrentExecutor( - _DivideByZero(), _DivideByZero() + _DivideByZero(), + _DivideByZero(), ).execute(1) result3 = ConcurrentExecutor( - *(_IntToString() for _ in range(10)) + *(_IntToString() for _ in range(10)), ).execute(10) for result in results1: @@ -115,7 +122,8 @@ def test_execute_method_return_value(self) -> None: def test_is_dispose_property_return_value(self) -> None: """Assert the ``is_disposed`` property returns the expected result.""" instance: ConcurrentExecutor[int, str] = ConcurrentExecutor( - _IntToString(), _IntToString() + _IntToString(), + _IntToString(), ) instance.dispose() @@ -128,7 +136,7 @@ def test_tasks_return_value(self) -> None: property should return a sequence of the same tasks given to the instance during initialization. 
""" - self.assertTupleEqual(tuple(self._tasks), tuple(self._instance.tasks)) + assert tuple(self._tasks) == tuple(self._instance.tasks) def test_using_a_disposed_executor_raises_expected_errors(self) -> None: """ @@ -165,7 +173,7 @@ def test_completed_successfully_function_return_value(self) -> None: results2 = c.execute(1) wait(results2) with ConcurrentExecutor( - *(tuple(self._erroneous_tasks) + tuple(self._valid_tasks)) + *(tuple(self._erroneous_tasks) + tuple(self._valid_tasks)), ) as c: results3 = c.execute(1) wait(results3) @@ -178,8 +186,11 @@ def test_completed_successfully_function_return_value(self) -> None: assert ( len( tuple( - filter(lambda _r: not completed_successfully(_r), results3) - ) + filter( + lambda _r: not completed_successfully(_r), + results3, + ), + ), ) == 10 ) diff --git a/tests/lib/tasks/test_sql.py b/tests/lib/tasks/test_sql.py index 28e64c1..e4596c8 100644 --- a/tests/lib/tasks/test_sql.py +++ b/tests/lib/tasks/test_sql.py @@ -1,15 +1,18 @@ +from typing import TYPE_CHECKING from unittest import TestCase from unittest.mock import patch import pytest -from pandas import DataFrame from sqlalchemy import create_engine -from sqlalchemy.engine import Engine from sqlalchemy.exc import DisconnectionError from app.core import ExtractionOperationError from app.lib import SimpleSQLSelect +if TYPE_CHECKING: + from pandas import DataFrame + from sqlalchemy.engine import Engine + class TestSimpleSQLSelect(TestCase): """Tests for the ``SimpleSQLSelect`` task.""" @@ -18,7 +21,7 @@ def setUp(self) -> None: super().setUp() self._engine: Engine = create_engine("sqlite+pysqlite:///:memory:") self._sql_select: SimpleSQLSelect = SimpleSQLSelect( - sql_query="select 'hello world'" + sql_query="select 'hello world'", ) def tearDown(self) -> None: @@ -43,8 +46,10 @@ def test_execute_errors(self) -> None: """ with patch("pandas.read_sql", autospec=True) as r: r.side_effect = DisconnectionError - with pytest.raises(ExtractionOperationError) as exc_info: - with self._engine.connect() as connection: - self._sql_select.execute(connection=connection) + with ( + pytest.raises(ExtractionOperationError) as exc_info, + self._engine.connect() as connection, + ): + self._sql_select.execute(connection=connection) assert isinstance(exc_info.value.__cause__, DisconnectionError) diff --git a/tests/lib/test_app_registry.py b/tests/lib/test_app_registry.py index 84d43cf..b1e85dd 100644 --- a/tests/lib/test_app_registry.py +++ b/tests/lib/test_app_registry.py @@ -1,13 +1,17 @@ -from collections.abc import Mapping +from typing import TYPE_CHECKING from unittest import TestCase import pytest -from app.core import DataSourceType from app.imp.sql_data import SQLDataSourceType from app.lib import AppRegistry, ImproperlyConfiguredError from app.lib.transports.http import http_transport_factory +if TYPE_CHECKING: + from collections.abc import Mapping + + from app.core import DataSourceType + class TestAppRegistry(TestCase): """Tests for the :class:`AppRegistry` class.""" @@ -16,7 +20,7 @@ def setUp(self) -> None: super().setUp() self._app_registry: AppRegistry = AppRegistry() self._data_source_types: Mapping[str, DataSourceType] = { - "sql_data": SQLDataSourceType() + "sql_data": SQLDataSourceType(), } def test_immutability_of_data_source_types_property_content(self) -> None: @@ -25,7 +29,7 @@ def test_immutability_of_data_source_types_property_content(self) -> None: property cannot be modified by modifying the original mapping. 
""" data_source_types: dict[str, DataSourceType] = { - **self._data_source_types + **self._data_source_types, } self._app_registry.data_source_types = data_source_types @@ -40,12 +44,10 @@ def test_retrieval_of_data_source_types(self) -> None: value. """ self._app_registry.data_source_types = self._data_source_types - self.assertDictEqual( - self._app_registry.data_source_types, self._data_source_types - ) + assert self._app_registry.data_source_types == self._data_source_types self._app_registry.data_source_types = {} - self.assertDictEqual(self._app_registry.data_source_types, {}) + assert self._app_registry.data_source_types == {} def test_retrieval_of_default_transport_factory(self) -> None: """ diff --git a/tests/lib/test_checkers.py b/tests/lib/test_checkers.py index b941fb6..e8aa097 100644 --- a/tests/lib/test_checkers.py +++ b/tests/lib/test_checkers.py @@ -1,5 +1,4 @@ -from collections.abc import Iterable, Sequence -from typing import SupportsFloat +from typing import TYPE_CHECKING, SupportsFloat import pytest @@ -9,6 +8,9 @@ ensure_not_none_nor_empty, ) +if TYPE_CHECKING: + from collections.abc import Iterable, Sequence + def test_ensure_greater_than_return_value_on_valid_input() -> None: """ @@ -37,20 +39,14 @@ def test_ensure_fails_on_invalid_input() -> None: (-30, -19), ) for value, base_value in inputs: - with pytest.raises(ValueError) as exp_info1: - message: str = "{} must be greater than {}".format( - value, base_value - ) + message: str = "{} must be greater than {}".format( + value, + base_value, + ) + with pytest.raises(ValueError, match="be greater than") as exp_info1: ensure_greater_than(value, base_value, message=message) - assert exp_info1.value.args[0] == message.format(value, base_value) - - with pytest.raises(ValueError) as exp_info2: - ensure_greater_than(0.0, 1.0) - - assert exp_info2.value.args[0] == ( - '"value" must greater than "base_value"' - ) + assert exp_info1.value.args[0] == message.format(value, base_value) def test_ensure_not_none_returns_input_value_if_valid() -> None: @@ -74,9 +70,9 @@ def test_ensure_not_none_fails_on_invalid_input() -> None: Assert that ``ensure_not_none`` raises ``ValueError`` when given a ``None`` value as input. """ - with pytest.raises(ValueError) as exp_info1: + with pytest.raises(ValueError, match="cannot be None") as exp_info1: ensure_not_none(None) - with pytest.raises(ValueError) as exp_info2: + with pytest.raises(ValueError, match="Invalid") as exp_info2: ensure_not_none(None, message="Invalid.") assert exp_info1.value.args[0] == '"value" cannot be None.' @@ -100,13 +96,13 @@ def test_ensure_not_none_nor_empty_fails_on_invalid_input() -> None: Assert that ``ensure_not_none_nor_empty`` raises ``ValueError`` when given a ``None`` or empty value as input. """ - with pytest.raises(ValueError) as exp_info1: + with pytest.raises(ValueError, match="cannot be None or emp") as exp_info1: ensure_not_none_nor_empty(None) # type: ignore - with pytest.raises(ValueError) as exp_info2: + with pytest.raises(ValueError, match="Invalid") as exp_info2: ensure_not_none_nor_empty(None, message="Invalid.") # type: ignore - with pytest.raises(ValueError) as exp_info3: + with pytest.raises(ValueError, match="cannot be None or emp") as exp_info3: ensure_not_none_nor_empty("") - with pytest.raises(ValueError) as exp_info4: + with pytest.raises(ValueError, match="Invalid") as exp_info4: ensure_not_none_nor_empty([], message="Invalid.") assert exp_info1.value.args[0] == '"value" cannot be None or empty.' 
diff --git a/tests/lib/test_module_loading.py b/tests/lib/test_module_loading.py index b7519b8..dd55264 100644 --- a/tests/lib/test_module_loading.py +++ b/tests/lib/test_module_loading.py @@ -12,7 +12,7 @@ def test_correct_return_value_on_valid_dotted_path1() -> None: assert import_string("app.lib.Config") is Config assert ( import_string("app.lib.module_loading.import_string") is import_string - ) # noqa + ) def test_correct_expected_behavior_on_invalid_dotted_path1() -> None: diff --git a/tests/lib/transports/http/factories.py b/tests/lib/transports/http/factories.py index 1acf286..1ae673e 100644 --- a/tests/lib/transports/http/factories.py +++ b/tests/lib/transports/http/factories.py @@ -42,7 +42,9 @@ def authenticate(self, **options: TransportOptions) -> HTTPRequestParams: } def response_to_auth( - self, response_content: bytes, **options: TransportOptions + self, + response_content: bytes, + **options: TransportOptions, ) -> Mapping[str, str]: return {"Authorization": "Bearer some_secure_token"} @@ -65,10 +67,12 @@ def response_to_data_source_extracts( data_source: DataSource, **options: TransportOptions, ) -> Sequence[ExtractMetadata]: - return tuple() + return () def fetch_data_sources( - self, data_source_type: DataSourceType, **options: TransportOptions + self, + data_source_type: DataSourceType, + **options: TransportOptions, ) -> HTTPRequestParams: return { "headers": {"Accept": "application/json"}, @@ -83,10 +87,12 @@ def response_to_data_sources( data_source_type: DataSourceType, **options: TransportOptions, ) -> Sequence[DataSource]: - return tuple() + return () def mark_upload_as_complete( - self, upload_metadata: UploadMetadata, **options: TransportOptions + self, + upload_metadata: UploadMetadata, + **options: TransportOptions, ) -> HTTPRequestParams: return { "headers": {"Accept": "application/json"}, @@ -99,7 +105,7 @@ def post_upload_chunk( self, upload_metadata: UploadMetadata, chunk_index: int, - chunk_content: Any, + chunk_content: Any, # noqa: ANN401 extra_init_kwargs: Mapping[str, Any] | None = None, **options: TransportOptions, ) -> HTTPRequestParams: diff --git a/tests/lib/transports/http/test_api_v1_dialect.py b/tests/lib/transports/http/test_api_v1_dialect.py index c371cbf..1ca65e0 100644 --- a/tests/lib/transports/http/test_api_v1_dialect.py +++ b/tests/lib/transports/http/test_api_v1_dialect.py @@ -1,6 +1,5 @@ import json -from collections.abc import Mapping -from typing import Any +from typing import TYPE_CHECKING, Any from unittest import TestCase from unittest.mock import patch @@ -20,6 +19,9 @@ UploadMetadata, ) +if TYPE_CHECKING: + from collections.abc import Mapping + def test_idr_server_api_v1_dialect_factory_results_on_valid_config() -> None: """ @@ -31,7 +33,7 @@ def test_idr_server_api_v1_dialect_factory_results_on_valid_config() -> None: "host": "http://test.example.com", "username": "admin", "password": "pa$$word", - } + }, } with patch("app.settings", config): api_dialect = idr_server_api_v1_dialect_factory() @@ -44,21 +46,26 @@ def test_idr_server_api_v1_dialect_factor_fails_with_invalid_config() -> None: errors when the app has missing settings or is incorrectly configured. 
""" - with patch("app.settings", {}): - with pytest.raises(ImproperlyConfiguredError): - idr_server_api_v1_dialect_factory() + with patch("app.settings", {}), pytest.raises(ImproperlyConfiguredError): + idr_server_api_v1_dialect_factory() - with patch("app.settings", {"REMOTE_SERVER": []}): - with pytest.raises(ImproperlyConfiguredError): - idr_server_api_v1_dialect_factory() + with ( + patch("app.settings", {"REMOTE_SERVER": []}), + pytest.raises(ImproperlyConfiguredError), + ): + idr_server_api_v1_dialect_factory() - with patch("app.settings", {"REMOTE_SERVER": {}}): - with pytest.raises(ImproperlyConfiguredError): - idr_server_api_v1_dialect_factory() + with ( + patch("app.settings", {"REMOTE_SERVER": {}}), + pytest.raises(ImproperlyConfiguredError), + ): + idr_server_api_v1_dialect_factory() - with patch("app.settings", {"REMOTE_SERVER": {"host": "http://test.com"}}): - with pytest.raises(ImproperlyConfiguredError): - idr_server_api_v1_dialect_factory() + with ( + patch("app.settings", {"REMOTE_SERVER": {"host": "http://test.com"}}), + pytest.raises(ImproperlyConfiguredError), + ): + idr_server_api_v1_dialect_factory() class TestIDRServerAPIv1(TestCase): @@ -100,12 +107,9 @@ def test_response_to_auth_return_value(self) -> None: """ token: str = "a_very_secure_token!!@@" - self.assertDictEqual( - self._api_dialect.response_to_auth( - json.dumps({"token": token}).encode("ascii") - ), - {"Authorization": "Token %s" % token}, - ) + assert self._api_dialect.response_to_auth( + json.dumps({"token": token}).encode("ascii"), + ) == {"Authorization": "Token %s" % token} def test_fetch_data_source_extracts_return_value(self) -> None: """ @@ -115,7 +119,8 @@ def test_fetch_data_source_extracts_return_value(self) -> None: data_source = FakeDataSourceFactory() data_source_type = FakeDataSourceTypeFactory() request_params = self._api_dialect.fetch_data_source_extracts( - data_source_type=data_source_type, data_source=data_source + data_source_type=data_source_type, + data_source=data_source, ) assert request_params # Should not be None or empty. @@ -131,15 +136,15 @@ def test_response_to_data_source_extracts_return_value(self) -> None: data_source_type = FakeDataSourceTypeFactory() response_content = {"results": []} - self.assertListEqual( + assert ( list( self._api_dialect.response_to_data_source_extracts( json.dumps(response_content).encode("ascii"), data_source_type=data_source_type, data_source=data_source, - ) - ), - [], + ), + ) + == [] ) def test_fetch_data_sources_return_value(self) -> None: @@ -149,7 +154,7 @@ def test_fetch_data_sources_return_value(self) -> None: """ data_source_type = FakeDataSourceTypeFactory() request_params = self._api_dialect.fetch_data_sources( - data_source_type=data_source_type + data_source_type=data_source_type, ) assert request_params # Should not be None or empty. 
@@ -164,14 +169,14 @@ def test_response_to_data_sources_return_value(self) -> None: data_source_type = FakeDataSourceTypeFactory() response_content = {"results": []} - self.assertListEqual( + assert ( list( self._api_dialect.response_to_data_sources( json.dumps(response_content).encode("ascii"), data_source_type=data_source_type, - ) - ), - [], + ), + ) + == [] ) def test_mark_upload_as_complete_return_value(self) -> None: @@ -181,7 +186,7 @@ def test_mark_upload_as_complete_return_value(self) -> None: """ upload_meta = FakeUploadMetadataFactory() request_params = self._api_dialect.mark_upload_as_complete( - upload_metadata=upload_meta + upload_metadata=upload_meta, ) assert request_params # Should not be None or empty. diff --git a/tests/lib/transports/http/test_http.py b/tests/lib/transports/http/test_http.py index 1a2a882..1e03ce6 100644 --- a/tests/lib/transports/http/test_http.py +++ b/tests/lib/transports/http/test_http.py @@ -44,30 +44,40 @@ def test_http_transport_factory_with_invalid_settings_fails(self) -> None: """ config1: dict[str, Any] = dict(self._app_config) del config1[self._http_config_key] - with patch("app.settings", config1): - with pytest.raises(ImproperlyConfiguredError): - http_transport_factory() + with ( + patch("app.settings", config1), + pytest.raises(ImproperlyConfiguredError), + ): + http_transport_factory() config2: dict[str, Any] = dict(self._app_config) config2[self._http_config_key] = 3 - with patch("app.settings", config2): - with pytest.raises(ImproperlyConfiguredError): - http_transport_factory() + with ( + patch("app.settings", config2), + pytest.raises(ImproperlyConfiguredError), + ): + http_transport_factory() config3: dict[str, Any] = dict(self._app_config) del config3[self._http_config_key][self._api_dialect_config_key] - with patch("app.settings", config3): - with pytest.raises(ImproperlyConfiguredError): - http_transport_factory() + with ( + patch("app.settings", config3), + pytest.raises(ImproperlyConfiguredError), + ): + http_transport_factory() config4: dict[str, Any] = dict(self._app_config) config4[self._http_config_key][self._api_dialect_config_key] = None - with patch("app.settings", config4): - with pytest.raises(ImproperlyConfiguredError): - http_transport_factory() + with ( + patch("app.settings", config4), + pytest.raises(ImproperlyConfiguredError), + ): + http_transport_factory() config5: dict[str, Any] = dict(self._app_config) config5[self._http_config_key][self._api_dialect_config_key] = "12345" - with patch("app.settings", config5): - with pytest.raises(ImproperlyConfiguredError): - http_transport_factory() + with ( + patch("app.settings", config5), + pytest.raises(ImproperlyConfiguredError), + ): + http_transport_factory() diff --git a/tests/lib/transports/http/test_http_transport.py b/tests/lib/transports/http/test_http_transport.py index c75eb47..ac1373b 100644 --- a/tests/lib/transports/http/test_http_transport.py +++ b/tests/lib/transports/http/test_http_transport.py @@ -39,7 +39,9 @@ def setUp(self) -> None: super().setUp() self._api_dialect: HTTPAPIDialect = FakeHTTPAPIDialectFactory() self._transport: HTTPTransport = HTTPTransport( - api_dialect=self._api_dialect, connect_timeout=10, read_timeout=10 + api_dialect=self._api_dialect, + connect_timeout=10, + read_timeout=10, ) def tearDown(self) -> None: @@ -50,7 +52,7 @@ def test_an_api_dialect_is_required_at_instantiation(self) -> None: Assert that the `api_dialect` parameter is a required parameter during the ``HTTPTransport`` class instantiation. 
""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match='"api_dialect" MUST be'): HTTPTransport(api_dialect=None) # type: ignore def test_dispose_returns_cleanly(self) -> None: @@ -73,15 +75,16 @@ def test_a_disposed_transport_raises_expected_errors(self) -> None: s.return_value = self._mock_response_factory() with pytest.raises(TransportClosedError): self._transport.fetch_data_sources( - data_source_type=data_source_type + data_source_type=data_source_type, ) with pytest.raises(TransportClosedError): self._transport.fetch_data_source_extracts( - data_source_type=data_source_type, data_source=data_source + data_source_type=data_source_type, + data_source=data_source, ) with pytest.raises(TransportClosedError): self._transport.mark_upload_as_complete( - upload_metadata=upload_meta + upload_metadata=upload_meta, ) s.return_value = self._mock_response_factory(status_code=201) @@ -109,9 +112,10 @@ def test_fetch_data_source_extracts_returns_expected_value(self) -> None: with patch("requests.sessions.Session.request", autospec=True) as s: s.return_value = self._mock_response_factory() results = self._transport.fetch_data_source_extracts( - data_source_type=data_source_type, data_source=data_source + data_source_type=data_source_type, + data_source=data_source, ) - self.assertListEqual(list(results), []) + assert list(results) == [] def test_fetch_data_sources_returns_expected_value(self) -> None: """ @@ -122,7 +126,7 @@ def test_fetch_data_sources_returns_expected_value(self) -> None: with patch("requests.sessions.Session.request", autospec=True) as s: s.return_value = self._mock_response_factory() results = self._transport.fetch_data_sources(data_source_type) - self.assertListEqual(list(results), []) + assert list(results) == [] def test_mark_upload_as_complete_exits_cleanly_on_valid_data(self) -> None: """ @@ -133,7 +137,7 @@ def test_mark_upload_as_complete_exits_cleanly_on_valid_data(self) -> None: with patch("requests.sessions.Session.request", autospec=True) as s: s.return_value = self._mock_response_factory() self._transport.mark_upload_as_complete( - upload_metadata=upload_meta + upload_metadata=upload_meta, ) def test_post_upload_chunk_returns_expected_value(self) -> None: @@ -184,10 +188,12 @@ def test_transport_authentication_errors(self) -> None: ConnectionError, ] with pytest.raises( - TransportError, match="Error authenticating the client" + TransportError, + match="Error authenticating the client", ) as exc_info: self._transport.fetch_data_source_extracts( - data_source_type=data_source_type, data_source=data_source + data_source_type=data_source_type, + data_source=data_source, ) assert isinstance(exc_info.value.__cause__, ConnectionError) @@ -210,7 +216,8 @@ def test_transport_re_authentication_failure(self) -> None: match="Unable to authenticate the client on IDR Server", ): self._transport.fetch_data_source_extracts( - data_source_type=data_source_type, data_source=data_source + data_source_type=data_source_type, + data_source=data_source, ) def test_transport_re_authentication_works(self) -> None: @@ -232,9 +239,10 @@ def test_transport_re_authentication_works(self) -> None: self._mock_response_factory(), ] results = self._transport.fetch_data_source_extracts( - data_source_type=data_source_type, data_source=data_source + data_source_type=data_source_type, + data_source=data_source, ) - self.assertListEqual(list(results), []) + assert list(results) == [] def test_request_errors(self) -> None: """ @@ -250,7 +258,8 @@ def test_request_errors(self) -> None: 
match="Unable to make a request to the remote server", ) as exc_info: self._transport.fetch_data_source_extracts( - data_source_type=data_source_type, data_source=data_source + data_source_type=data_source_type, + data_source=data_source, ) assert isinstance(exc_info.value.__cause__, ChunkedEncodingError) @@ -266,7 +275,8 @@ def test_request_failure(self) -> None: match="Expected response status 200, but got 500", ): self._transport.fetch_data_source_extracts( - data_source_type=data_source_type, data_source=data_source + data_source_type=data_source_type, + data_source=data_source, ) @staticmethod diff --git a/tests/test_app.py b/tests/test_app.py index 6c6ac69..1c6eee5 100644 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -19,7 +19,7 @@ class FakeDataSourceTypesConfigInitializer(SettingInitializer): def setting(self) -> str: return "SUPPORTED_DATA_SOURCE_TYPES" - def execute(self, an_input: Any) -> Any: + def execute(self, an_input: Any) -> Any: # noqa: ANN401 # Do nothing return an_input @@ -45,7 +45,7 @@ class TestAppModule(TestCase): def setUp(self) -> None: super().setUp() - self._default_config: dict[str, Any] = dict() + self._default_config: dict[str, Any] = {} self._some_state: int = 0 def test_valid_config_is_successful(self) -> None: @@ -94,7 +94,7 @@ def test_missing_default_transport_factory_setting_is_ok(self) -> None: Assert that a missing setting for the default transport factory is allowed. """ - config: dict[str, Any] = dict() + config: dict[str, Any] = {} app.setup(initial_settings=config) assert app.settings.get("DEFAULT_TRANSPORT_FACTORY") is None diff --git a/tests/use_cases/test_fetch_metadata.py b/tests/use_cases/test_fetch_metadata.py index fd19451..e443c3b 100644 --- a/tests/use_cases/test_fetch_metadata.py +++ b/tests/use_cases/test_fetch_metadata.py @@ -1,8 +1,7 @@ -from collections.abc import Sequence +from typing import TYPE_CHECKING from unittest import TestCase import app -from app.core import DataSource, DataSourceType, Transport from app.use_cases.fetch_metadata import ( DoFetchDataSources, DoFetchExtractMetadata, @@ -16,6 +15,11 @@ ) from tests.factories import config_factory +if TYPE_CHECKING: + from collections.abc import Sequence + + from app.core import DataSource, DataSourceType, Transport + class TestDoFetchDataSources(TestCase): """Tests for the :class:`DoFetchDataSources` class.""" @@ -25,10 +29,10 @@ def setUp(self) -> None: app.setup(initial_settings=config_factory()) self._data_source_type: DataSourceType = FakeDataSourceTypeFactory() self._instance: DoFetchDataSources = DoFetchDataSources( - data_source_type=self._data_source_type + data_source_type=self._data_source_type, ) self._transport: Transport = FakeTransportFactory( - fetch_data_sources_count=5 + fetch_data_sources_count=5, ) def test_execute_return_value(self) -> None: @@ -55,10 +59,10 @@ def setUp(self) -> None: app.setup(initial_settings=config_factory()) self._data_source: DataSource = FakeDataSourceFactory() self._instance: DoFetchExtractMetadata = DoFetchExtractMetadata( - data_source=self._data_source + data_source=self._data_source, ) self._transport: Transport = FakeTransportFactory( - fetch_data_source_extracts_count=5 + fetch_data_source_extracts_count=5, ) def test_execute_return_value(self) -> None: @@ -88,14 +92,14 @@ def setUp(self) -> None: self._data_source_types: Sequence[DataSourceType] self._data_source_types = tuple( FakeDataSourceTypeFactory.create_batch( - size=self._max_data_source_types - ) + size=self._max_data_source_types, + ), ) self._transport: 
Transport = FakeTransportFactory( - fetch_data_sources_count=self._max_data_sources + fetch_data_sources_count=self._max_data_sources, ) self._instance: FetchDataSources = FetchDataSources( - transport=self._transport + transport=self._transport, ) def test_execute_return_value(self) -> None: @@ -127,13 +131,13 @@ def setUp(self) -> None: self._max_extracts: int = 7 self._data_sources: Sequence[DataSource] self._data_sources = tuple( - FakeDataSourceFactory.create_batch(size=self._max_data_sources) + FakeDataSourceFactory.create_batch(size=self._max_data_sources), ) self._transport: Transport = FakeTransportFactory( - fetch_data_source_extracts_count=self._max_extracts + fetch_data_source_extracts_count=self._max_extracts, ) self._instance: FetchExtractMetadata = FetchExtractMetadata( - transport=self._transport + transport=self._transport, ) def test_execute_return_value(self) -> None: diff --git a/tests/use_cases/test_run_extraction.py b/tests/use_cases/test_run_extraction.py index bf48171..87bc31f 100644 --- a/tests/use_cases/test_run_extraction.py +++ b/tests/use_cases/test_run_extraction.py @@ -1,7 +1,6 @@ -from collections.abc import Sequence +from typing import TYPE_CHECKING from unittest import TestCase -from app.core import DataSource, ExtractMetadata from app.use_cases.run_extraction import ( DoExtract, GroupSiblingExtracts, @@ -12,6 +11,11 @@ FakeExtractMetadataFactory, ) +if TYPE_CHECKING: + from collections.abc import Sequence + + from app.core import DataSource, ExtractMetadata + class TestDoExtract(TestCase): """Tests for the :class:`DoExtract` class.""" @@ -39,14 +43,16 @@ def setUp(self) -> None: self._ds1_extracts: Sequence[ExtractMetadata] self._ds1_extracts = tuple( FakeExtractMetadataFactory.create_batch( - size=5, data_source=self._data_source1 - ) + size=5, + data_source=self._data_source1, + ), ) self._ds2_extracts: Sequence[ExtractMetadata] self._ds2_extracts = tuple( FakeExtractMetadataFactory.create_batch( - size=7, data_source=self._data_source2 - ) + size=7, + data_source=self._data_source2, + ), ) self._all_extracts: Sequence[ExtractMetadata] self._all_extracts = self._ds1_extracts + self._ds2_extracts @@ -60,8 +66,8 @@ def test_execute_return_value(self) -> None: assert len(results) == 2 assert results[0][0] == self._data_source1 assert results[1][0] == self._data_source2 - self.assertTupleEqual(tuple(results[0][1]), tuple(self._ds1_extracts)) - self.assertTupleEqual(tuple(results[1][1]), tuple(self._ds2_extracts)) + assert tuple(results[0][1]) == tuple(self._ds1_extracts) + assert tuple(results[1][1]) == tuple(self._ds2_extracts) class TestRunDataSourceExtracts(TestGroupSiblingExtracts): diff --git a/tests/use_cases/test_upload_extracts.py b/tests/use_cases/test_upload_extracts.py index 76c64dc..d590ca0 100644 --- a/tests/use_cases/test_upload_extracts.py +++ b/tests/use_cases/test_upload_extracts.py @@ -1,9 +1,7 @@ -from collections.abc import Sequence -from typing import Any +from typing import TYPE_CHECKING, Any from unittest import TestCase from unittest.mock import patch -from app.core import ExtractMetadata, Transport, UploadMetadata from app.lib import Config from app.use_cases.upload_extracts import ( DoMarkUploadAsComplete, @@ -22,6 +20,11 @@ FakeUploadMetadataFactory, ) +if TYPE_CHECKING: + from collections.abc import Sequence + + from app.core import ExtractMetadata, Transport, UploadMetadata + class TestDoMarkUploadAsComplete(TestCase): """Tests for the :class:`DoMarkUploadAsComplete` class.""" @@ -30,7 +33,7 @@ def setUp(self) -> None: 
super().setUp() self._upload_meta: UploadMetadata = FakeUploadMetadataFactory() self._instance: DoMarkUploadAsComplete = DoMarkUploadAsComplete( - upload=self._upload_meta + upload=self._upload_meta, ) self._transport: Transport = FakeTransportFactory() @@ -77,7 +80,7 @@ def setUp(self) -> None: self._org_unit_name: str = "Test Facility" self._transport: Transport = FakeTransportFactory() self._instance: DoPostUpload = DoPostUpload( - extract=(self._extract_meta, self._data) + extract=(self._extract_meta, self._data), ) def test_execute_return_value(self) -> None: @@ -107,11 +110,15 @@ def setUp(self) -> None: ) self._extract_metas: Sequence[ExtractMetadata] self._extract_metas = FakeExtractMetadataFactory.create_batch( - size=self._max_items + size=self._max_items, ) self._extraction_result: Sequence[RunExtractionResult] = tuple( (_extract, _data) - for _extract, _data in zip(self._extract_metas, self._extract_data) + for _extract, _data in zip( + self._extract_metas, + self._extract_data, + strict=True, + ) ) self._org_unit_code: str = "12345" self._org_unit_name: str = "Test Facility" @@ -153,7 +160,11 @@ def setUp(self) -> None: ) self._posted_uploads: Sequence[tuple[UploadMetadata, Any]] = tuple( (_upload, _data) - for _upload, _data in zip(self._upload_metas, self._extract_data) + for _upload, _data in zip( + self._upload_metas, + self._extract_data, + strict=True, + ) ) def test_execute_return_value(self) -> None: @@ -172,7 +183,7 @@ def setUp(self) -> None: super().setUp() self._prepared_chunks: Sequence[tuple[UploadMetadata, Sequence[bytes]]] self._prepared_chunks = PrepareUploadChunks().execute( - self._posted_uploads + self._posted_uploads, ) self._instance: PostUploadChunks = PostUploadChunks(self._transport) @@ -191,11 +202,11 @@ class TestMarkUploadAsComplete(TestPostUploadChunks): def setUp(self) -> None: super().setUp() self._instance: MarkUploadsAsComplete = MarkUploadsAsComplete( - transport=self._transport + transport=self._transport, ) self._upload_results: Sequence[UploadExtractResult] self._upload_results = PostUploadChunks(self._transport).execute( - self._prepared_chunks + self._prepared_chunks, ) def test_execute_return_value(self) -> None: diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 3378cf5..0000000 --- a/tox.ini +++ /dev/null @@ -1,67 +0,0 @@ -[tox] -env_list = {py310, py311}, coveralls, package -isolated_build = true -no_package = true -requires = - tox>4 -skip_missing_interpreters = true - - -[gh-actions] -python = - 3.10: py310 - 3.11: py311, coveralls - - -[flake8] -exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,venv -max_line_length = 79 -max_complexity = 7 - - -[testenv] -commands = - python -m app --version - pyright . - flake8 app/ - coverage erase - pytest --cov=app -n auto --durations=100 {posargs} - coverage html -deps = - -r{toxinidir}{/}requirements{/}test.txt -description = test and lint the project -download = true -pass_env = - MYSQL_TEST_DB_HOST - MYSQL_TEST_DB_NAME - MYSQL_TEST_DB_PASSWORD - MYSQL_TEST_DB_PORT - MYSQL_TEST_DB_USERNAME -set_env = - PYTHONPATH = {toxinidir} - - -;This is only configured to be run on GITHUB only. It will fail if ran locally. 
-[testenv:coveralls] -commands = - coveralls --service=github -description = submit coverage results to coverall.io -pass_env = - COVERALLS_REPO_TOKEN - GITHUB_* - MYSQL_TEST_DB_HOST - MYSQL_TEST_DB_NAME - MYSQL_TEST_DB_PASSWORD - MYSQL_TEST_DB_PORT - MYSQL_TEST_DB_USERNAME - - -[testenv:package] -allowlist_externals = {envdir}{/}idr_client -commands = - pyinstaller idr_client.spec - staticx dist/idr_client_temp {envdir}{/}idr_client - {envdir}{/}idr_client --version -deps = - -r{toxinidir}{/}requirements{/}build.txt -description = build an executable binary of the project
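
A note on the `match=` arguments added to the `pytest.raises` calls in the HTTP transport tests above: `match` is applied with `re.search` against the string form of the raised exception, so these tests now pin both the exception type and a stable fragment of its message. A minimal sketch of the idiom, using a made-up `configure` function rather than anything from this repository:

    import re

    import pytest

    def configure(api_dialect: str | None) -> None:
        # Hypothetical stand-in for a constructor that validates its input.
        if api_dialect is None:
            raise ValueError('"api_dialect" MUST be provided.')

    def test_configure_rejects_missing_dialect() -> None:
        # `match` is a regex pattern; re.escape keeps punctuation literal.
        with pytest.raises(ValueError, match=re.escape('"api_dialect" MUST be')):
            configure(None)

Because `match` only needs to find the pattern somewhere in the message, matching on a stable prefix is enough; the rest of the text can change without breaking the test.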
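The test modules above also move imports that are only needed for annotations (`Sequence`, `DataSource`, `Transport`, and friends) into an `if TYPE_CHECKING:` block, so they are no longer imported at runtime. A minimal sketch of the pattern, with illustrative names only:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Evaluated only by static type checkers, never at runtime.
        from collections.abc import Sequence

    def first_line(lines: "Sequence[str]") -> str:
        # Local variable annotations are never evaluated at runtime,
        # so the bare Sequence name below cannot raise NameError.
        result: Sequence[str] = lines
        return result[0]

One caveat: signature annotations are evaluated at definition time by default, so they must be quoted (or deferred with `from __future__ import annotations`); local variable annotations, like the ones inside these `setUp` methods, are never evaluated, which is why the pattern is safe here.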
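Finally, the `strict=True` arguments added to the `zip()` calls in tests/use_cases/test_upload_extracts.py turn silent truncation into an error when the paired iterables disagree on length (available since Python 3.10). A quick illustration with throwaway data:

    metas = ["meta-1", "meta-2", "meta-3"]
    data = ["data-1", "data-2"]  # one item short

    # Plain zip() stops at the shorter input and hides the mismatch:
    assert list(zip(metas, data)) == [("meta-1", "data-1"), ("meta-2", "data-2")]

    # zip(..., strict=True) raises instead of truncating:
    try:
        list(zip(metas, data, strict=True))
    except ValueError as exc:
        print(exc)  # e.g. "zip() argument 2 is shorter than argument 1"

For fixtures that pair extract metadata with extract data one-to-one, failing fast is preferable to a test that quietly exercises fewer items than intended.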