diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml
index 294a26b..ddddcec 100644
--- a/.github/workflows/python-app.yml
+++ b/.github/workflows/python-app.yml
@@ -45,6 +45,10 @@ jobs:
         # exit-zero treats all errors as warnings.
         poetry run flake8 . --count --exit-zero --statistics
 
+    - name: Typecheck with mypy
+      run: |
+        poetry run mypy mockafka tests || true
+
     - name: Test with pytest
       run: |
         poetry run pytest --cov=./ --cov-report=xml
diff --git a/mockafka/admin_client.py b/mockafka/admin_client.py
index 6868823..a9efb8a 100644
--- a/mockafka/admin_client.py
+++ b/mockafka/admin_client.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from confluent_kafka.cimpl import NewTopic, NewPartitions
+from confluent_kafka.cimpl import NewTopic, NewPartitions  # type: ignore[import-untyped]
 
 from mockafka.cluster_metadata import ClusterMetadata
 from mockafka.kafka_store import KafkaStore
diff --git a/mockafka/aiokafka/aiokafka_admin_client.py b/mockafka/aiokafka/aiokafka_admin_client.py
index c17b252..ab54e2b 100644
--- a/mockafka/aiokafka/aiokafka_admin_client.py
+++ b/mockafka/aiokafka/aiokafka_admin_client.py
@@ -2,7 +2,7 @@
 
 from typing import Dict
 
-from aiokafka.admin import NewTopic, NewPartitions
+from aiokafka.admin import NewTopic, NewPartitions  # type: ignore[import-untyped]
 
 from mockafka.kafka_store import KafkaStore
 
diff --git a/mockafka/aiokafka/aiokafka_consumer.py b/mockafka/aiokafka/aiokafka_consumer.py
index 98bd257..17b4be3 100644
--- a/mockafka/aiokafka/aiokafka_consumer.py
+++ b/mockafka/aiokafka/aiokafka_consumer.py
@@ -2,6 +2,7 @@
 
 import random
 from copy import deepcopy
+from typing import Any
 
 from mockafka.kafka_store import KafkaStore
 
@@ -34,18 +35,18 @@ class FakeAIOKafkaConsumer:
     - getmany(): Currently just calls getone().
     """
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
         self.kafka = KafkaStore()
-        self.consumer_store = {}
+        self.consumer_store: dict[str, int] = {}
         self.subscribed_topic: list = []
 
-    async def start(self):
+    async def start(self) -> None:
         self.consumer_store = {}
-        self.subscribed_topic: list = []
+        self.subscribed_topic = []
 
-    async def stop(self):
+    async def stop(self) -> None:
         self.consumer_store = {}
-        self.subscribed_topic: list = []
+        self.subscribed_topic = []
 
     async def commit(self):
         for item in self.consumer_store:
diff --git a/mockafka/cluster_metadata.py b/mockafka/cluster_metadata.py
index 04d11a1..8ab98a4 100644
--- a/mockafka/cluster_metadata.py
+++ b/mockafka/cluster_metadata.py
@@ -14,7 +14,7 @@ class ClusterMetadata(object):
     This class is typically not user instantiated.
     """
 
-    def __init__(self, topic: str = None):
+    def __init__(self, topic: str | None = None):
         self.kafka = KafkaStore()
         self.cluster_id = "test"
         self.controller_id = 1
diff --git a/mockafka/conumser.py b/mockafka/conumser.py
index 9d0eead..38ec5b7 100644
--- a/mockafka/conumser.py
+++ b/mockafka/conumser.py
@@ -2,6 +2,7 @@
 
 import random
 from copy import deepcopy
+from typing import Any
 
 from mockafka.cluster_metadata import ClusterMetadata
 from mockafka.kafka_store import KafkaStore
@@ -40,7 +41,7 @@ class FakeConsumer(object):
         partitions) -> None: Incrementally unassign partitions (unsupported in mockafka).
     """
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
         """
         Initialize the FakeConsumer.
 
         Parameters:
         - args: Additional arguments (unused).
@@ -49,7 +50,7 @@ def __init__(self, *args, **kwargs):
         - kwargs: Additional keyword arguments (unused).
         """
         self.kafka = KafkaStore()
-        self.consumer_store = {}
+        self.consumer_store: dict[str, int] = {}
         self.subscribed_topic: list = []
 
     def consume(self, num_messages=1, *args, **kwargs) -> list[Message]:
@@ -83,7 +84,7 @@ def close(self, *args, **kwargs):
         self.consumer_store = {}
         self.subscribed_topic = []
 
-    def commit(self, message: Message = None, *args, **kwargs):
+    def commit(self, message: Message | None = None, *args, **kwargs):
         """
         Commit offsets for consumed messages.
 
diff --git a/mockafka/decorators/asetup_kafka.py b/mockafka/decorators/asetup_kafka.py
index 70343d1..860fd21 100644
--- a/mockafka/decorators/asetup_kafka.py
+++ b/mockafka/decorators/asetup_kafka.py
@@ -2,12 +2,12 @@
 
 from functools import wraps
 
-from aiokafka.admin import NewTopic
+from aiokafka.admin import NewTopic  # type: ignore[import-untyped]
 
 from mockafka.aiokafka import FakeAIOKafkaAdmin
+from mockafka.decorators.typing import TopicConfig
 
-
-def asetup_kafka(topics: [dict[str, str]], clean: bool = False):
+def asetup_kafka(topics: list[TopicConfig], clean: bool = False):
     """
     asetup_kafka is a decorator for setting up mock Kafka topics using a FakeAIOKafkaAdminClient.
diff --git a/mockafka/decorators/bulk_producer.py b/mockafka/decorators/bulk_producer.py
index 0dd4c58..0f2c0cf 100644
--- a/mockafka/decorators/bulk_producer.py
+++ b/mockafka/decorators/bulk_producer.py
@@ -1,15 +1,17 @@
 from __future__ import annotations
 
 from functools import wraps
+
 from mockafka import FakeProducer
+from mockafka.decorators.typing import MessageDict
 
 
-def bulk_produce(list_of_messages: list[dict[str, str]]):
+def bulk_produce(list_of_messages: list[MessageDict]):
     """
     A decorator for bulk-producing messages using a FakeProducer.
 
     Parameters:
-    - list_of_messages (list[dict[str, str]]): A list of dictionaries containing message details.
+    - list_of_messages (list[dict]): A list of dictionaries containing message details.
       Each dictionary should have the following optional keys:
       - 'value': The value of the message.
diff --git a/mockafka/decorators/setup_kafka.py b/mockafka/decorators/setup_kafka.py
index 509de05..c266cce 100644
--- a/mockafka/decorators/setup_kafka.py
+++ b/mockafka/decorators/setup_kafka.py
@@ -1,15 +1,17 @@
 from __future__ import annotations
 
 from functools import wraps
+
 from mockafka.admin_client import FakeAdminClientImpl, NewTopic
+from mockafka.decorators.typing import TopicConfig
 
 
-def setup_kafka(topics: [dict[str, str]], clean: bool = False):
+def setup_kafka(topics: list[TopicConfig], clean: bool = False):
     """
     A decorator for setting up Mockafka with specified topics using a FakeAdminClient.
 
     Parameters:
-    - topics (list[dict[str, str]]): A list of dictionaries containing topic details.
+    - topics (list[dict]): A list of dictionaries containing topic details.
       Each dictionary should have the keys 'topic' and 'partition'.
     - clean (bool): Option to have a clean Kafka (remove existing topics) or not.
diff --git a/mockafka/decorators/typing.py b/mockafka/decorators/typing.py
new file mode 100644
index 0000000..794ce35
--- /dev/null
+++ b/mockafka/decorators/typing.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+from typing import TypedDict
+from typing_extensions import NotRequired
+
+
+class MessageDict(TypedDict):
+    value: NotRequired[str]
+    key: NotRequired[str]
+    topic: NotRequired[str]
+    partition: NotRequired[int]
+    timestamp: NotRequired[int]
+    headers: NotRequired[dict]
+
+
+class TopicConfig(TypedDict):
+    topic: str
+    partition: int
diff --git a/mockafka/kafka_store.py b/mockafka/kafka_store.py
index 230236b..9b67d5d 100644
--- a/mockafka/kafka_store.py
+++ b/mockafka/kafka_store.py
@@ -18,7 +18,7 @@
 from __future__ import annotations
 
-from confluent_kafka import KafkaException
+from confluent_kafka import KafkaException  # type: ignore[import-untyped]
 
 from .message import Message
 from copy import deepcopy
 
@@ -30,7 +30,7 @@ class SingletonMeta(type):
 
-    _instances = {}
+    _instances: dict[type[SingletonMeta], SingletonMeta] = {}
 
     def __call__(cls, *args, **kwargs):
         if cls not in cls._instances or "clean" in kwargs.keys():
diff --git a/mockafka/message.py b/mockafka/message.py
index 5a4cadc..774623b 100644
--- a/mockafka/message.py
+++ b/mockafka/message.py
@@ -1,12 +1,12 @@
 from __future__ import annotations
 
-from typing import Optional
+from typing import Optional, Any
 
-from confluent_kafka import KafkaError
+from confluent_kafka import KafkaError  # type: ignore[import-untyped]
 
 
 class Message:
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
         self._headers: Optional[dict] = kwargs.get("headers", None)
         self._key: Optional[str] = kwargs.get("key", None)
         self._value: Optional[str] = kwargs.get("value", None)
diff --git a/mockafka/producer.py b/mockafka/producer.py
index 51d5e7f..b156ff6 100644
--- a/mockafka/producer.py
+++ b/mockafka/producer.py
@@ -8,7 +8,7 @@
 
 
 class FakeProducer(object):
-    def __init__(self, config: dict = None):
+    def __init__(self, config: dict | None = None):
         self.kafka = KafkaStore()
 
     def produce(self, topic, value=None, *args, **kwargs):
diff --git a/poetry.lock b/poetry.lock
index 6968391..b206d8b 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1386,13 +1386,13 @@ testing = ["build[virtualenv] (>=1.0.3)", "covdefaults (>=2.3)", "detect-test-po
 
 [[package]]
 name = "typing-extensions"
-version = "4.10.0"
+version = "4.12.2"
 description = "Backported and Experimental Type Hints for Python 3.8+"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"},
-    {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"},
+    {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+    {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
 ]
 
 [[package]]
@@ -1505,4 +1505,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.8,<4.0"
-content-hash = "aa3f5c161a600ec3a7478277fc2c3d8c84f897fcca9f8b4e110683c81530e219"
+content-hash = "e1de492eaff94c83f70f6612174e90912732da75f2b68b2391475ef48c285998"
diff --git a/pyproject.toml b/pyproject.toml
index f21b827..883128c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,6 +16,7 @@ confluent-kafka = ">= 1.9.2"
 aiokafka = ">=0.10,<0.12"
 pytest-cov = ">=4.1,<6.0"
 pytest-asyncio = "^0.23.5"
+typing-extensions = "^4.12.2"
 
 [tool.poetry.group.dev.dependencies]
 flake8 = "*"
diff --git a/setup.cfg b/setup.cfg
index e7bfb44..babab01 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -2,3 +2,7 @@
 max-complexity = 10
 # The GitHub editor is 127 chars wide
 max-line-length = 127
+
+
+[mypy]
+enable_error_code = ignore-without-code
diff --git a/tests/test_aiokafka/test_aiokafka_producer.py b/tests/test_aiokafka/test_aiokafka_producer.py
index be4c243..95ef24e 100644
--- a/tests/test_aiokafka/test_aiokafka_producer.py
+++ b/tests/test_aiokafka/test_aiokafka_producer.py
@@ -3,7 +3,7 @@
 from unittest import IsolatedAsyncioTestCase
 
 import pytest
-from aiokafka.admin import NewTopic
+from aiokafka.admin import NewTopic  # type: ignore[import-untyped]
 
 from mockafka import Message
 from mockafka.aiokafka.aiokafka_admin_client import FakeAIOKafkaAdmin
@@ -18,6 +18,10 @@ def setUp(self) -> None:
         self.producer = FakeAIOKafkaProducer()
         self.admin_client = FakeAIOKafkaAdmin()
 
+        self.topic = "test1"
+        self.key = "test_key"
+        self.value = "test_value"
+
     async def _create_mock_topic(self):
         await self.admin_client.create_topics(
             new_topics=[
@@ -27,18 +31,6 @@ async def _create_mock_topic(self):
             ]
         )
 
-    @pytest.fixture(autouse=True)
-    def topic(self):
-        self.topic = "test1"
-
-    @pytest.fixture(autouse=True)
-    def key(self):
-        self.key = "test_key"
-
-    @pytest.fixture(autouse=True)
-    def value(self):
-        self.value = "test_value"
-
     async def test_produce_failed_topic_not_exist(self):
         with pytest.raises(KafkaException):
             await self.producer.send(
@@ -69,7 +61,7 @@ async def test_produce_fail_for_none_partition(self):
                 partition=None,
             )
 
-    async def test_produce_once(self):
+    async def test_produce_once(self) -> None:
         await self._create_mock_topic()
         await self.producer.send(
             headers={},
@@ -88,7 +80,7 @@
         self.assertEqual(message.error(), None)
         self.assertEqual(message.latency(), None)
 
-    async def test_send_and_wait(self):
+    async def test_send_and_wait(self) -> None:
         await self._create_mock_topic()
 
         await self.producer.start()
diff --git a/tests/test_aiokafka/test_async_decorators.py b/tests/test_aiokafka/test_async_decorators.py
index 3ae7e17..5f10020 100644
--- a/tests/test_aiokafka/test_async_decorators.py
+++ b/tests/test_aiokafka/test_async_decorators.py
@@ -4,7 +4,7 @@
 from unittest import IsolatedAsyncioTestCase
 
 import pytest
-from aiokafka.admin import NewTopic
+from aiokafka.admin import NewTopic  # type: ignore[import-untyped]
 
 from mockafka import Message
 from mockafka.aiokafka import (
@@ -93,7 +93,7 @@ async def test_produce_with_kafka_setup_decorator(self):
     @aproduce(topic="test_topic", partition=5, key="test_", value="test_value1")
     @aproduce(topic="test_topic", partition=5, key="test_", value="test_value1")
     @aconsume(topics=["test_topic"])
-    async def test_consumer_decorator(self, message: Message = None):
+    async def test_consumer_decorator(self, message: Message | None = None):
         if message is None:
             return
diff --git a/tests/test_aiokafka/test_fake_aiokafka_admin_client.py b/tests/test_aiokafka/test_fake_aiokafka_admin_client.py
index 1aa95b3..d63850f 100644
--- a/tests/test_aiokafka/test_fake_aiokafka_admin_client.py
+++ b/tests/test_aiokafka/test_fake_aiokafka_admin_client.py
@@ -3,7 +3,7 @@
 from unittest import IsolatedAsyncioTestCase
 
 import pytest
-from aiokafka.admin import NewTopic, NewPartitions
+from aiokafka.admin import NewTopic, NewPartitions  # type: ignore[import-untyped]
 
 from mockafka.aiokafka.aiokafka_admin_client import FakeAIOKafkaAdmin
 from mockafka.kafka_store import KafkaStore
diff --git a/tests/test_async_mockafka.py b/tests/test_async_mockafka.py
index e165ec7..a9ee517 100644
--- a/tests/test_async_mockafka.py
+++ b/tests/test_async_mockafka.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 import pytest
-from aiokafka.admin import NewTopic
+from aiokafka.admin import NewTopic  # type: ignore[import-untyped]
 
 from mockafka import aproduce, asetup_kafka, aconsume
 from mockafka.aiokafka import (
diff --git a/tests/test_docrators.py b/tests/test_docrators.py
index ec5156f..f6d734e 100644
--- a/tests/test_docrators.py
+++ b/tests/test_docrators.py
@@ -3,10 +3,11 @@
 from mockafka import FakeConsumer, produce, bulk_produce, setup_kafka, Message
 from mockafka.admin_client import FakeAdminClientImpl, NewTopic
 from mockafka.producer import FakeProducer
+from mockafka.decorators.typing import MessageDict
 from mockafka.decorators.consumer import consume
 from unittest import TestCase
 
-sample_for_bulk_produce = [
+sample_for_bulk_produce: list[MessageDict] = [
     {
         "key": "test_key",
         "value": "test_value",
@@ -98,7 +99,7 @@ def test_produce_with_kafka_setup_decorator(self):
     @produce(topic="test_topic", partition=5, key="test_", value="test_value1")
     @produce(topic="test_topic", partition=5, key="test_", value="test_value1")
     @consume(topics=["test_topic"])
-    def test_consumer_decorator(self, message: Message = None):
+    def test_consumer_decorator(self, message: Message | None = None):
         if message is None:
             return
diff --git a/tests/test_kafka_store.py b/tests/test_kafka_store.py
index c76eaaa..a065eda 100644
--- a/tests/test_kafka_store.py
+++ b/tests/test_kafka_store.py
@@ -2,7 +2,7 @@
 
 from unittest import TestCase
 
-from parameterized import parameterized
+from parameterized import parameterized  # type: ignore[import-untyped]
 
 from mockafka.kafka_store import KafkaStore, KafkaException, Message
diff --git a/tests/test_producer.py b/tests/test_producer.py
index 24b5c07..ee17877 100644
--- a/tests/test_producer.py
+++ b/tests/test_producer.py
@@ -7,7 +7,7 @@
 from mockafka.admin_client import FakeAdminClientImpl, NewTopic
 from mockafka.kafka_store import KafkaStore, KafkaException
 from mockafka.producer import FakeProducer
-from confluent_kafka import Message
+from confluent_kafka import Message  # type: ignore[import-untyped]
 
 
 class TestFakeProducer(TestCase):
@@ -24,16 +24,8 @@ def setUp(self) -> None:
             ]
         )
 
-    @pytest.fixture(autouse=True)
-    def topic(self):
         self.topic = "test1"
-
-    @pytest.fixture(autouse=True)
-    def key(self):
         self.key = "test_key"
-
-    @pytest.fixture(autouse=True)
-    def value(self):
         self.value = "test_value"
 
     def test_produce_failed_topic_not_exist(self):
@@ -66,7 +58,7 @@ def test_produce_fail_for_none_partition(self):
                 partition=None,
            )
 
-    def test_produce_once(self):
+    def test_produce_once(self) -> None:
         self.producer.produce(
             headers={},
             key=self.key,
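
For reference, a minimal usage sketch (not part of the diff above) of how the new TopicConfig and MessageDict TypedDicts are intended to flow through the setup_kafka and bulk_produce decorators. The topic name, partition numbers, and payload values below are illustrative only. Note that with enable_error_code = ignore-without-code set in setup.cfg, mypy flags bare "# type: ignore" comments, which is why the ignores above carry the import-untyped error code.

    from __future__ import annotations

    from mockafka import bulk_produce, setup_kafka
    from mockafka.decorators.typing import MessageDict, TopicConfig

    # Both lists type-check against the new TypedDicts: TopicConfig requires
    # 'topic' and 'partition', while every MessageDict key is optional.
    topics: list[TopicConfig] = [{"topic": "test_topic", "partition": 4}]
    messages: list[MessageDict] = [
        {"topic": "test_topic", "partition": 0, "key": "k1", "value": "v1"},
    ]


    @setup_kafka(topics=topics)
    @bulk_produce(list_of_messages=messages)
    def test_bulk_produce_with_typed_payloads() -> None:
        # setup_kafka creates the topic and bulk_produce pre-loads the messages
        # on the in-memory Kafka store before the test body runs.
        ...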