From 36c0db3986bf2b2f4185edb44c3b3ef4b83e1223 Mon Sep 17 00:00:00 2001 From: marcosschroh Date: Fri, 13 Oct 2023 16:38:29 +0200 Subject: [PATCH] feat: add pydantic v2 support. Closes #415 --- dataclasses_avroschema/fields/base.py | 7 +- dataclasses_avroschema/fields/fields.py | 45 +- dataclasses_avroschema/fields/mapper.py | 7 + dataclasses_avroschema/pydantic/__init__.py | 4 +- dataclasses_avroschema/pydantic/fields.py | 2 +- dataclasses_avroschema/pydantic/main.py | 25 +- dataclasses_avroschema/pydantic/mapper.py | 4 +- dataclasses_avroschema/pydantic/parser.py | 16 +- .../pydantic/v1/__init__.py | 6 + dataclasses_avroschema/pydantic/v1/main.py | 99 ++++ dataclasses_avroschema/pydantic/v1/mapper.py | 40 ++ dataclasses_avroschema/pydantic/v1/parser.py | 25 + dataclasses_avroschema/schema_generator.py | 4 +- dataclasses_avroschema/utils.py | 13 +- docs/pydantic.md | 120 ++-- docs/pydantic_v1.md | 506 +++++++++++++++++ mkdocs.yml | 1 + poetry.lock | 207 ++++--- pyproject.toml | 2 +- tests/fake/const.py | 38 +- tests/fake/test_fake.py | 130 ++--- tests/fake/test_fake_pydantic.py | 222 ++++++++ tests/fake/test_fake_pydantic_v1.py | 223 ++++++++ tests/fields/consts.py | 5 - tests/fields/pydantic/__init__.py | 0 tests/fields/pydantic/consts.py | 20 + tests/fields/pydantic/test_pydantic.py | 102 ++++ tests/fields/pydantic/test_pydantic_v1.py | 100 ++++ tests/fields/test_complex_types.py | 68 --- tests/schemas/conftest.py | 42 -- tests/schemas/pydantic/conftest.py | 82 +++ tests/schemas/pydantic/test_pydantic.py | 528 ++++++++++++++++++ .../test_pydantic_v1.py} | 83 +-- .../test_primitive_types_serialization.py | 29 +- tests/serialization/test_pydantic_fields.py | 176 +++++- .../serialization/test_pydantic_v1_fields.py | 217 +++++++ 36 files changed, 2765 insertions(+), 433 deletions(-) create mode 100644 dataclasses_avroschema/pydantic/v1/__init__.py create mode 100644 dataclasses_avroschema/pydantic/v1/main.py create mode 100644 
dataclasses_avroschema/pydantic/v1/mapper.py create mode 100644 dataclasses_avroschema/pydantic/v1/parser.py create mode 100644 docs/pydantic_v1.md create mode 100644 tests/fake/test_fake_pydantic.py create mode 100644 tests/fake/test_fake_pydantic_v1.py create mode 100644 tests/fields/pydantic/__init__.py create mode 100644 tests/fields/pydantic/consts.py create mode 100644 tests/fields/pydantic/test_pydantic.py create mode 100644 tests/fields/pydantic/test_pydantic_v1.py create mode 100644 tests/schemas/pydantic/conftest.py create mode 100644 tests/schemas/pydantic/test_pydantic.py rename tests/schemas/{test_pydantic.py => pydantic/test_pydantic_v1.py} (85%) create mode 100644 tests/serialization/test_pydantic_v1_fields.py diff --git a/dataclasses_avroschema/fields/base.py b/dataclasses_avroschema/fields/base.py index 9b527407..840ee3bf 100644 --- a/dataclasses_avroschema/fields/base.py +++ b/dataclasses_avroschema/fields/base.py @@ -41,9 +41,10 @@ def avro_type(self) -> typing.Union[str, typing.Dict]: @staticmethod def _get_self_reference_type(a_type: typing.Any) -> str: - internal_type = a_type.__args__[0] - - return internal_type.__forward_arg__ + if getattr(a_type, "__args__", None): + internal_type = a_type.__args__[0] + return internal_type.__forward_arg__ + return a_type.__name__ @staticmethod def get_singular_name(name: str) -> str: diff --git a/dataclasses_avroschema/fields/fields.py b/dataclasses_avroschema/fields/fields.py index df262930..2f558006 100644 --- a/dataclasses_avroschema/fields/fields.py +++ b/dataclasses_avroschema/fields/fields.py @@ -476,6 +476,13 @@ def get_default_value(self) -> typing.Union[dataclasses._MISSING_TYPE, None]: return None return dataclasses.MISSING + def fake(self) -> typing.Any: + if getattr(self.type, "__args__", None): + # It means that self.type is `typing.Type['AType']`, and the argument is a string + # then we return None + return None + return self.type.fake() + @dataclasses.dataclass class 
DateField(ImmutableField): @@ -743,6 +750,7 @@ def get_avro_type(self) -> typing.Union[str, typing.List, typing.Dict]: if self.default is None: return [field_utils.NULL, record_type] + return record_type def default_to_avro(self, value: "schema_generator.AvroModel") -> typing.Dict: @@ -757,6 +765,7 @@ def fake(self) -> typing.Any: from .mapper import ( + ALL_TYPES_FIELD_CLASSES, CONTAINER_FIELDS_CLASSES, IMMUTABLE_FIELDS_CLASSES, LOGICAL_TYPES_FIELDS_CLASSES, @@ -764,7 +773,7 @@ def fake(self) -> typing.Any: ) LOGICAL_CLASSES = LOGICAL_TYPES_FIELDS_CLASSES.keys() -PYDANTIC_CUSTOM_CLASS_METHOD_NAMES = {"__get_validators__", "validate"} +PYDANTIC_CUSTOM_CLASS_METHOD_NAMES = {"__get_validators__", "__get_pydantic_core_schema__"} def field_factory( @@ -783,18 +792,15 @@ def field_factory( metadata = {} field_info = None - if native_type is None: native_type = type(None) - if native_type not in types.CUSTOM_TYPES and utils.is_annotated(native_type): + if utils.is_annotated(native_type) and native_type not in ALL_TYPES_FIELD_CLASSES: a_type, *extra_args = get_args(native_type) field_info = next((arg for arg in extra_args if isinstance(arg, types.FieldInfo)), None) - if field_info is None or a_type in (decimal.Decimal, types.Fixed): - # it means that it is a custom type defined by us - # `Int32`, `Float32`,`TimeMicro` or `DateTimeMicro` - # or a type Annotated with the end user - native_type = a_type + # it means that it is a custom type defined by us `Int32`, `Float32`,`TimeMicro` or `DateTimeMicro` + # or a known type Annotated with the end user + native_type = a_type if native_type in IMMUTABLE_FIELDS_CLASSES: klass = IMMUTABLE_FIELDS_CLASSES[native_type] @@ -823,7 +829,7 @@ def field_factory( parent=parent, ) - elif utils.is_self_referenced(native_type): + elif utils.is_self_referenced(native_type, parent): return SelfReferenceField( name=name, type=native_type, @@ -917,16 +923,19 @@ def field_factory( elif ( inspect.isclass(native_type) and not 
is_pydantic_model(native_type) - and all(method_name in dir(native_type) for method_name in PYDANTIC_CUSTOM_CLASS_METHOD_NAMES) + and any(method_name in dir(native_type) for method_name in PYDANTIC_CUSTOM_CLASS_METHOD_NAMES) ): - try: - # Build a field for the encoded type since that's what will be serialized - encoded_type = parent.__config__.json_encoders[native_type] - except KeyError: - raise ValueError( - f"Type {native_type} for field {name} must be listed in the pydantic 'json_encoders' config for {parent}" - " (or for one of the classes in its inheritance tree since pydantic configs are inherited)" - ) + if getattr(parent, "__config__", None): + try: + # Build a field for the encoded type since that's what will be serialized + encoded_type = parent.__config__.json_encoders[native_type] + except KeyError: + raise ValueError( + f"Type {native_type} for field {name} must be listed in the pydantic 'json_encoders' config for {parent}" + " (or for one of the classes in its inheritance tree since pydantic configs are inherited)" + ) + else: + encoded_type = parent.model_config["json_encoders"][native_type] # default_factory is not schema-friendly for Custom Classes since it could be returning # dynamically constructed values that should not be treated as defaults. 
For example, diff --git a/dataclasses_avroschema/fields/mapper.py b/dataclasses_avroschema/fields/mapper.py index ace8bdcc..fe20e4aa 100644 --- a/dataclasses_avroschema/fields/mapper.py +++ b/dataclasses_avroschema/fields/mapper.py @@ -47,3 +47,10 @@ decimal.Decimal: fields.DecimalField, types.Fixed: fields.FixedField, } + +ALL_TYPES_FIELD_CLASSES = { # type: ignore + **IMMUTABLE_FIELDS_CLASSES, + **CONTAINER_FIELDS_CLASSES, + **LOGICAL_TYPES_FIELDS_CLASSES, + **SPECIAL_ANNOTATED_TYPES, +} diff --git a/dataclasses_avroschema/pydantic/__init__.py b/dataclasses_avroschema/pydantic/__init__.py index 6b3e6805..13667950 100644 --- a/dataclasses_avroschema/pydantic/__init__.py +++ b/dataclasses_avroschema/pydantic/__init__.py @@ -2,5 +2,7 @@ from .mapper import PYDANTIC_INMUTABLE_FIELDS_CLASSES, PYDANTIC_LOGICAL_TYPES_FIELDS_CLASSES from dataclasses_avroschema.fields import mapper -mapper.IMMUTABLE_FIELDS_CLASSES.update(PYDANTIC_INMUTABLE_FIELDS_CLASSES) +mapper.IMMUTABLE_FIELDS_CLASSES.update(PYDANTIC_INMUTABLE_FIELDS_CLASSES) # type: ignore mapper.LOGICAL_TYPES_FIELDS_CLASSES.update(PYDANTIC_LOGICAL_TYPES_FIELDS_CLASSES) # type: ignore +mapper.ALL_TYPES_FIELD_CLASSES.update(PYDANTIC_INMUTABLE_FIELDS_CLASSES) +mapper.ALL_TYPES_FIELD_CLASSES.update(PYDANTIC_LOGICAL_TYPES_FIELDS_CLASSES) diff --git a/dataclasses_avroschema/pydantic/fields.py b/dataclasses_avroschema/pydantic/fields.py index d920a2bf..4c5f1204 100644 --- a/dataclasses_avroschema/pydantic/fields.py +++ b/dataclasses_avroschema/pydantic/fields.py @@ -157,7 +157,7 @@ class PositiveFloatField(PydanticField): avro_type: typing.ClassVar[typing.Dict[str, str]] = {"type": DOUBLE, "pydantic-class": "PositiveFloat"} def fake(self) -> float: - return fake.pyfloat(positive=True) + return fake.pyfloat(positive=True, min_value=1) class NegativeIntField(PydanticField): diff --git a/dataclasses_avroschema/pydantic/main.py b/dataclasses_avroschema/pydantic/main.py index de461050..0b3175ba 100644 --- 
a/dataclasses_avroschema/pydantic/main.py +++ b/dataclasses_avroschema/pydantic/main.py @@ -1,4 +1,5 @@ -from typing import Any, Callable, Optional, Type, TypeVar +import json +from typing import Any, Callable, Dict, Optional, Type, TypeVar from fastavro.validation import validate @@ -24,7 +25,7 @@ def generate_dataclass(cls: Type[CT]) -> Type[CT]: @classmethod def json_schema(cls: Type[CT], *args: Any, **kwargs: Any) -> str: - return cls.schema_json(*args, **kwargs) + return json.dumps(cls.model_json_schema(*args, **kwargs)) @classmethod def standardize_type(cls: Type[CT], data: dict) -> Any: @@ -33,14 +34,9 @@ def standardize_type(cls: Type[CT], data: dict) -> Any: user-defined pydantic json_encoders prior to passing values to the standard type conversion factory """ - encoders = cls.__config__.json_encoders - for k, v in data.items(): - v_type = type(v) - if v_type in encoders: - encode_method = encoders[v_type] - data[k] = encode_method(v) - elif isinstance(v, dict): - cls.standardize_type(v) + for value in data.values(): + if isinstance(value, dict): + cls.standardize_type(value) return standardize_custom_type(data) @@ -51,14 +47,17 @@ def asdict(self, standardize_factory: Optional[Callable[..., Any]] = None) -> Js It also doesn't provide the exclude, include, by_alias, etc. parameters that dict provides. 
""" - data = dict(self) - + data = self.model_dump() standardize_method = standardize_factory or self.standardize_type # the standardize called can be replaced if we have a custom implementation of asdict # for now I think is better to use the native implementation return standardize_method(data) + @classmethod + def parse_obj(cls: Type[CT], data: Dict) -> CT: + return cls.model_validate(obj=data) + def serialize(self, serialization_type: str = AVRO) -> bytes: """ Overrides the base AvroModel's serialize method to inject this @@ -91,7 +90,7 @@ def fake(cls: Type[CT], **data: Any) -> CT: payload = {field.name: field.fake() for field in cls.get_fields() if field.name not in data.keys()} payload.update(data) - return cls.parse_obj(payload) + return cls.model_validate(payload) @classmethod def _generate_parser(cls: Type[CT]) -> PydanticParser: diff --git a/dataclasses_avroschema/pydantic/mapper.py b/dataclasses_avroschema/pydantic/mapper.py index 1a747031..7cbc246e 100644 --- a/dataclasses_avroschema/pydantic/mapper.py +++ b/dataclasses_avroschema/pydantic/mapper.py @@ -1,4 +1,5 @@ import pydantic +from pydantic.v1 import ConstrainedInt from . 
import fields @@ -25,12 +26,13 @@ pydantic.PositiveFloat: fields.PositiveFloatField, pydantic.NegativeInt: fields.NegativeIntField, pydantic.PositiveInt: fields.PositiveIntField, - pydantic.ConstrainedInt: fields.ConstrainedIntField, + ConstrainedInt: fields.ConstrainedIntField, # ConstrainedIntValue is a dynamic type that needs to be referenced by qualified name # and cannot be imported directly "ConstrainedIntValue": fields.ConstrainedIntField, } + PYDANTIC_LOGICAL_TYPES_FIELDS_CLASSES = { pydantic.UUID1: fields.UUID1Field, pydantic.UUID3: fields.UUID3Field, diff --git a/dataclasses_avroschema/pydantic/parser.py b/dataclasses_avroschema/pydantic/parser.py index b2e09e80..048be74d 100644 --- a/dataclasses_avroschema/pydantic/parser.py +++ b/dataclasses_avroschema/pydantic/parser.py @@ -10,16 +10,16 @@ class PydanticParser(Parser): def parse_fields(self, exclude: typing.List) -> typing.List[Field]: return [ AvroField( - model_field.name, - model_field.annotation, + field_name, + field_info.rebuild_annotation(), default=dataclasses.MISSING - if model_field.required or model_field.default_factory - else model_field.default, - default_factory=model_field.default_factory, - metadata=model_field.field_info.extra.get("metadata", {}), + if field_info.is_required() or field_info.default_factory + else field_info.default, + default_factory=field_info.default_factory, + metadata=field_info.json_schema_extra.get("metadata", {}) if field_info.json_schema_extra else {}, model_metadata=self.metadata, parent=self.parent, ) - for model_field in self.type.__fields__.values() - if model_field.name not in exclude + for field_name, field_info in self.type.model_fields.items() + if field_name not in exclude and field_name != "model_config" ] diff --git a/dataclasses_avroschema/pydantic/v1/__init__.py b/dataclasses_avroschema/pydantic/v1/__init__.py new file mode 100644 index 00000000..6b3e6805 --- /dev/null +++ b/dataclasses_avroschema/pydantic/v1/__init__.py @@ -0,0 +1,6 @@ +from 
.main import AvroBaseModel # noqa: F401 I001 +from .mapper import PYDANTIC_INMUTABLE_FIELDS_CLASSES, PYDANTIC_LOGICAL_TYPES_FIELDS_CLASSES +from dataclasses_avroschema.fields import mapper + +mapper.IMMUTABLE_FIELDS_CLASSES.update(PYDANTIC_INMUTABLE_FIELDS_CLASSES) +mapper.LOGICAL_TYPES_FIELDS_CLASSES.update(PYDANTIC_LOGICAL_TYPES_FIELDS_CLASSES) # type: ignore diff --git a/dataclasses_avroschema/pydantic/v1/main.py b/dataclasses_avroschema/pydantic/v1/main.py new file mode 100644 index 00000000..ae4c72a5 --- /dev/null +++ b/dataclasses_avroschema/pydantic/v1/main.py @@ -0,0 +1,99 @@ +from typing import Any, Callable, Optional, Type, TypeVar + +from fastavro.validation import validate + +from dataclasses_avroschema import serialization +from dataclasses_avroschema.schema_generator import AVRO, AvroModel +from dataclasses_avroschema.types import JsonDict +from dataclasses_avroschema.utils import standardize_custom_type + +from .parser import PydanticV1Parser + +try: + from pydantic.v1 import BaseModel # pragma: no cover +except ImportError as ex: # pragma: no cover + raise Exception("pydantic must be installed in order to use AvroBaseModel") from ex # pragma: no cover + +CT = TypeVar("CT", bound="AvroBaseModel") + + +class AvroBaseModel(BaseModel, AvroModel): # type: ignore + @classmethod + def generate_dataclass(cls: Type[CT]) -> Type[CT]: + return cls + + @classmethod + def json_schema(cls: Type[CT], *args: Any, **kwargs: Any) -> str: + return cls.schema_json(*args, **kwargs) + + @classmethod + def standardize_type(cls: Type[CT], data: dict) -> Any: + """ + Standardization factory that converts data according to the + user-defined pydantic json_encoders prior to passing values + to the standard type conversion factory + """ + encoders = cls.__config__.json_encoders + for k, v in data.items(): + v_type = type(v) + if v_type in encoders: + encode_method = encoders[v_type] + data[k] = encode_method(v) + elif isinstance(v, dict): + cls.standardize_type(v) + + return 
standardize_custom_type(data) + + def asdict(self, standardize_factory: Optional[Callable[..., Any]] = None) -> JsonDict: + """ + Returns this model in dictionary form. This method differs from + pydantic's dict by converting all values to their Avro representation. + It also doesn't provide the exclude, include, by_alias, etc. + parameters that dict provides. + """ + data = dict(self) + + standardize_method = standardize_factory or self.standardize_type + + # the standardize called can be replaced if we have a custom implementation of asdict + # for now I think is better to use the native implementation + return standardize_method(data) + + def serialize(self, serialization_type: str = AVRO) -> bytes: + """ + Overrides the base AvroModel's serialize method to inject this + class's standardization factory method + """ + schema = self.avro_schema_to_python() + + return serialization.serialize( + self.asdict(standardize_factory=self.standardize_type), + schema, + serialization_type=serialization_type, + ) + + def validate_avro(self) -> bool: + """ + Validate that instance matches the avro schema + """ + schema = self.avro_schema_to_python() + return validate(self.asdict(), schema) + + @classmethod + def fake(cls: Type[CT], **data: Any) -> CT: + """ + Creates a fake instance of the model. 
+ + Attributes: + data: Dict[str, Any] represent the user values to use in the instance + """ + # only generate fakes for fields that were not provided in data + payload = {field.name: field.fake() for field in cls.get_fields() if field.name not in data.keys()} + payload.update(data) + + return cls.parse_obj(payload) + + @classmethod + def _generate_parser(cls: Type[CT]) -> PydanticV1Parser: + cls._metadata = cls.generate_metadata() + return PydanticV1Parser(type=cls._klass, metadata=cls._metadata, parent=cls._parent or cls) diff --git a/dataclasses_avroschema/pydantic/v1/mapper.py b/dataclasses_avroschema/pydantic/v1/mapper.py new file mode 100644 index 00000000..e6090755 --- /dev/null +++ b/dataclasses_avroschema/pydantic/v1/mapper.py @@ -0,0 +1,40 @@ +from pydantic import v1 + +from dataclasses_avroschema.pydantic import fields + +PYDANTIC_INMUTABLE_FIELDS_CLASSES = { + v1.FilePath: fields.FilePathField, + v1.DirectoryPath: fields.DirectoryPathField, + v1.EmailStr: fields.EmailStrField, + v1.NameEmail: fields.NameEmailField, + v1.AnyUrl: fields.AnyUrlField, + v1.AnyHttpUrl: fields.AnyHttpUrlField, + v1.HttpUrl: fields.HttpUrlField, + v1.FileUrl: fields.FileUrlField, + v1.PostgresDsn: fields.PostgresDsnField, + v1.CockroachDsn: fields.CockroachDsnField, + v1.AmqpDsn: fields.AmqpDsnField, + v1.RedisDsn: fields.RedisDsnField, + v1.MongoDsn: fields.MongoDsnField, + v1.KafkaDsn: fields.KafkaDsnField, + v1.SecretStr: fields.SecretStrField, + v1.IPvAnyAddress: fields.IPvAnyAddressField, + v1.IPvAnyInterface: fields.IPvAnyInterfaceField, + v1.IPvAnyNetwork: fields.IPvAnyNetworkField, + v1.NegativeFloat: fields.NegativeFloatField, + v1.PositiveFloat: fields.PositiveFloatField, + v1.NegativeInt: fields.NegativeIntField, + v1.PositiveInt: fields.PositiveIntField, + v1.ConstrainedInt: fields.ConstrainedIntField, + # ConstrainedIntValue is a dynamic type that needs to be referenced by qualified name + # and cannot be imported directly + "ConstrainedIntValue": 
fields.ConstrainedIntField, +} + + +PYDANTIC_LOGICAL_TYPES_FIELDS_CLASSES = { + v1.UUID1: fields.UUID1Field, + v1.UUID3: fields.UUID3Field, + v1.UUID4: fields.UUID4Field, + v1.UUID5: fields.UUID5Field, +} diff --git a/dataclasses_avroschema/pydantic/v1/parser.py b/dataclasses_avroschema/pydantic/v1/parser.py new file mode 100644 index 00000000..8f8cd54e --- /dev/null +++ b/dataclasses_avroschema/pydantic/v1/parser.py @@ -0,0 +1,25 @@ +import dataclasses +import typing + +from dataclasses_avroschema.fields.base import Field +from dataclasses_avroschema.fields.fields import AvroField +from dataclasses_avroschema.parser import Parser + + +class PydanticV1Parser(Parser): + def parse_fields(self, exclude: typing.List) -> typing.List[Field]: + return [ + AvroField( + model_field.name, + model_field.annotation, + default=dataclasses.MISSING + if model_field.required or model_field.default_factory + else model_field.default, + default_factory=model_field.default_factory, + metadata=model_field.field_info.extra.get("metadata", {}), + model_metadata=self.metadata, + parent=self.parent, + ) + for model_field in self.type.__fields__.values() + if model_field.name not in exclude + ] diff --git a/dataclasses_avroschema/schema_generator.py b/dataclasses_avroschema/schema_generator.py index 4a45d21a..3d4581b9 100644 --- a/dataclasses_avroschema/schema_generator.py +++ b/dataclasses_avroschema/schema_generator.py @@ -158,9 +158,9 @@ def to_dict(self) -> JsonDict: # and after that convert into python return self.asdict() - def to_json(self) -> str: + def to_json(self, **kwargs) -> str: data = serialization.to_json(self.asdict()) - return json.dumps(data) + return json.dumps(data, **kwargs) @classmethod def config(cls: Type[CT]) -> Config: diff --git a/dataclasses_avroschema/utils.py b/dataclasses_avroschema/utils.py index d7f4bb31..01affc6b 100644 --- a/dataclasses_avroschema/utils.py +++ b/dataclasses_avroschema/utils.py @@ -9,13 +9,14 @@ try: import pydantic # pragma: no cover + 
from pydantic import v1 except ImportError: # type: ignore # pragma: no cover pydantic = None # type: ignore # pragma: no cover def is_pydantic_model(klass: type) -> bool: if pydantic is not None: - return issubclass(klass, pydantic.BaseModel) + return issubclass(klass, v1.BaseModel) or issubclass(klass, pydantic.BaseModel) return False @@ -32,19 +33,22 @@ def is_union(a_type: type) -> bool: return isinstance(a_type, typing._GenericAlias) and a_type.__origin__ is typing.Union # type: ignore -def is_self_referenced(a_type: type) -> bool: +def is_self_referenced(a_type: type, parent: type) -> bool: """ Given a python type, return True if is self referenced, meaning that is instance of typing.ForwardRef, otherwise False Arguments: a_type (typing.Any): python type + parent (typing.Any) python type Returns: bool Example: - a_type = typing.Type["User"]] + class User(...) + a_type_with_type: typing.Type["User"]] = None + a_type: "User" = None is_self_referenced(a_type) # True """ @@ -52,7 +56,7 @@ def is_self_referenced(a_type: type) -> bool: isinstance(a_type, typing._GenericAlias) # type: ignore and a_type.__args__ and isinstance(a_type.__args__[0], typing.ForwardRef) # type: ignore - ) + ) or a_type == parent def is_annotated(a_type: typing.Any) -> bool: @@ -71,6 +75,7 @@ def standardize_custom_type(value: typing.Any) -> typing.Any: return value.value elif is_pydantic_model(type(value)): return standardize_custom_type(value.asdict()) + return value diff --git a/docs/pydantic.md b/docs/pydantic.md index 634301bd..ee3f98c9 100644 --- a/docs/pydantic.md +++ b/docs/pydantic.md @@ -1,6 +1,6 @@ # Pydantic Integration -It is possible to use [pydantic](https://pydantic-docs.helpmanual.io/) with `dataclasses-avroschema` making use of `AvroBaseModel`: +It is possible to use [pydantic](https://docs.pydantic.dev/latest/) with `dataclasses-avroschema` making use of `AvroBaseModel`: You must use use all the `pydantic` features and all `dataclasses-avroschema` functionality will be 
injected. @@ -55,21 +55,24 @@ UserAdvance.avro_schema() }' # Json schema -UserAdvance.json_schema() +UserAdvance.model_json_schema() -'{ - "title": "UserAdvance", - "type": "object", - "properties": { - "name": {"title": "Name", "type": "string"}, - "age": {"title": "Age", "type": "integer"}, - "pets": {"title": "Pets", "type": "array", "items": {"type": "string"}}, - "accounts": {"title": "Accounts", "type": "object", "additionalProperties": {"type": "integer"}}, - "has_car": {"title": "Has Car", "default": false, "type": "boolean"}, - "favorite_colors": {"default": "BLUE", "allOf": [{"$ref": "#/definitions/FavoriteColor"}]}, - "country": {"title": "Country", "default": "Argentina", "type": "string"}, - "address": {"title": "Address", "type": "string"}}, - "required": ["name", "age"], "definitions": {"FavoriteColor": {"title": "FavoriteColor", "description": "An enumeration.", "enum": ["BLUE", "YELLOW", "GREEN"], "type": "string"}}}' +{ + 'required': ['name', 'age'], + 'title': 'UserAdvance', + 'type': 'object' + 'properties': { + 'name': {'title': 'Name', 'type': 'string'}, + 'age': {'title': 'Age', 'type': 'integer'}, + 'pets': {'items': {'type': 'string'}, 'title': 'Pets', 'type': 'array'}, + 'accounts': {'additionalProperties': {'type': 'integer'}, 'title': 'Accounts', 'type': 'object'}, + 'has_car': {'default': False, 'title': 'Has Car', 'type': 'boolean'}, + 'favorite_colors': {'allOf': [{'$ref': '#/$defs/FavoriteColor'}], 'default': 'BLUE'}, + 'country': {'default': 'Argentina', 'title': 'Country', 'type': 'string'}, + 'address': {'default': None, 'title': 'Address', 'type': 'string'} + }, + '$defs': {'FavoriteColor': {'enum': ['BLUE', 'YELLOW', 'GREEN'], 'title': 'FavoriteColor', 'type': 'string'}}, +} ``` *(This script is complete, it should run "as is")* @@ -109,7 +112,6 @@ using the key `pydantic-class`. 
| double | pydantic.PositiveFloat | | long | pydantic.NegativeInt | | long | pydantic.PositiveIntstr | -| long | pydantic.ConstrainedInt (conint) | | Avro Type | Logical type | Pydantic Type | |--------------|--------------|---------------| @@ -238,7 +240,6 @@ class Infrastructure(AvroBaseModel): | double | "pydantic-class": "PositiveFloat" | pydantic.PositiveFloat | | long | "pydantic-class": "NegativeInt" | pydantic.NegativeInt | | long | "pydantic-class": "PositiveInt" | pydantic.PositiveInt | -| long | "pydantic-class": ConstrainedInt" | pydantic.ConstrainedInt | |Avro Type | Logical Type | Metadata | Pydantic Type | |-----------|--------------|----------|------------------------------------| @@ -255,10 +256,10 @@ class Infrastructure(AvroBaseModel): user = UserAdvance(name="bond", age=50) # to_json from dataclasses-avroschema is the same that json from pydantic -assert user.to_json() == user.json() +assert user.to_json(separators=(",",":",)) == user.model_dump_json() # to_dict from dataclasses-avroschema is the same that dict from pydantic -assert user.to_dict() == user.dict() +assert user.to_dict() == user.model_dump() ``` ```python title="serialization" @@ -303,12 +304,15 @@ user = User.parse_obj(data=data_user) assert type(user.addresses[0]) is Address ``` +!!! 
note + The method `parse_obj` is defined by `dataclasses_avroschemas` which internally is calling `model_validate` (introduced in pydantic v2) + *(This script is complete, it should run "as is")* -```python title="parse_obj_as usage" +```python title="validate_python usage" from typing import List -from pydantic import parse_obj_as +from pydantic import TypeAdapter from dataclasses_avroschema.pydantic import AvroBaseModel @@ -320,7 +324,8 @@ class User(AvroBaseModel): data = [{"name": "bond", "age": 50}, {"name": "bond2", "age": 60}] -users = parse_obj_as(List[User], data) +UserListValidator = TypeAdapter(List[User]) +users = UserListValidator.validate_python(data) users[0].avro_schema() # '{"type": "record", "name": "User", "fields": [{"name": "name", "type": "string"}, {"name": "age", "type": "long"}], "doc": "User with multiple Address"}' @@ -358,7 +363,7 @@ print(User.fake()) ### Excluding fields -Pydantic Fields can be excluded when `dict`, `json` or `copy` methods are called. This meaans that the exclusion is only for [exporting models](https://docs.pydantic.dev/latest/usage/exporting_models/) but not excluded in the instance creations, then the `avro serialization` will include all the class attributes. +Pydantic Fields can be excluded when `dict`, `json` or `copy` methods are called. This meaans that the exclusion is only for [exporting models](https://docs.pydantic.dev/latest/concepts/fields/#exclude) but not excluded in the instance creations, then the `avro serialization` will include all the class attributes. ```python import typing @@ -377,13 +382,15 @@ user = User(name="bond", age=50, has_car=True) print(user) # >>> User(name='bond', age=50, pets=['dog', 'cat'], accounts={'key': 1}, has_car=True) -print(user.dict()) +print(user.model_dump()) # >>> {'name': 'bond', 'age': 50, 'has_car': True} Excludes pets and accounts !!! 
event = user.serialize() assert user == User.deserialize(event) ``` +*(This script is complete, it should run "as is")* + ## Model Config With `AvroBaseModel` you can use the same [Model Config](https://docs.pydantic.dev/latest/usage/model_config/) that `pydantic` provides, @@ -412,6 +419,8 @@ for example: ```python import enum from dataclasses_avroschema.pydantic import AvroBaseModel + from pydantic import ConfigDict + class Color(str, enum.Enum): BLUE = "BLUE" @@ -419,14 +428,12 @@ for example: class Bus(AvroBaseModel): + model_config = ConfigDict(use_enum_values=True) driver: str color: Color - class Config: - use_enum_values = True - bus = Bus(driver="bond", color=Color.RED) - print(bus.dict()) + print(busmodel_dump()) # >>> {'driver': 'bond', 'color': 'RED'} ``` @@ -437,11 +444,11 @@ To add `custom field attributes` the `metadata` attribute must be set in `pydant !!! note Make sure that `pydantic.Field` is used and *NOT* `dataclasses.field` -## Custom Data Types as Fields -If needed, you can annotate fields with custom classes that define validators. +### Custom Data Types as Fields -### Classes with `__get_validators__` -These classes are [defined by pydantic](https://docs.pydantic.dev/1.10/usage/types/#classes-with-__get_validators__) as Python classes that define the `validate` and `__get_validators__` methods. +If needed, you can annotate fields with [custom classes](https://docs.pydantic.dev/latest/concepts/types/#customizing-validation-with-getpydanticcoreschema) that define validators. + +### Classes with `__get_pydantic_core_schema__` !!! note The conversion mapping of a custom class to its [supported type](./fields_specification.md#avro-field-and-python-types-summary) must be defined in the model's [`json_encoders`](https://docs.pydantic.dev/1.10/usage/exporting_models/#json_encoders) config. 
@@ -450,36 +457,55 @@ These classes are [defined by pydantic](https://docs.pydantic.dev/1.10/usage/typ [Generating models](#Model-generation) from avro schemas that were generated by classes containing Custom Class fields is not supported. ```python +from typing import Any + from dataclasses_avroschema.pydantic import AvroBaseModel +from pydantic import ConfigDict, GetCoreSchemaHandler +from pydantic_core import core_schema + class CustomClass: def __init__(self, value: str) -> None: self.value = value @classmethod - def __get_validators__(cls): - yield cls.validate - - @classmethod - def validate(cls, value): - if isinstance(value, CustomClass): - return value - elif not isinstance(value, str): - raise ValueError(f"Value must be a string or CustomClass - not {type(value)}") - - return cls(value) + def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler): + def validate(value): + if isinstance(value, CustomClass): + return value + elif not isinstance(value, str): + raise ValueError(f"Value must be a string or CustomClass - not {type(value)}") + + return cls(value) + + from_str_schema = core_schema.chain_schema( + [ + core_schema.str_schema(), + core_schema.no_info_plain_validator_function(validate), + ] + ) + + return core_schema.json_or_python_schema( + json_schema=from_str_schema, + python_schema=core_schema.union_schema( + [ + # check if it's an instance first before doing any further work + core_schema.is_instance_schema(CustomClass), + from_str_schema, + ] + ), + serialization=core_schema.plain_serializer_function_ser_schema(lambda instance: instance.x), + ) def __str__(self) -> str: return f"{self.value}" class MyModel(AvroBaseModel): + model_config = ConfigDict(json_encoders={CustomClass: str}, arbitrary_types_allowed=True) my_id: CustomClass - class Config: - json_encoders = {CustomClass: str} - print(MyModel.avro_schema_to_python()) """ @@ -495,3 +521,5 @@ print(MyModel.avro_schema_to_python()) } """ ``` + +*(This script is 
complete, it should run "as is")* diff --git a/docs/pydantic_v1.md b/docs/pydantic_v1.md new file mode 100644 index 00000000..d72e03c7 --- /dev/null +++ b/docs/pydantic_v1.md @@ -0,0 +1,506 @@ +# Pydantic V1 Integration + +It is possible to use [pydantic](https://docs.pydantic.dev/1.10/) with `dataclasses-avroschema` making use of `AvroBaseModel`: + +You must use use all the `pydantic` features and all `dataclasses-avroschema` functionality will be injected. + +!!! note + With `pydantic` you do not have to use `python dataclasses` + +!!! warning + The support for `pydantic v1` will be deprecated. We recommend end users to migrate to `pydantic v2`. + +## Avro and Json schemas + +```python title="Basic usage" +import typing +import enum +from dataclasses_avroschema.pydantic.v1 import AvroBaseModel + +from pydantic.v1 import Field + + +class FavoriteColor(str, enum.Enum): + BLUE = "BLUE" + YELLOW = "YELLOW" + GREEN = "GREEN" + + +class UserAdvance(AvroBaseModel): + name: str + age: int + pets: typing.List[str] = Field(default_factory=lambda: ["dog", "cat"]) + accounts: typing.Dict[str, int] = Field(default_factory=lambda: {"key": 1}) + has_car: bool = False + favorite_colors: FavoriteColor = FavoriteColor.BLUE + country: str = "Argentina" + address: str = None + + class Meta: + schema_doc = False + + +# Avro schema +UserAdvance.avro_schema() +'{ + "type": "record", + "name": "UserAdvance", + "fields": [ + {"name": "name", "type": "string"}, + {"name": "age", "type": "long"}, + {"name": "pets", "type": {"type": "array", "items": "string", "name": "pet"}, "default": ["dog", "cat"]}, + {"name": "accounts", "type": {"type": "map", "values": "long", "name": "account"}, "default": {"key": 1}}, + {"name": "has_car", "type": "boolean", "default": false}, + {"name": "favorite_colors", "type": {"type": "enum", "name": "FavoriteColor", "symbols": ["BLUE", "YELLOW", "GREEN"]}, "default": "BLUE"}, + {"name": "country", "type": "string", "default": "Argentina"}, + {"name": 
"address", "type": ["null", "string"], "default": null} + ] +}' + +# Json schema +UserAdvance.json_schema() + +'{ + "title": "UserAdvance", + "type": "object", + "properties": { + "name": {"title": "Name", "type": "string"}, + "age": {"title": "Age", "type": "integer"}, + "pets": {"title": "Pets", "type": "array", "items": {"type": "string"}}, + "accounts": {"title": "Accounts", "type": "object", "additionalProperties": {"type": "integer"}}, + "has_car": {"title": "Has Car", "default": false, "type": "boolean"}, + "favorite_colors": {"default": "BLUE", "allOf": [{"$ref": "#/definitions/FavoriteColor"}]}, + "country": {"title": "Country", "default": "Argentina", "type": "string"}, + "address": {"title": "Address", "type": "string"}}, + "required": ["name", "age"], "definitions": {"FavoriteColor": {"title": "FavoriteColor", "description": "An enumeration.", "enum": ["BLUE", "YELLOW", "GREEN"], "type": "string"}}}' +``` + +*(This script is complete, it should run "as is")* + +!!! note + You must use pydantic.Field instead of dataclasses.field + +## Avro schemas with pydantic types + +Most of `pydantic` types are supported and from them it is possible to generate `avro fields`. Because `pydantic` types are not native `python types` +the end result will contain extra metadata so the end users will have more context at the moment of using the schema. The extra `metadata` is specified +using the key `pydantic-class`. 
+ +### Supported fields + +| Avro Type | Pydantic Type | +|--------------|-------------| +| string | pydantic.FilePath | +| string | pydantic.DirectoryPath | +| string | pydantic.EmailStr | +| string | pydantic.NameEmail | +| string | pydantic.AnyUrl | +| string | pydantic.AnyHttpUrl | +| string | pydantic.HttpUrl | +| string | pydantic.FileUrl | +| string | pydantic.PostgresDsn | +| string | pydantic.CockroachDsn | +| string | pydantic.AmqpDsn | +| string | pydantic.RedisDsn | +| string | pydantic.MongoDsn | +| string | pydantic.KafkaDsn | +| string | pydantic.SecretStr | +| string | pydantic.IPvAnyAddress | +| string | pydantic.IPvAnyInterface | +| string | pydantic.IPvAnyNetwork | +| double | pydantic.NegativeFloat | +| double | pydantic.PositiveFloat | +| long | pydantic.NegativeInt | +| long | pydantic.PositiveIntstr | +| long | pydantic.ConstrainedInt (conint) | + +| Avro Type | Logical type | Pydantic Type | +|--------------|--------------|---------------| +| string | uuid | pydantic.UUID1 | +| string | uuid | pydantic.UUID3 | +| string | uuid | pydantic.UUID4 | +| string | uuid | pydantic.UUID5 | + +```python +import pydantic +from dataclasses_avroschema.pydantic.v1 import AvroBaseModel + + +class Infrastructure(AvroBaseModel): + email: pydantic.EmailStr + postgres_dsn: pydantic.PostgresDsn + cockroach_dsn: pydantic.CockroachDsn + amqp_dsn: pydantic.AmqpDsn + redis_dsn: pydantic.RedisDsn + mongo_dsn: pydantic.MongoDsn + kafka_url: pydantic.KafkaDsn + total_nodes: pydantic.PositiveInt + + +Infrastructure.avro_schema() + +{ + "type": "record", + "name": "Infrastructure", + "fields": [ + {"name": "email", "type": {"type": "string", "pydantic-class": "EmailStr"}}, + {"name": "postgres_dsn", "type": {"type": "string", "pydantic-class": "PostgresDsn"}}, + {"name": "cockroach_dsn", "type": {"type": "string", "pydantic-class": "CockroachDsn"}}, + {"name": "amqp_dsn", "type": {"type": "string", "pydantic-class": "AmqpDsn"}}, + {"name": "redis_dsn", "type": {"type": 
"string", "pydantic-class": "RedisDsn"}}, + {"name": "mongo_dsn", "type": {"type": "string", "pydantic-class": "MongoDsn"}}, + {"name": "kafka_url", "type": {"type": "string", "pydantic-class": "KafkaDsn"}}, + {"name": "total_nodes", "type": {"type": "long", "pydantic-class": "PositiveInt"}} + ] +} +``` + +*(This script is complete, it should run "as is")* + +!!! note + The key `pydantic-class` has been added as `metadata` to have more context when using the schema + +## Model generation + +If is possible to generate [pydantic models](https://marcosschroh.github.io/dataclasses-avroschema/model_generator/#render-pydantic-models) when `pydantic types` have been used. If a field has the matadata key `pydantic-class` +then the proper pydantic types will be used. + +Schema example: + +```python +from dataclasses_avroschema import ModelGenerator, BaseClassEnum + +model_generator = ModelGenerator(base_class=BaseClassEnum.AVRO_DANTIC_MODEL.value) + +schema = { + "type": "record", + "name": "Infrastructure", + "fields": [ + {"name": "email", "type": {"type": "string", "pydantic-class": "EmailStr"}}, + {"name": "kafka_url", "type": {"type": "string", "pydantic-class": "KafkaDsn"}}, + {"name": "total_nodes", "type": {"type": "long", "pydantic-class": "PositiveInt"}}, + {"name": "event_id", "type": {"type": "string", "logicalType": "uuid", "pydantic-class": "UUID1"}}, + {"name": "landing_zone_nodes", "type": {"type": "array", "items": {"type": "long", "pydantic-class": "PositiveInt"}, "name": "landing_zone_node"}}, + {"name": "total_nodes_in_aws", "type": {"type": "long", "pydantic-class": "PositiveInt"}, "default": 10}, + {"name": "optional_kafka_url", "type": ["null", {"type": "string", "pydantic-class": "KafkaDsn"}], "default": None} + ] +} + +result = model_generator.render(schema=schema) + +with open("models.py", mode="+w") as f: + f.write(result) +``` + +and then render the result: + +```python +from dataclasses_avroschema.pydantic.v1 import AvroBaseModel +import 
pydantic +import typing + + +class Infrastructure(AvroBaseModel): + email: pydantic.EmailStr + kafka_url: pydantic.KafkaDsn + total_nodes: pydantic.PositiveInt + event_id: pydantic.UUID1 + landing_zone_nodes: typing.List[pydantic.PositiveInt] + total_nodes_in_aws: pydantic.PositiveInt = 10 + optional_kafka_url: typing.Optional[pydantic.KafkaDsn] = None +``` + +*(This script is complete, it should run "as is")* + +!!! note + In order to render the pydantic types the base class must be `AVRO_BASE_MODEL` or `PYDANTIC_MODEL` + +### Mapping `avro fields` to `pydantic types` + +|Avro Type | Metadata | Pydantic Type | +|-----------|------------------------------------|----------------| +| string | "pydantic-class": "DirectoryPath" | pydantic.FilePath | +| string | "pydantic-class": "DirectoryPath" | pydantic.DirectoryPath | +| string | "pydantic-class": "EmailStr" | pydantic.EmailStr | +| string | "pydantic-class": "NameEmail" | pydantic.NameEmail | +| string | "pydantic-class": "AnyUrl" | pydantic.AnyUrl | +| string | "pydantic-class": "AnyHttpUrl" | pydantic.AnyHttpUrl | +| string | "pydantic-class": "HttpUrl" | pydantic.HttpUrl | +| string | "pydantic-class": "FileUrl" | pydantic.FileUrl | +| string | "pydantic-class": "PostgresDsn" | pydantic.PostgresDsn | +| string | "pydantic-class": "CockroachDsn | pydantic.CockroachDsn | +| string | "pydantic-class": "AmqpDsn" | pydantic.AmqpDsn | +| string | "pydantic-class": "RedisDsn" | pydantic.RedisDsn | +| string | "pydantic-class": "MongoDsn" | pydantic.MongoDsn | +| string | "pydantic-class": "KafkaDsn" | pydantic.KafkaDsn | +| string | "pydantic-class": "SecretStr" | pydantic.SecretStr | +| string | "pydantic-class": "IPvAnyAddress" | pydantic.IPvAnyAddress | +| string | "pydantic-class": "IPvAnyInterface"| pydantic.IPvAnyInterface | +| string | "pydantic-class": "IPvAnyNetwork" | pydantic.IPvAnyNetwork | +| double | "pydantic-class": "NegativeFloat" | pydantic.NegativeFloat | +| double | "pydantic-class": "PositiveFloat" 
| pydantic.PositiveFloat | +| long | "pydantic-class": "NegativeInt" | pydantic.NegativeInt | +| long | "pydantic-class": "PositiveInt" | pydantic.PositiveInt | +| long | "pydantic-class": ConstrainedInt" | pydantic.ConstrainedInt | + +|Avro Type | Logical Type | Metadata | Pydantic Type | +|-----------|--------------|----------|------------------------------------| +| string | uuid | "pydantic-class": "UUID1" | pydantic.UUID1 | +| string | uuid | "pydantic-class": "UUID3" | pydantic.UUID3 | +| string | uuid | "pydantic-class": "UUID4" | pydantic.UUID4 | +| string | uuid | "pydantic-class": "UUID5" | pydantic.UUID5 | + +## Pydantic and dataclasses_avroschema batteries + +### To dict, to json and serialization + +```python title="getting dict and json" +user = UserAdvance(name="bond", age=50) + +# to_json from dataclasses-avroschema is the same that json from pydantic +assert user.to_json() == user.json() + +# to_dict from dataclasses-avroschema is the same that dict from pydantic +assert user.to_dict() == user.dict() +``` + +```python title="serialization" +event = user.serialize() +print(event) +# >>> b'\x08bondd\x04\x06dog\x06cat\x00\x02\x06key\x02\x00\x00\x00\x12Argentina\x00' + +UserAdvance.deserialize(data=event) +# >>> UserAdvance(name='bond', age=50, pets=['dog', 'cat'], accounts={'key': 1}, has_car=False, favorite_colors=, country='Argentina', address=None) +``` + +### Parsing Objects + +```python title="parse_obj usage" +import typing + +from dataclasses_avroschema.pydantic.v1 import AvroBaseModel + + +class Address(AvroBaseModel): + "An Address" + street: str + street_number: int + + +class User(AvroBaseModel): + "User with multiple Address" + name: str + age: int + addresses: typing.List[Address] + +data_user = { + "name": "john", + "age": 20, + "addresses": [{ + "street": "test", + "street_number": 10, + }], + } + +user = User.parse_obj(data=data_user) +assert type(user.addresses[0]) is Address +``` + +*(This script is complete, it should run "as is")* 
+ +```python title="parse_obj_as usage" +from typing import List + +from pydantic import parse_obj_as + +from dataclasses_avroschema.pydantic.v1 import AvroBaseModel + + +class User(AvroBaseModel): + "User with multiple Address" + name: str + age: int + + +data = [{"name": "bond", "age": 50}, {"name": "bond2", "age": 60}] +users = parse_obj_as(List[User], data) + +users[0].avro_schema() +# '{"type": "record", "name": "User", "fields": [{"name": "name", "type": "string"}, {"name": "age", "type": "long"}], "doc": "User with multiple Address"}' +``` + +*(This script is complete, it should run "as is")* + +### Fake + +It is also possible to create `fake` instances with `pydantic` models: + +```python +import typing +import datetime +from pydanti.v1 import Field +from dataclasses_avroschema.pydantic.v1 import AvroBaseModel + + +class User(AvroBaseModel): + name: str + age: int + birthday: datetime.date + pets: typing.List[str] = Field(default_factory=lambda: ["dog", "cat"]) + accounts: typing.Dict[str, int] = Field(default_factory=lambda: {"key": 1}) + has_car: bool = False + +print(User.fake()) +# >>> User(name='qWTLkqcIVmSBxpWMpFyR', age=2608, birthday=datetime.date(1982, 3, 30), pets=['wqoEXcJRYjcnJmnIvtiI'], accounts={'JueNdHdzIhHIDsjlHJLc': 779}, has_car=True) +``` + +*(This script is complete, it should run "as is")* + +!!! note + All pydantic supported fields can be used with fake + +### Excluding fields + +Pydantic Fields can be excluded when `dict`, `json` or `copy` methods are called. This meaans that the exclusion is only for [exporting models](https://docs.pydantic.dev/1.10/usage/exporting_models/) but not excluded in the instance creations, then the `avro serialization` will include all the class attributes. 
+ +```python +import typing +from pydantic.v1 import Field +from dataclasses_avroschema.pydantic.v1 import AvroBaseModel + + +class User(AvroBaseModel): + name: str + age: int + pets: typing.List[str] = Field(default_factory=lambda: ["dog", "cat"], exclude=True) + accounts: typing.Dict[str, int] = Field(default_factory=lambda: {"key": 1}, exclude=True) + has_car: bool = False + +user = User(name="bond", age=50, has_car=True) +print(user) +# >>> User(name='bond', age=50, pets=['dog', 'cat'], accounts={'key': 1}, has_car=True) + +print(user.dict()) +# >>> {'name': 'bond', 'age': 50, 'has_car': True} Excludes pets and accounts !!! + +event = user.serialize() +assert user == User.deserialize(event) +``` + +*(This script is complete, it should run "as is")* + +## Model Config + +With `AvroBaseModel` you can use the same [Model Config](https://docs.pydantic.dev/1.10/usage/model_config/) that `pydantic` provides, +for example: + +=== "Not use Enum values" + ```python + import enum + from dataclasses_avroschema.pydantic import AvroBaseModel + + class Color(str, enum.Enum): + BLUE = "BLUE" + RED = "RED" + + + class Bus(AvroBaseModel): + driver: str + color: Color + + bus = Bus(driver="bond", color=Color.RED) + print(bus.dict()) + # >>> {'driver': 'bond', 'color': } + ``` + +=== "Use Enum values" + ```python + import enum + from dataclasses_avroschema.pydantic import AvroBaseModel + + class Color(str, enum.Enum): + BLUE = "BLUE" + RED = "RED" + + + class Bus(AvroBaseModel): + driver: str + color: Color + + class Config: + use_enum_values = True + + bus = Bus(driver="bond", color=Color.RED) + print(bus.dict()) + # >>> {'driver': 'bond', 'color': 'RED'} + ``` + +## Adding Custom Field-level Attributes + +To add `custom field attributes` the `metadata` attribute must be set in `pydantic.Field`. 
For more info check [adding-custom-field-level-attributes](https://marcosschroh.github.io/dataclasses-avroschema/fields_specification/#adding-custom-field-level-attributes) section for `dataclasses`. + +!!! note + Make sure that `pydantic.Field` is used and *NOT* `dataclasses.field` + +### Custom Data Types as Fields + +If needed, you can annotate fields with custom classes that define validators. + +### Classes with `__get_validators__` + +These classes are [defined by pydantic](https://docs.pydantic.dev/1.10/usage/types/#classes-with-__get_validators__) as Python classes that define the `validate` and `__get_validators__` methods. + +!!! note + The conversion mapping of a custom class to its [supported type](./fields_specification.md#avro-field-and-python-types-summary) must be defined in the model's [`json_encoders`](https://docs.pydantic.dev/1.10/usage/exporting_models/#json_encoders) config. + +!!! warning + [Generating models](#Model-generation) from avro schemas that were generated by classes containing Custom Class fields is not supported. 
+ +```python +from dataclasses_avroschema.pydantic.v1 import AvroBaseModel + + +class CustomClass: + def __init__(self, value: str) -> None: + self.value = value + + @classmethod + def __get_validators__(cls): + yield cls.validate + + @classmethod + def validate(cls, value): + if isinstance(value, CustomClass): + return value + elif not isinstance(value, str): + raise ValueError(f"Value must be a string or CustomClass - not {type(value)}") + + return cls(value) + + def __str__(self) -> str: + return f"{self.value}" + + +class MyModel(AvroBaseModel): + my_id: CustomClass + + class Config: + json_encoders = {CustomClass: str} + + +print(MyModel.avro_schema_to_python()) +""" +{ + "type": "record", + "name": "MyModel", + "fields": [ + { + "name": "my_id", + "type": "string" + } + ] +} +""" +``` + +*(This script is complete, it should run "as is")* diff --git a/mkdocs.yml b/mkdocs.yml index 989bf456..10bb4ccf 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -38,6 +38,7 @@ nav: - Case Schemas: 'case.md' - Factory and fixtures: 'factories_and_fixtures.md' - Model Generator: 'model_generator.md' + - Pydantic V1: 'pydantic_v1.md' - Pydantic: 'pydantic.md' - Faust: 'faust_records.md' - Kafka examples: 'kafka_examples.md' diff --git a/poetry.lock b/poetry.lock index db72ce33..76e4be44 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
[[package]] name = "aiofiles" @@ -193,6 +193,20 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" +[[package]] +name = "annotated-types" +version = "0.5.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = true +python-versions = ">=3.7" +files = [ + {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, + {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + [[package]] name = "anyio" version = "4.0.0" @@ -1084,16 +1098,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1474,56 +1478,141 @@ wcwidth = "*" [[package]] name = "pydantic" -version = "1.10.12" -description = "Data validation and settings management using python type hints" +version = "2.4.2" +description = "Data validation using Python type hints" optional = true python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, - {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, - {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, - {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, - {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, - {file = 
"pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, - {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, - {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, - {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, - {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, - {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, + {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"}, + {file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"}, ] [package.dependencies] -email-validator = {version = ">=1.0.3", optional = true, markers = "extra == \"email\""} -typing-extensions = ">=4.2.0" +annotated-types = ">=0.4.0" +email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} +pydantic-core = "2.10.1" +typing-extensions = ">=4.6.1" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = 
"pydantic-core" +version = "2.10.1" +description = "" +optional = true +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"}, + {file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"}, + {file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"}, + {file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = 
"sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"}, + {file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"}, + {file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"}, + {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"}, + {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"}, + {file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"}, + {file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = 
"sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"}, + {file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"}, + {file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"}, + {file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"}, + {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"}, + {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"}, + {file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = 
"sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"}, + {file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"}, + {file = "pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f"}, + {file = "pydantic_core-2.10.1-cp37-none-win32.whl", hash = "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c"}, + {file = "pydantic_core-2.10.1-cp37-none-win_amd64.whl", hash = "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e"}, + {file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"}, + {file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"}, + {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"}, + {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"}, + 
{file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"}, + {file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"}, + {file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"}, + {file = "pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"}, + {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"}, + {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"}, + {file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = 
"sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"}, + {file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"}, + {file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygments" @@ -1634,7 +1723,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1642,15 +1730,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = 
"PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1667,7 +1748,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = 
"PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1675,7 +1755,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2217,4 +2296,4 @@ pydantic = ["pydantic"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "eecc5f627dcef0198eecaafc64969bd12a1a6df73f52a4d6f609b6463ccf7506" +content-hash = "dc770ad3c86f578672fcb7004bf21db7d289e8243dfd48a24544fb2ed772d491" diff --git a/pyproject.toml b/pyproject.toml index cfd7f95f..f213ea9f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,8 +27,8 @@ faker = {version = ">=18.3.1,<20.0.0", optional = true} inflector = "^3.1.0" faust-streaming = {version = "^0.10.11", optional = true} casefy = "^0.1.7" -pydantic = {version = "^1.10.9", optional = true, extras = ["email"]} typing-extensions = {version = "^4.2.0", python = "<3.9"} +pydantic = {version = "^2.4.2", optional = true, extras = ["email"]} [tool.poetry.group.dev.dependencies] mypy = "^1" diff --git 
a/tests/fake/const.py b/tests/fake/const.py index d8c22b82..8231eb6b 100644 --- a/tests/fake/const.py +++ b/tests/fake/const.py @@ -1,4 +1,5 @@ import pydantic +from pydantic import v1 pydantic_fields = ( pydantic.FilePath, @@ -23,9 +24,38 @@ pydantic.PositiveFloat, pydantic.NegativeInt, pydantic.PositiveInt, - pydantic.ConstrainedInt, - pydantic.UUID1, - pydantic.UUID3, + # pydantic.UUID1, + # pydantic.UUID3, pydantic.UUID4, - pydantic.UUID5, + # pydantic.UUID5, +) + +pydantic_v1_fields = ( + v1.FilePath, + v1.DirectoryPath, + v1.EmailStr, + v1.NameEmail, + v1.AnyUrl, + v1.AnyHttpUrl, + v1.HttpUrl, + v1.FileUrl, + v1.PostgresDsn, + v1.CockroachDsn, + v1.AmqpDsn, + v1.RedisDsn, + v1.MongoDsn, + v1.KafkaDsn, + v1.SecretStr, + v1.IPvAnyAddress, + v1.IPvAnyInterface, + v1.IPvAnyNetwork, + v1.NegativeFloat, + v1.PositiveFloat, + v1.NegativeInt, + v1.PositiveInt, + v1.ConstrainedInt, + v1.UUID1, + v1.UUID3, + v1.UUID4, + v1.UUID5, ) diff --git a/tests/fake/test_fake.py b/tests/fake/test_fake.py index 7c13c27d..21f21ee9 100644 --- a/tests/fake/test_fake.py +++ b/tests/fake/test_fake.py @@ -4,17 +4,7 @@ import typing import uuid -import pydantic -import pytest - from dataclasses_avroschema import AvroModel, types -from dataclasses_avroschema.pydantic import AvroBaseModel - -from .const import pydantic_fields - -parametrize_base_model = pytest.mark.parametrize( - "model_class, decorator", [(AvroModel, dataclasses.dataclass), (AvroBaseModel, lambda f: f)] -) def test_fake_primitive_types(user_dataclass: typing.Type) -> None: @@ -56,10 +46,9 @@ class UserAdvance(AvroModel): assert user_advance.pets == pets -@parametrize_base_model -def test_fake_with_logical_types(model_class: typing.Type[AvroModel], decorator: typing.Callable) -> None: - @decorator - class LogicalTypes(model_class): +def test_fake_with_logical_types() -> None: + @dataclasses.dataclass + class LogicalTypes(AvroModel): birthday: datetime.date meeting_time: datetime.time meeting_time_micro: types.TimeMicro @@ 
-70,51 +59,44 @@ class LogicalTypes(model_class): assert isinstance(LogicalTypes.fake(), LogicalTypes) -@parametrize_base_model -def test_fake_union(model_class: typing.Type[AvroModel], decorator: typing.Callable) -> None: - if model_class is AvroBaseModel: - field = pydantic.Field - else: - field = dataclasses.field - - @decorator - class Bus(model_class): +def test_fake_union() -> None: + @dataclasses.dataclass + class Bus(AvroModel): engine_name: str class Meta: namespace = "types.bus_type" - @decorator - class Car(model_class): + @dataclasses.dataclass + class Car(AvroModel): engine_name: str class Meta: namespace = "types.car_type" - @decorator - class UnionSchema(model_class): + @dataclasses.dataclass + class UnionSchema(AvroModel): first_union: typing.Union[str, int] logical_union: typing.Union[datetime.datetime, datetime.date, uuid.UUID] lake_trip: typing.Union[Bus, Car] river_trip: typing.Optional[typing.Union[Bus, Car]] = None - mountain_trip: typing.Union[Bus, Car] = field(default_factory=lambda: Bus(engine_name="honda")) + mountain_trip: typing.Union[Bus, Car] = dataclasses.field(default_factory=lambda: Bus(engine_name="honda")) assert isinstance(UnionSchema.fake(), UnionSchema) -@parametrize_base_model -def test_fake_one_to_one_relationship(model_class: typing.Type[AvroModel], decorator: typing.Callable) -> None: +def test_fake_one_to_one_relationship() -> None: """ Test schema relationship one-to-one """ - @decorator - class Address(model_class): + @dataclasses.dataclass + class Address(AvroModel): street: str street_number: int - @decorator - class User(model_class): + @dataclasses.dataclass + class User(AvroModel): name: str age: int address: Address @@ -122,19 +104,18 @@ class User(model_class): assert isinstance(User.fake(), User) -@parametrize_base_model -def test_fake_one_to_many_relationship(model_class: typing.Type[AvroModel], decorator: typing.Callable) -> None: +def test_fake_one_to_many_relationship() -> None: """ Test schema relationship 
one-to-many """ - @decorator - class Address(model_class): + @dataclasses.dataclass + class Address(AvroModel): street: str street_number: int - @decorator - class User(model_class): + @dataclasses.dataclass + class User(AvroModel): name: str age: int addresses: typing.List[Address] @@ -144,19 +125,18 @@ class User(model_class): assert User.avro_schema() -@parametrize_base_model -def test_fake_one_to_many_with_tuples(model_class: typing.Type[AvroModel], decorator: typing.Callable) -> None: +def test_fake_one_to_many_with_tuples() -> None: """ Test schema relationship one-to-many """ - @decorator - class Address(model_class): + @dataclasses.dataclass + class Address(AvroModel): street: str street_number: int - @decorator - class User(model_class): + @dataclasses.dataclass + class User(AvroModel): addresses: typing.Tuple[Address, ...] user = User.fake() @@ -165,19 +145,18 @@ class User(model_class): assert isinstance(user.addresses, tuple) -@parametrize_base_model -def test_fake_one_to_many_map_relationship(model_class: typing.Type[AvroModel], decorator: typing.Callable) -> None: +def test_fake_one_to_many_map_relationship() -> None: """ Test schema relationship one-to-many using a map """ - @decorator - class Address(model_class): + @dataclasses.dataclass + class Address(AvroModel): street: str street_number: int - @decorator - class User(model_class): + @dataclasses.dataclass + class User(AvroModel): name: str age: int addresses: typing.Dict[str, Address] @@ -185,14 +164,13 @@ class User(model_class): assert isinstance(User.fake(), User) -@parametrize_base_model -def test_self_one_to_one_relationship(model_class: typing.Type[AvroModel], decorator: typing.Callable) -> None: +def test_self_one_to_one_relationship() -> None: """ Test self relationship one-to-one """ - @decorator - class User(model_class): + @dataclasses.dataclass + class User(AvroModel): name: str age: int teamates: typing.Optional[typing.Type["User"]] = None @@ -232,15 +210,14 @@ class 
User(AvroModel): assert isinstance(User.fake(), User) -@parametrize_base_model -def test_optional_relationship(model_class: typing.Type[AvroModel], decorator: typing.Callable) -> None: - @decorator - class Address(model_class): +def test_optional_relationship() -> None: + @dataclasses.dataclass + class Address(AvroModel): street: str street_number: int - @decorator - class User(model_class): + @dataclasses.dataclass + class User(AvroModel): name: str age: int address: typing.Optional[Address] = None @@ -248,14 +225,13 @@ class User(model_class): assert isinstance(User.fake(), User) -@parametrize_base_model -def test_decimals(model_class: typing.Type[AvroModel], decorator: typing.Callable) -> None: +def test_decimals() -> None: """ Test Decimal logical types """ - @decorator - class User(model_class): + @dataclasses.dataclass + class User(AvroModel): name: str age: int test_score_1: types.condecimal(max_digits=11, decimal_places=5) @@ -264,14 +240,13 @@ class User(model_class): assert isinstance(User.fake(), User) -@parametrize_base_model -def test_int32(model_class: typing.Type[AvroModel], decorator: typing.Callable) -> None: +def test_int32() -> None: """ Test Int32 type """ - @decorator - class User(model_class): + @dataclasses.dataclass + class User(AvroModel): name: str age: int test_score_1: types.Int32 = 100 @@ -280,25 +255,16 @@ class User(model_class): assert isinstance(User.fake(), User) -@parametrize_base_model -def test_float32(model_class: typing.Type[AvroModel], decorator: typing.Callable) -> None: +def test_float32() -> None: """ Test Float32 type """ - @decorator - class User(model_class): + @dataclasses.dataclass + class User(AvroModel): name: str age: int test_score_1: types.Float32 = 100.0 test_score_2: types.Float32 = types.Float32(12.4) assert isinstance(User.fake(), User) - - -@pytest.mark.parametrize("pydantic_field", pydantic_fields) -def test_pydantic_field(pydantic_field) -> None: - class User(AvroBaseModel): - name: pydantic_field - - 
assert isinstance(User.fake(), User) diff --git a/tests/fake/test_fake_pydantic.py b/tests/fake/test_fake_pydantic.py new file mode 100644 index 00000000..e226e641 --- /dev/null +++ b/tests/fake/test_fake_pydantic.py @@ -0,0 +1,222 @@ +import datetime +import decimal +import typing +import uuid + +import pydantic +import pytest + +from dataclasses_avroschema import types +from dataclasses_avroschema.pydantic import AvroBaseModel + +from .const import pydantic_fields as pydantic_fields + + +def test_fake_with_logical_types() -> None: + class LogicalTypes(AvroBaseModel): + birthday: datetime.date + meeting_time: datetime.time + meeting_time_micro: types.TimeMicro + release_datetime: datetime.datetime + release_datetime_micro: types.DateTimeMicro + event_uuid: uuid.UUID + + assert isinstance(LogicalTypes.fake(), LogicalTypes) + + +def test_fake_union() -> None: + class Bus(AvroBaseModel): + engine_name: str + + class Meta: + namespace = "types.bus_type" + + class Car(AvroBaseModel): + engine_name: str + + class Meta: + namespace = "types.car_type" + + class UnionSchema(AvroBaseModel): + first_union: typing.Union[str, int] + logical_union: typing.Union[datetime.datetime, datetime.date, uuid.UUID] + lake_trip: typing.Union[Bus, Car] + river_trip: typing.Optional[typing.Union[Bus, Car]] = None + mountain_trip: typing.Union[Bus, Car] = pydantic.Field(default_factory=lambda: Bus(engine_name="honda")) + + assert isinstance(UnionSchema.fake(), UnionSchema) + + +def test_fake_one_to_one_relationship() -> None: + """ + Test schema relationship one-to-one + """ + + class Address(AvroBaseModel): + street: str + street_number: int + + class User(AvroBaseModel): + name: str + age: int + address: Address + + assert isinstance(User.fake(), User) + + +def test_fake_one_to_many_relationship() -> None: + """ + Test schema relationship one-to-many + """ + + class Address(AvroBaseModel): + street: str + street_number: int + + class User(AvroBaseModel): + name: str + age: int + addresses: 
typing.List[Address] + + user = User.fake() + assert isinstance(user, User) + assert User.avro_schema() + + +def test_fake_one_to_many_with_tuples() -> None: + """ + Test schema relationship one-to-many + """ + + class Address(AvroBaseModel): + street: str + street_number: int + + class User(AvroBaseModel): + addresses: typing.Tuple[Address, ...] + + user = User.fake() + assert isinstance(user, User) + assert User.avro_schema() + assert isinstance(user.addresses, tuple) + + +def test_fake_one_to_many_map_relationship() -> None: + """ + Test schema relationship one-to-many using a map + """ + + class Address(AvroBaseModel): + street: str + street_number: int + + class User(AvroBaseModel): + name: str + age: int + addresses: typing.Dict[str, Address] + + assert isinstance(User.fake(), User) + + +def test_self_one_to_one_relationship() -> None: + """ + Test self relationship one-to-one + """ + + class User(AvroBaseModel): + name: str + age: int + teamates: typing.Optional["User"] = None + + assert isinstance(User.fake(), User) + + +def test_self_one_to_many_relationship() -> None: + """ + Test self relationship one-to-many + """ + + class User(AvroBaseModel): + name: str + age: int + points: typing.List[typing.Optional[types.Float32]] + teamates: typing.Optional[typing.List["User"]] = None + + assert isinstance(User.fake(), User) + + +def test_self_one_to_many_map_relationship() -> None: + """ + Test self relationship one-to-many Map + """ + + class User(AvroBaseModel): + name: str + age: int + friends: typing.Optional[typing.Dict[str, "User"]] = None + teamates: typing.Optional[typing.Dict[str, "User"]] = None + + assert isinstance(User.fake(), User) + + +def test_optional_relationship() -> None: + class Address(AvroBaseModel): + street: str + street_number: int + + class User(AvroBaseModel): + name: str + age: int + address: typing.Optional[Address] = None + + assert isinstance(User.fake(), User) + + +def test_decimals() -> None: + """ + Test Decimal logical types + 
""" + + class User(AvroBaseModel): + name: str + age: int + test_score_1: types.condecimal(max_digits=11, decimal_places=5) + test_score_2: types.condecimal(max_digits=5, decimal_places=2) = decimal.Decimal("100.00") + + assert isinstance(User.fake(), User) + + +def test_int32() -> None: + """ + Test Int32 type + """ + + class User(AvroBaseModel): + name: str + age: int + test_score_1: types.Int32 = 100 + test_score_2: types.Int32 = types.Int32(12) + + assert isinstance(User.fake(), User) + + +def test_float32() -> None: + """ + Test Float32 type + """ + + class User(AvroBaseModel): + name: str + age: int + test_score_1: types.Float32 = 100.0 + test_score_2: types.Float32 = types.Float32(12.4) + + assert isinstance(User.fake(), User) + + +@pytest.mark.parametrize("pydantic_field", pydantic_fields) +def test_pydantic_field(pydantic_field) -> None: + class User(AvroBaseModel): + name: pydantic_field + + assert isinstance(User.fake(), User) diff --git a/tests/fake/test_fake_pydantic_v1.py b/tests/fake/test_fake_pydantic_v1.py new file mode 100644 index 00000000..df9129be --- /dev/null +++ b/tests/fake/test_fake_pydantic_v1.py @@ -0,0 +1,223 @@ +import datetime +import decimal +import typing +import uuid + +import pytest +from pydantic.v1 import Field + +from dataclasses_avroschema import types +from dataclasses_avroschema.pydantic.v1 import AvroBaseModel + +from .const import pydantic_v1_fields as pydantic_fields + + +def test_fake_with_logical_types() -> None: + class LogicalTypes(AvroBaseModel): + birthday: datetime.date + meeting_time: datetime.time + meeting_time_micro: types.TimeMicro + release_datetime: datetime.datetime + release_datetime_micro: types.DateTimeMicro + event_uuid: uuid.UUID + + assert isinstance(LogicalTypes.fake(), LogicalTypes) + + +def test_fake_union() -> None: + class Bus(AvroBaseModel): + engine_name: str + + class Meta: + namespace = "types.bus_type" + + class Car(AvroBaseModel): + engine_name: str + + class Meta: + namespace = 
"types.car_type" + + class UnionSchema(AvroBaseModel): + first_union: typing.Union[str, int] + logical_union: typing.Union[datetime.datetime, datetime.date, uuid.UUID] + lake_trip: typing.Union[Bus, Car] + river_trip: typing.Optional[typing.Union[Bus, Car]] = None + mountain_trip: typing.Union[Bus, Car] = Field(default_factory=lambda: Bus(engine_name="honda")) + + assert isinstance(UnionSchema.fake(), UnionSchema) + + +def test_fake_one_to_one_relationship() -> None: + """ + Test schema relationship one-to-one + """ + + class Address(AvroBaseModel): + street: str + street_number: int + + class User(AvroBaseModel): + name: str + age: int + address: Address + + assert isinstance(User.fake(), User) + + +def test_fake_one_to_many_relationship() -> None: + """ + Test schema relationship one-to-many + """ + + class Address(AvroBaseModel): + street: str + street_number: int + + class User(AvroBaseModel): + name: str + age: int + addresses: typing.List[Address] + + user = User.fake() + assert isinstance(user, User) + assert User.avro_schema() + + +def test_fake_one_to_many_with_tuples() -> None: + """ + Test schema relationship one-to-many + """ + + class Address(AvroBaseModel): + street: str + street_number: int + + class User(AvroBaseModel): + addresses: typing.Tuple[Address, ...] 
+ + user = User.fake() + assert isinstance(user, User) + assert User.avro_schema() + assert isinstance(user.addresses, tuple) + + +def test_fake_one_to_many_map_relationship() -> None: + """ + Test schema relationship one-to-many using a map + """ + + class Address(AvroBaseModel): + street: str + street_number: int + + class User(AvroBaseModel): + name: str + age: int + addresses: typing.Dict[str, Address] + + assert isinstance(User.fake(), User) + + +def test_self_one_to_one_relationship() -> None: + """ + Test self relationship one-to-one + """ + + class User(AvroBaseModel): + name: str + age: int + teamates: typing.Optional[typing.Type["User"]] = None + + assert isinstance(User.fake(), User) + + +def test_self_one_to_many_relationship() -> None: + """ + Test self relationship one-to-many + """ + + class User(AvroBaseModel): + name: str + age: int + points: typing.List[typing.Optional[types.Float32]] + teamates: typing.Optional[typing.List["User"]] = None + + assert isinstance(User.fake(), User) + + +@pytest.mark.xfail(reason="Self references in pydantic V1 are not smart. 
Use pydantic V2") +def test_self_one_to_many_map_relationship() -> None: + """ + Test self relationship one-to-many Map + """ + + class User(AvroBaseModel): + name: str + age: int + friends: typing.Optional[typing.Dict[str, typing.Type["User"]]] = None + teamates: typing.Optional[typing.Dict[str, typing.Type["User"]]] = None + + assert isinstance(User.fake(), User) + + +def test_optional_relationship() -> None: + class Address(AvroBaseModel): + street: str + street_number: int + + class User(AvroBaseModel): + name: str + age: int + address: typing.Optional[Address] = None + + assert isinstance(User.fake(), User) + + +def test_decimals() -> None: + """ + Test Decimal logical types + """ + + class User(AvroBaseModel): + name: str + age: int + test_score_1: types.condecimal(max_digits=11, decimal_places=5) + test_score_2: types.condecimal(max_digits=5, decimal_places=2) = decimal.Decimal("100.00") + + assert isinstance(User.fake(), User) + + +def test_int32() -> None: + """ + Test Int32 type + """ + + class User(AvroBaseModel): + name: str + age: int + test_score_1: types.Int32 = 100 + test_score_2: types.Int32 = types.Int32(12) + + assert isinstance(User.fake(), User) + + +def test_float32() -> None: + """ + Test Float32 type + """ + + class User(AvroBaseModel): + name: str + age: int + test_score_1: types.Float32 = 100.0 + test_score_2: types.Float32 = types.Float32(12.4) + + assert isinstance(User.fake(), User) + + +@pytest.mark.parametrize("pydantic_field", pydantic_fields) +def test_pydantic_field(pydantic_field) -> None: + class User(AvroBaseModel): + name: pydantic_field + + assert isinstance(User.fake(), User) diff --git a/tests/fields/consts.py b/tests/fields/consts.py index f35bad90..48555c3a 100644 --- a/tests/fields/consts.py +++ b/tests/fields/consts.py @@ -328,11 +328,6 @@ def xfail_annotation(typ): (datetime.datetime, {"type": field_utils.LONG, "logicalType": field_utils.TIMESTAMP_MILLIS}), (uuid.uuid4, {"type": field_utils.STRING, "logicalType": 
field_utils.UUID}), (uuid.UUID, {"type": field_utils.STRING, "logicalType": field_utils.UUID}), - # pydantic fields - (pydantic.UUID1, {"type": field_utils.STRING, "logicalType": field_utils.UUID, "pydantic-class": "UUID1"}), - (pydantic.UUID3, {"type": field_utils.STRING, "logicalType": field_utils.UUID, "pydantic-class": "UUID3"}), - (pydantic.UUID4, {"type": field_utils.STRING, "logicalType": field_utils.UUID, "pydantic-class": "UUID4"}), - (pydantic.UUID5, {"type": field_utils.STRING, "logicalType": field_utils.UUID, "pydantic-class": "UUID5"}), ) LOGICAL_TYPES_AND_INVALID_DEFAULTS = ( diff --git a/tests/fields/pydantic/__init__.py b/tests/fields/pydantic/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fields/pydantic/consts.py b/tests/fields/pydantic/consts.py new file mode 100644 index 00000000..94540a8c --- /dev/null +++ b/tests/fields/pydantic/consts.py @@ -0,0 +1,20 @@ +import pydantic +from pydantic import v1 + +from dataclasses_avroschema.fields import field_utils + +PYDANTIC_LOGICAL_TYPES_AND_DEFAULTS = ( + # pydantic fields + (pydantic.UUID1, {"type": field_utils.STRING, "logicalType": field_utils.UUID, "pydantic-class": "UUID1"}), + (pydantic.UUID3, {"type": field_utils.STRING, "logicalType": field_utils.UUID, "pydantic-class": "UUID3"}), + (pydantic.UUID4, {"type": field_utils.STRING, "logicalType": field_utils.UUID, "pydantic-class": "UUID4"}), + (pydantic.UUID5, {"type": field_utils.STRING, "logicalType": field_utils.UUID, "pydantic-class": "UUID5"}), +) + +PYDANTIC_V1_LOGICAL_TYPES_AND_DEFAULTS = ( + # pydantic fields + (v1.UUID1, {"type": field_utils.STRING, "logicalType": field_utils.UUID, "pydantic-class": "UUID1"}), + (v1.UUID3, {"type": field_utils.STRING, "logicalType": field_utils.UUID, "pydantic-class": "UUID3"}), + (v1.UUID4, {"type": field_utils.STRING, "logicalType": field_utils.UUID, "pydantic-class": "UUID4"}), + (v1.UUID5, {"type": field_utils.STRING, "logicalType": field_utils.UUID, "pydantic-class": 
"UUID5"}), +) diff --git a/tests/fields/pydantic/test_pydantic.py b/tests/fields/pydantic/test_pydantic.py new file mode 100644 index 00000000..8bec1e02 --- /dev/null +++ b/tests/fields/pydantic/test_pydantic.py @@ -0,0 +1,102 @@ +import dataclasses +from typing import Any + +import pytest +from pydantic import ConfigDict, GetCoreSchemaHandler, field_serializer +from pydantic_core import CoreSchema, core_schema + +from dataclasses_avroschema import AvroField +from dataclasses_avroschema.pydantic import AvroBaseModel + +from . import consts + + +class PydanticCustomType: + @classmethod + def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: + return core_schema.no_info_after_validator_function(cls, handler(str)) + + +class MyModel(AvroBaseModel): + model_config = ConfigDict(json_encoders={PydanticCustomType: str}) + x: PydanticCustomType + + @field_serializer("x") + def serialize_x(self, x: PydanticCustomType, _info): + return str(x) + + +def test_pydantic_custom_class_field(): + field_name = "custom_class" + custom_class_field = AvroField(field_name, PydanticCustomType, MyModel) + + assert custom_class_field.to_dict() == { + "type": "string", + "name": field_name, + } + + +def test_pydantic_custom_class_field_with_default(): + field_name = "custom_class" + default = "a default string" + custom_class_field = AvroField( + field_name, + PydanticCustomType, + MyModel, + default=default, + ) + + assert custom_class_field.to_dict() == { + "type": "string", + "name": field_name, + "default": default, + } + + +def test_pydantic_custom_class_field_with_default_factory(): + """ + When the type is pydantic custom class, the default_factory should + be omitted + """ + field_name = "custom_class" + custom_class_field = AvroField( + field_name, + PydanticCustomType, + MyModel, + default_factory=int, + ) + + assert custom_class_field.default_factory is dataclasses.MISSING + + +def 
test_pydantic_custom_class_field_with_misconfigured_parent(): + class MisconfiguredParent(AvroBaseModel): + pass + + field_name = "custom_class" + with pytest.raises(KeyError): + AvroField(field_name, PydanticCustomType, MisconfiguredParent) + + +@pytest.mark.parametrize("python_type,avro_type", consts.PYDANTIC_LOGICAL_TYPES_AND_DEFAULTS) +def test_logical_types(python_type, avro_type): + name = "a logical type" + field = AvroField(name, python_type) + + expected = {"name": name, "type": avro_type} + + assert expected == field.to_dict() + + +@pytest.mark.parametrize("python_type,avro_type", consts.PYDANTIC_LOGICAL_TYPES_AND_DEFAULTS) +def test_logical_types_with_null_as_default(python_type, avro_type): + name = "a logical type" + field = AvroField(name, python_type, default=None) + + expected = { + "name": name, + "type": ["null", avro_type], + "default": None, + } + + assert expected == field.to_dict() diff --git a/tests/fields/pydantic/test_pydantic_v1.py b/tests/fields/pydantic/test_pydantic_v1.py new file mode 100644 index 00000000..e7df8342 --- /dev/null +++ b/tests/fields/pydantic/test_pydantic_v1.py @@ -0,0 +1,100 @@ +import dataclasses + +import pytest + +from dataclasses_avroschema import AvroField +from dataclasses_avroschema.pydantic.v1 import AvroBaseModel + +from . 
import consts + + +class PydanticCustomClass: + @classmethod + def __get_validators__(cls): + pass # This is a stub method + + @classmethod + def validate(cls): + pass # This is a stub method too + + +class PydanticCustomClassParent(AvroBaseModel): + class Config: + json_encoders = {PydanticCustomClass: str} + + +def test_pydantic_custom_class_field(): + field_name = "custom_class" + custom_class_field = AvroField(field_name, PydanticCustomClass, PydanticCustomClassParent) + + assert custom_class_field.to_dict() == { + "type": "string", + "name": field_name, + } + + +def test_pydantic_custom_class_field_with_default(): + field_name = "custom_class" + default = "a default string" + custom_class_field = AvroField( + field_name, + PydanticCustomClass, + PydanticCustomClassParent, + default=default, + ) + + assert custom_class_field.to_dict() == { + "type": "string", + "name": field_name, + "default": default, + } + + +def test_pydantic_custom_class_field_with_default_factory(): + """ + When the type is pydantic custom class, the default_factory should + be omitted + """ + field_name = "custom_class" + custom_class_field = AvroField( + field_name, + PydanticCustomClass, + PydanticCustomClassParent, + default_factory=int, + ) + + assert custom_class_field.default_factory is dataclasses.MISSING + + +def test_pydantic_custom_class_field_with_misconfigured_parent(): + class MisconfiguredParent(AvroBaseModel): + pass + + field_name = "custom_class" + with pytest.raises(ValueError): + AvroField(field_name, PydanticCustomClass, MisconfiguredParent) + + +@pytest.mark.parametrize("python_type,avro_type", consts.PYDANTIC_V1_LOGICAL_TYPES_AND_DEFAULTS) +def test_logical_types(python_type, avro_type): + name = "a logical type" + python_type = python_type + field = AvroField(name, python_type) + + expected = {"name": name, "type": avro_type} + + assert expected == field.to_dict() + + +@pytest.mark.parametrize("python_type,avro_type", consts.PYDANTIC_V1_LOGICAL_TYPES_AND_DEFAULTS) 
+def test_logical_types_with_null_as_default(python_type, avro_type): + name = "a logical type" + field = AvroField(name, python_type, default=None) + + expected = { + "name": name, + "type": ["null", avro_type], + "default": None, + } + + assert expected == field.to_dict() diff --git a/tests/fields/test_complex_types.py b/tests/fields/test_complex_types.py index a96b9c3e..5c4d3773 100644 --- a/tests/fields/test_complex_types.py +++ b/tests/fields/test_complex_types.py @@ -8,7 +8,6 @@ from dataclasses_avroschema import AvroField, AvroModel, exceptions, types from dataclasses_avroschema.fields import field_utils -from dataclasses_avroschema.pydantic import AvroBaseModel from . import consts @@ -739,73 +738,6 @@ def test_enum_field_default(): assert enum_field4.get_default_value() == Color.GREEN -class PydanticCustomClass: - @classmethod - def __get_validators__(cls): - pass # This is a stub method - - @classmethod - def validate(cls): - pass # This is a stub method too - - -class PydanticCustomClassParent(AvroBaseModel): - class Config: - json_encoders = {PydanticCustomClass: str} - - -def test_pydantic_custom_class_field(): - field_name = "custom_class" - custom_class_field = AvroField(field_name, PydanticCustomClass, PydanticCustomClassParent) - - assert custom_class_field.to_dict() == { - "type": "string", - "name": field_name, - } - - -def test_pydantic_custom_class_field_with_default(): - field_name = "custom_class" - default = "a default string" - custom_class_field = AvroField( - field_name, - PydanticCustomClass, - PydanticCustomClassParent, - default=default, - ) - - assert custom_class_field.to_dict() == { - "type": "string", - "name": field_name, - "default": default, - } - - -def test_pydantic_custom_class_field_with_default_factory(): - """ - When the type is pydantic custom class, the default_factory should - be omitted - """ - field_name = "custom_class" - custom_class_field = AvroField( - field_name, - PydanticCustomClass, - 
PydanticCustomClassParent, - default_factory=int, - ) - - assert custom_class_field.default_factory is dataclasses.MISSING - - -def test_pydantic_custom_class_field_with_misconfigured_parent(): - class MisconfiguredParent(AvroBaseModel): - pass - - field_name = "custom_class" - with pytest.raises(ValueError): - AvroField(field_name, PydanticCustomClass, MisconfiguredParent) - - @pytest.mark.parametrize("value", [4, "4", True, b"four", Color.BLUE, None]) def test_literal_field_with_single_parameter(value): """ diff --git a/tests/schemas/conftest.py b/tests/schemas/conftest.py index 4d743407..5c31dd81 100644 --- a/tests/schemas/conftest.py +++ b/tests/schemas/conftest.py @@ -1,14 +1,7 @@ -import datetime -import enum import json import os -import typing -import uuid import pytest -from pydantic import Field - -from dataclasses_avroschema.pydantic import AvroBaseModel AVRO_SCHEMAS_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "avro") @@ -152,38 +145,3 @@ def pydantic_fields_schema(): @pytest.fixture def order_fields_schema(): return load_json("order_fields.avsc") - - -@pytest.fixture -def AvroBaseModel_model(): - class Bus(AvroBaseModel): - "A Bus" - engine_name: str - - class Meta: - namespace = "types.bus_type" - - class Car(AvroBaseModel): - "A Car" - engine_name: str - - class Meta: - namespace = "types.car_type" - - class TripDistance(str, enum.Enum): - CLOSE = "Close" - FAR = "Far" - - class Meta: - doc = "Distance of the trip" - - class UnionSchema(AvroBaseModel): - "Some Unions" - first_union: typing.Union[str, int] - logical_union: typing.Union[datetime.datetime, datetime.date, uuid.UUID] - lake_trip: typing.Union[Bus, Car] = Field(default_factory=lambda: Bus(engine_name="honda")) - river_trip: typing.Optional[typing.Union[Bus, Car]] = None - mountain_trip: typing.Union[Bus, Car] = Field(default_factory=lambda: Bus.parse_obj({"engine_name": "honda"})) - trip_distance: typing.Union[int, TripDistance] = 123 - - return UnionSchema diff --git 
a/tests/schemas/pydantic/conftest.py b/tests/schemas/pydantic/conftest.py new file mode 100644 index 00000000..0cb14500 --- /dev/null +++ b/tests/schemas/pydantic/conftest.py @@ -0,0 +1,82 @@ +import datetime +import enum +import typing +import uuid + +import pydantic +import pytest +from pydantic.v1 import Field + +from dataclasses_avroschema.pydantic import AvroBaseModel, v1 + + +@pytest.fixture +def AvroBaseModelV1(): + class Bus(v1.AvroBaseModel): + "A Bus" + engine_name: str + + class Meta: + namespace = "types.bus_type" + + class Car(v1.AvroBaseModel): + "A Car" + engine_name: str + + class Meta: + namespace = "types.car_type" + + class TripDistance(str, enum.Enum): + CLOSE = "Close" + FAR = "Far" + + class Meta: + doc = "Distance of the trip" + + class UnionSchema(v1.AvroBaseModel): + "Some Unions" + first_union: typing.Union[str, int] + logical_union: typing.Union[datetime.datetime, datetime.date, uuid.UUID] + lake_trip: typing.Union[Bus, Car] = Field(default_factory=lambda: Bus(engine_name="honda")) + river_trip: typing.Optional[typing.Union[Bus, Car]] = None + mountain_trip: typing.Union[Bus, Car] = Field(default_factory=lambda: Bus.parse_obj({"engine_name": "honda"})) + trip_distance: typing.Union[int, TripDistance] = 123 + + return UnionSchema + + +@pytest.fixture +def AvroBaseModelV2(): + class Bus(AvroBaseModel): + "A Bus" + engine_name: str + + class Meta: + namespace = "types.bus_type" + + class Car(AvroBaseModel): + "A Car" + engine_name: str + + class Meta: + namespace = "types.car_type" + + class TripDistance(str, enum.Enum): + CLOSE = "Close" + FAR = "Far" + + class Meta: + doc = "Distance of the trip" + + class UnionSchema(AvroBaseModel): + "Some Unions" + first_union: typing.Union[str, int] + logical_union: typing.Union[datetime.datetime, datetime.date, uuid.UUID] + lake_trip: typing.Union[Bus, Car] = pydantic.Field(default_factory=lambda: Bus(engine_name="honda")) + river_trip: typing.Optional[typing.Union[Bus, Car]] = None + mountain_trip: 
typing.Union[Bus, Car] = pydantic.Field( + default_factory=lambda: Bus.parse_obj({"engine_name": "honda"}) + ) + trip_distance: typing.Union[int, TripDistance] = 123 + + return UnionSchema diff --git a/tests/schemas/pydantic/test_pydantic.py b/tests/schemas/pydantic/test_pydantic.py new file mode 100644 index 00000000..b828f803 --- /dev/null +++ b/tests/schemas/pydantic/test_pydantic.py @@ -0,0 +1,528 @@ +import datetime +import decimal +import enum +import json +import typing +import uuid + +import pytest +from pydantic import ( + UUID1, + AmqpDsn, + CockroachDsn, + EmailStr, + Field, + KafkaDsn, + MongoDsn, + PositiveInt, + PostgresDsn, + RedisDsn, + ValidationError, +) + +from dataclasses_avroschema import types, utils +from dataclasses_avroschema.pydantic import AvroBaseModel + +encoded = "test".encode() + + +def test_pydantic_record_schema_primitive_types(user_avro_json): + class User(AvroBaseModel): + name: str + age: int + has_pets: bool + money: float + encoded: bytes + + class Meta: + schema_doc = False + + assert User.avro_schema() == json.dumps(user_avro_json) + + +def test_pydantic_record_schema_with_metadata(): + class User(AvroBaseModel): + name: str = Field(metadata={"doc": "bar"}) + + class Meta: + schema_doc = False + + expected_schema = {"type": "record", "name": "User", "fields": [{"doc": "bar", "name": "name", "type": "string"}]} + assert User.avro_schema() == json.dumps(expected_schema) + + +def test_pydantic_record_schema_complex_types(user_advance_avro_json, color_enum): + class UserAdvance(AvroBaseModel): + name: str + age: int + pets: typing.List[str] + accounts: typing.Dict[str, int] + favorite_colors: color_enum + md5: types.confixed(size=16) + has_car: bool = False + country: str = "Argentina" + address: typing.Optional[str] = None + + class Meta: + schema_doc = False + + class Config: + arbitrary_types_allowed = True + + assert UserAdvance.avro_schema() == json.dumps(user_advance_avro_json) + + +def 
test_pydantic_record_schema_complex_types_with_defaults(user_advance_with_defaults_avro_json, color_enum): + class UserAdvance(AvroBaseModel): + name: str + age: int + pets: typing.List[str] = Field(default_factory=lambda: ["dog", "cat"]) + accounts: typing.Dict[str, int] = Field(default_factory=lambda: {"key": 1}) + has_car: bool = False + favorite_colors: color_enum = color_enum.BLUE + country: str = "Argentina" + address: str = None + + class Meta: + schema_doc = False + + assert UserAdvance.avro_schema() == json.dumps(user_advance_with_defaults_avro_json) + + +def test_pydantic_record_schema_logical_types(logical_types_schema): + a_datetime = datetime.datetime(2019, 10, 12, 17, 57, 42, tzinfo=datetime.timezone.utc) + + class LogicalTypes(AvroBaseModel): + "Some logical types" + birthday: datetime.date = a_datetime.date() + meeting_time: datetime.time = a_datetime.time() + release_datetime: datetime.datetime = a_datetime + event_uuid: uuid.UUID = "09f00184-7721-4266-a955-21048a5cc235" + + assert LogicalTypes.avro_schema() == json.dumps(logical_types_schema) + + +def test_pydantic_record_one_to_one_relationship(user_one_address_schema): + """ + Test schema relationship one-to-one + """ + + class Address(AvroBaseModel): + "An Address" + street: str + street_number: int + + class User(AvroBaseModel): + "An User with Address" + name: str + age: int + address: Address + + assert User.avro_schema() == json.dumps(user_one_address_schema) + + +def test_pydantic_record_one_to_one_relationship_with_none_default(user_one_address_schema_with_none_default): + """ + Test schema relationship one-to-one + """ + + class Address(AvroBaseModel): + "An Address" + street: str + street_number: int + + class User(AvroBaseModel): + "An User with Address" + name: str + age: int + address: Address = None + + assert User.avro_schema() == json.dumps(user_one_address_schema_with_none_default) + + +def test_pydantic_record_one_to_many_relationship(user_many_address_schema): + """ + Test 
schema relationship one-to-many + """ + + class Address(AvroBaseModel): + "An Address" + street: str + street_number: int + + class User(AvroBaseModel): + "User with multiple Address" + name: str + age: int + addresses: typing.List[Address] + + assert User.avro_schema() == json.dumps(user_many_address_schema) + + +def test_pydantic_record_one_to_many_map_relationship(user_many_address_map_schema): + """ + Test schema relationship one-to-many using a map + """ + + class Address(AvroBaseModel): + "An Address" + street: str + street_number: int + + class User(AvroBaseModel): + "User with multiple Address" + name: str + age: int + addresses: typing.Dict[str, Address] + + assert User.avro_schema() == json.dumps(user_many_address_map_schema) + + +def test_pydantic_record_self_one_to_one_relationship(user_self_reference_one_to_one_schema): + """ + Test self relationship one-to-one + """ + + class User(AvroBaseModel): + "User with self reference as friend" + name: str + age: int + friend: "User" + teamates: typing.Optional["User"] = None + + assert User.avro_schema() == json.dumps(user_self_reference_one_to_one_schema) + + +def test_pydantic_record_self_one_to_many_relationship( + user_self_reference_one_to_many_schema, +): + """ + Test self relationship one-to-many + """ + + class User(AvroBaseModel): + "User with self reference as friends" + name: str + age: int + friends: typing.List["User"] + teamates: typing.List["User"] = None + + assert User.avro_schema() == json.dumps(user_self_reference_one_to_many_schema) + + +def test_pydantic_record_self_one_to_many_map_relationship( + user_self_reference_one_to_many_map_schema, +): + """ + Test self relationship one-to-many Map + """ + + class User(AvroBaseModel): + "User with self reference as friends" + name: str + age: int + friends: typing.Dict[str, "User"] + teamates: typing.Dict[str, "User"] = None + + assert User.avro_schema() == json.dumps(user_self_reference_one_to_many_map_schema) + + +def 
test_pydantic_record_schema_with_unions_type(union_type_schema): + class Bus(AvroBaseModel): + "A Bus" + engine_name: str + + class Meta: + namespace = "types.bus_type" + + class Car(AvroBaseModel): + "A Car" + engine_name: str + + class Meta: + namespace = "types.car_type" + + class TripDistance(enum.Enum): + CLOSE = "Close" + FAR = "Far" + + class Meta: + doc = "Distance of the trip" + namespace = "trip" + + class UnionSchema(AvroBaseModel): + "Some Unions" + first_union: typing.Union[str, int] + logical_union: typing.Union[datetime.datetime, datetime.date, uuid.UUID] + lake_trip: typing.Union[Bus, Car] + river_trip: typing.Union[Bus, Car] = None + mountain_trip: typing.Union[Bus, Car] = Field(default_factory=lambda: Bus(engine_name="honda")) + trip_distance: typing.Union[int, TripDistance] = None + optional_distance: typing.Optional[TripDistance] = None + + assert UnionSchema.avro_schema() == json.dumps(union_type_schema) + + +def test_pydantic_fields(pydantic_fields_schema): + class Infrastructure(AvroBaseModel): + email: EmailStr + postgres_dsn: PostgresDsn + cockroach_dsn: CockroachDsn + amqp_dsn: AmqpDsn + redis_dsn: RedisDsn + mongo_dsn: MongoDsn + kafka_url: KafkaDsn + total_nodes: PositiveInt + event_id: UUID1 + landing_zone_nodes: typing.List[PositiveInt] + total_nodes_in_aws: PositiveInt = 10 + optional_kafka_url: typing.Optional[KafkaDsn] = None + + assert Infrastructure.avro_schema() == json.dumps(pydantic_fields_schema) + + +def test_create_instance(): + class User(AvroBaseModel): + name: str + age: int + has_pets: bool = True + + User( + name="a name", + age=20, + ) + + with pytest.raises(ValueError): + User() + + +def test_validate(): + class User(AvroBaseModel): + name: str + age: int + has_pets: bool = True + + with pytest.raises(ValidationError): + User.model_validate({"name": "a name"}) + + user = User.model_validate( + { + "name": "a name", + "age": 20, + } + ) + + assert user.validate_avro() + + +def test_json_schema(AvroBaseModelV2): + 
assert AvroBaseModelV2.json_schema() + + +def test_to_dict(AvroBaseModelV2): + instance = AvroBaseModelV2(first_union="hi!", logical_union=uuid.uuid4()) + assert instance.to_dict() == instance.model_dump() + + +def test_asdict(): + class MyEnum(str, enum.Enum): + x = "test" + + class ModelA(AvroBaseModel): + a: int = 1 + d: MyEnum = MyEnum.x + + class ModelB(AvroBaseModel): + b: ModelA + d: MyEnum = MyEnum.x + + target = repr({"b": {"a": 1, "d": "test"}, "d": "test"}) + model_b = ModelB(b=ModelA()) + res_asdict = repr(model_b.asdict()) + assert res_asdict == target, res_asdict + assert model_b.serialize() + + +def test_to_json(AvroBaseModelV2): + instance = AvroBaseModelV2(first_union="hi!", logical_union=uuid.uuid4()) + assert ( + instance.to_json( + separators=( + ",", + ":", + ) + ) + == instance.model_dump_json() + ) + + +def test_to_json_logical_types(): + class LogicalTypes(AvroBaseModel): + "Some logical types" + birthday: datetime.date + meeting_time: datetime.time + release_datetime: datetime.datetime + event_uuid: uuid.UUID + + a_datetime = datetime.datetime(2019, 10, 12, 17, 57, 42) + + data = { + "birthday": a_datetime.date(), + "meeting_time": a_datetime.time(), + "release_datetime": a_datetime, + "event_uuid": uuid.UUID("09f00184-7721-4266-a955-21048a5cc235"), + } + + logical_types = LogicalTypes(**data) + avro_json = logical_types.serialize(serialization_type="avro-json") + to_json = logical_types.model_dump_json() + + assert ( + logical_types.to_json( + separators=( + ",", + ":", + ) + ) + == to_json + ) + assert to_json.encode() != avro_json + + +def test_serialization(color_enum): + class UserAdvance(AvroBaseModel): + name: str + age: int + explicit: types.condecimal(max_digits=3, decimal_places=2) + explicit_with_default: typing.Optional[types.condecimal(max_digits=3, decimal_places=2)] = None + implicit: types.condecimal(max_digits=3, decimal_places=2) = decimal.Decimal("3.14") + pets: typing.List[str] = Field(default_factory=lambda: ["dog", 
"cat"]) + accounts: typing.Dict[str, int] = Field(default_factory=lambda: {"key": 1}) + has_car: bool = False + favorite_colors: color_enum = color_enum.BLUE + country: str = "Argentina" + address: typing.Optional[str] = None + + class Meta: + schema_doc = False + + user = UserAdvance(name="bond", age=50, explicit=decimal.Decimal("3.12")) + event = user.serialize() + + # we need to update the fields that have `types.Decimal`, otherwise the objects will be different + assert UserAdvance.deserialize(data=event) == user + + +def test_not_pydantic_not_installed(monkeypatch): + monkeypatch.setattr(utils, "pydantic", None) + + class Bus: + pass + + assert not utils.is_pydantic_model(Bus) + + +def test_parse_obj(): + """ + Created nested schema resolution directly from dictionaries + """ + + class Address(AvroBaseModel): + "An Address" + street: str + street_number: int + + class User(AvroBaseModel): + "User with multiple Address" + name: str + age: int + addresses: typing.List[Address] + + data_user = { + "name": "john", + "age": 20, + "addresses": [ + { + "street": "test", + "street_number": 10, + } + ], + } + user = User.parse_obj(data_user) + assert type(user.addresses[0]) is Address + assert User.avro_schema() + + +def test_fake(color_enum) -> None: + class Address(AvroBaseModel): + street: str + street_number: int + + class User(AvroBaseModel): + name: str + age: int + birthday: datetime.date + pets: typing.List[str] = Field(default_factory=lambda: ["dog", "cat"]) + accounts: typing.Dict[str, int] = Field(default_factory=lambda: {"key": 1}) + has_car: bool = False + favorite_colors: color_enum = color_enum.BLUE + country: str = "Argentina" + address: typing.Optional[Address] = None + + # just calling fake is enougt to know that a proper instance was created, + # otherwise a pydantic validation should have been raised + User.fake() + + +def test_exclude_field() -> None: + class Message(AvroBaseModel): + internal_field: str = Field(exclude=True) + public_field: str + 
+ assert Message.avro_schema() + + message = Message(internal_field="internal", public_field="public") + assert "internal_field" not in message.model_dump() + + with pytest.raises(ValueError) as excinfo: + message.serialize(serialization_type="avro-json") + + assert str(excinfo.value) == "no value and no default for internal_field" + + +def test_exclude_field_with_default() -> None: + class Message(AvroBaseModel): + internal_field: str = Field(exclude=True, default="internal") + public_field: str + + assert Message.avro_schema() + + message = Message(internal_field="internal", public_field="public") + assert "internal_field" not in message.model_dump() + + event = message.serialize(serialization_type="avro-json") + + assert Message.deserialize(event, serialization_type="avro-json") == message + assert Message.deserialize(event, serialization_type="avro-json", create_instance=False) == message.model_dump() + + +def test_exclude_field_from_schema(user_extra_avro_attributes): + class User(AvroBaseModel): + "An User" + name: str + age: int + last_name: str = "Bond" + + class Meta: + namespace = "test.com.ar/user/v1" + aliases = [ + "User", + "My favorite User", + ] + exclude = [ + "last_name", + ] + + user = User.fake() + assert User.avro_schema() == json.dumps(user_extra_avro_attributes) + assert User.deserialize(user.serialize()) == user diff --git a/tests/schemas/test_pydantic.py b/tests/schemas/pydantic/test_pydantic_v1.py similarity index 85% rename from tests/schemas/test_pydantic.py rename to tests/schemas/pydantic/test_pydantic_v1.py index 7f6fee3f..664acfcf 100644 --- a/tests/schemas/test_pydantic.py +++ b/tests/schemas/pydantic/test_pydantic_v1.py @@ -5,11 +5,23 @@ import typing import uuid -import pydantic import pytest +from pydantic.v1 import ( + UUID1, + AmqpDsn, + CockroachDsn, + EmailStr, + Field, + KafkaDsn, + MongoDsn, + PositiveInt, + PostgresDsn, + RedisDsn, + ValidationError, +) from dataclasses_avroschema import types, utils -from 
dataclasses_avroschema.pydantic import AvroBaseModel +from dataclasses_avroschema.pydantic.v1 import AvroBaseModel encoded = "test".encode() @@ -30,7 +42,7 @@ class Meta: def test_pydantic_record_schema_with_metadata(): class User(AvroBaseModel): - name: str = pydantic.Field(metadata={"doc": "bar"}) + name: str = Field(metadata={"doc": "bar"}) class Meta: schema_doc = False @@ -49,7 +61,7 @@ class UserAdvance(AvroBaseModel): md5: types.confixed(size=16) has_car: bool = False country: str = "Argentina" - address: str = None + address: typing.Optional[str] = None class Meta: schema_doc = False @@ -64,8 +76,8 @@ def test_pydantic_record_schema_complex_types_with_defaults(user_advance_with_de class UserAdvance(AvroBaseModel): name: str age: int - pets: typing.List[str] = pydantic.Field(default_factory=lambda: ["dog", "cat"]) - accounts: typing.Dict[str, int] = pydantic.Field(default_factory=lambda: {"key": 1}) + pets: typing.List[str] = Field(default_factory=lambda: ["dog", "cat"]) + accounts: typing.Dict[str, int] = Field(default_factory=lambda: {"key": 1}) has_car: bool = False favorite_colors: color_enum = color_enum.BLUE country: str = "Argentina" @@ -244,7 +256,7 @@ class UnionSchema(AvroBaseModel): logical_union: typing.Union[datetime.datetime, datetime.date, uuid.UUID] lake_trip: typing.Union[Bus, Car] river_trip: typing.Union[Bus, Car] = None - mountain_trip: typing.Union[Bus, Car] = pydantic.Field(default_factory=lambda: Bus(engine_name="honda")) + mountain_trip: typing.Union[Bus, Car] = Field(default_factory=lambda: Bus(engine_name="honda")) trip_distance: typing.Union[int, TripDistance] = None optional_distance: typing.Optional[TripDistance] = None @@ -253,18 +265,18 @@ class UnionSchema(AvroBaseModel): def test_pydantic_fields(pydantic_fields_schema): class Infrastructure(AvroBaseModel): - email: pydantic.EmailStr - postgres_dsn: pydantic.PostgresDsn - cockroach_dsn: pydantic.CockroachDsn - amqp_dsn: pydantic.AmqpDsn - redis_dsn: pydantic.RedisDsn - 
mongo_dsn: pydantic.MongoDsn - kafka_url: pydantic.KafkaDsn - total_nodes: pydantic.PositiveInt - event_id: pydantic.UUID1 - landing_zone_nodes: typing.List[pydantic.PositiveInt] - total_nodes_in_aws: pydantic.PositiveInt = 10 - optional_kafka_url: typing.Optional[pydantic.KafkaDsn] = None + email: EmailStr + postgres_dsn: PostgresDsn + cockroach_dsn: CockroachDsn + amqp_dsn: AmqpDsn + redis_dsn: RedisDsn + mongo_dsn: MongoDsn + kafka_url: KafkaDsn + total_nodes: PositiveInt + event_id: UUID1 + landing_zone_nodes: typing.List[PositiveInt] + total_nodes_in_aws: PositiveInt = 10 + optional_kafka_url: typing.Optional[KafkaDsn] = None assert Infrastructure.avro_schema() == json.dumps(pydantic_fields_schema) @@ -290,7 +302,7 @@ class User(AvroBaseModel): age: int has_pets: bool = True - with pytest.raises(pydantic.error_wrappers.ValidationError): + with pytest.raises(ValidationError): User.validate({"name": "a name"}) user = User.validate( @@ -303,18 +315,18 @@ class User(AvroBaseModel): assert user.validate_avro() -def test_json_schema(AvroBaseModel_model): - assert AvroBaseModel_model.json_schema() +def test_json_schema(AvroBaseModelV1): + assert AvroBaseModelV1.json_schema() -def test_to_dict(AvroBaseModel_model): - instance = AvroBaseModel_model(first_union="hi!", logical_union=uuid.uuid4()) +def test_to_dict(AvroBaseModelV1): + instance = AvroBaseModelV1(first_union="hi!", logical_union=uuid.uuid4()) assert instance.to_dict() == instance.dict() def test_asdict(): - class MyEnum(enum.IntEnum): - x = 1 + class MyEnum(str, enum.Enum): + x = "test" class ModelA(AvroBaseModel): a: int = 1 @@ -324,14 +336,15 @@ class ModelB(AvroBaseModel): b: ModelA d: MyEnum = MyEnum.x - target = repr({"b": {"a": 1, "d": 1}, "d": 1}) + target = repr({"b": {"a": 1, "d": "test"}, "d": "test"}) model_b = ModelB(b=ModelA()) res_asdict = repr(model_b.asdict()) assert res_asdict == target, res_asdict + assert model_b.serialize() -def test_to_json(AvroBaseModel_model): - instance = 
AvroBaseModel_model(first_union="hi!", logical_union=uuid.uuid4()) +def test_to_json(AvroBaseModelV1): + instance = AvroBaseModelV1(first_union="hi!", logical_union=uuid.uuid4()) assert instance.to_json() == instance.json() @@ -367,8 +380,8 @@ class UserAdvance(AvroBaseModel): explicit: types.condecimal(max_digits=3, decimal_places=2) explicit_with_default: typing.Optional[types.condecimal(max_digits=3, decimal_places=2)] = None implicit: types.condecimal(max_digits=3, decimal_places=2) = decimal.Decimal("3.14") - pets: typing.List[str] = pydantic.Field(default_factory=lambda: ["dog", "cat"]) - accounts: typing.Dict[str, int] = pydantic.Field(default_factory=lambda: {"key": 1}) + pets: typing.List[str] = Field(default_factory=lambda: ["dog", "cat"]) + accounts: typing.Dict[str, int] = Field(default_factory=lambda: {"key": 1}) has_car: bool = False favorite_colors: color_enum = color_enum.BLUE country: str = "Argentina" @@ -433,8 +446,8 @@ class User(AvroBaseModel): name: str age: int birthday: datetime.date - pets: typing.List[str] = pydantic.Field(default_factory=lambda: ["dog", "cat"]) - accounts: typing.Dict[str, int] = pydantic.Field(default_factory=lambda: {"key": 1}) + pets: typing.List[str] = Field(default_factory=lambda: ["dog", "cat"]) + accounts: typing.Dict[str, int] = Field(default_factory=lambda: {"key": 1}) has_car: bool = False favorite_colors: color_enum = color_enum.BLUE country: str = "Argentina" @@ -448,13 +461,13 @@ class User(AvroBaseModel): def test_exclude_fields() -> None: class Message(AvroBaseModel): - internal_field: str = pydantic.Field(exclude=True) + internal_field: str = Field(exclude=True) public_field: str assert Message.avro_schema() message = Message(internal_field="internal", public_field="public") - assert not "internal_field" in message.dict() + assert "internal_field" not in message.dict() event = message.serialize(serialization_type="avro-json") diff --git a/tests/serialization/test_primitive_types_serialization.py 
b/tests/serialization/test_primitive_types_serialization.py index c0df61e0..0df982fc 100644 --- a/tests/serialization/test_primitive_types_serialization.py +++ b/tests/serialization/test_primitive_types_serialization.py @@ -1,16 +1,8 @@ import dataclasses import json import math -import typing - -import pytest from dataclasses_avroschema import AvroModel, types -from dataclasses_avroschema.pydantic import AvroBaseModel - -parametrize_base_model = pytest.mark.parametrize( - "model_class, decorator", [(AvroModel, dataclasses.dataclass), (AvroBaseModel, lambda f: f)] -) def test_primitive_types(user_dataclass): @@ -32,10 +24,9 @@ def test_primitive_types(user_dataclass): assert user.to_json() == json.dumps(data_json) -@parametrize_base_model -def test_primitive_types_with_defaults(model_class: typing.Type[AvroModel], decorator: typing.Callable): - @decorator - class User(model_class): +def test_primitive_types_with_defaults(): + @dataclasses.dataclass + class User(AvroModel): name: str = "marcos" age: int = 20 has_pets: bool = False @@ -74,10 +65,9 @@ class User(model_class): assert user.to_json() == json.dumps(data_json) -@parametrize_base_model -def test_primitive_types_with_nulls(model_class: typing.Type[AvroModel], decorator: typing.Callable): - @decorator - class User(model_class): +def test_primitive_types_with_nulls(): + @dataclasses.dataclass + class User(AvroModel): name: str = None age: int = 20 has_pets: bool = False @@ -116,10 +106,9 @@ class User(model_class): assert user.to_json() == json.dumps(data) -@parametrize_base_model -def test_float32_primitive_type(model_class: typing.Type[AvroModel], decorator: typing.Callable): - @decorator - class User(model_class): +def test_float32_primitive_type(): + @dataclasses.dataclass + class User(AvroModel): height: types.Float32 = None data = {"height": 178.3} diff --git a/tests/serialization/test_pydantic_fields.py b/tests/serialization/test_pydantic_fields.py index a77870ec..235d3f37 100644 --- 
a/tests/serialization/test_pydantic_fields.py +++ b/tests/serialization/test_pydantic_fields.py @@ -1,7 +1,12 @@ +import json +import math +from typing import Any, Optional + import pytest -from pydantic import Field, conint -from pydantic.error_wrappers import ValidationError +from pydantic import ConfigDict, Field, GetCoreSchemaHandler, ValidationError, conint, field_serializer +from pydantic_core import core_schema +from dataclasses_avroschema import types from dataclasses_avroschema.pydantic import AvroBaseModel from dataclasses_avroschema.schema_generator import AVRO, AVRO_JSON @@ -11,17 +16,33 @@ def __init__(self, value: str) -> None: self.value = value @classmethod - def __get_validators__(cls): - yield cls.validate - - @classmethod - def validate(cls, value): - if isinstance(value, CustomClass): - return value - elif not isinstance(value, str): - raise ValueError(f"Value must be a string or CustomClass - not {type(value)}") - - return cls(value) + def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler): + def validate(value): + if isinstance(value, CustomClass): + return value + elif not isinstance(value, str): + raise ValueError(f"Value must be a string or CustomClass - not {type(value)}") + + return cls(value) + + from_str_schema = core_schema.chain_schema( + [ + core_schema.str_schema(), + core_schema.no_info_plain_validator_function(validate), + ] + ) + + return core_schema.json_or_python_schema( + json_schema=from_str_schema, + python_schema=core_schema.union_schema( + [ + # check if it's an instance first before doing any further work + core_schema.is_instance_schema(CustomClass), + from_str_schema, + ] + ), + serialization=core_schema.plain_serializer_function_ser_schema(lambda instance: instance.value), + ) def __eq__(self, other: object) -> bool: if not isinstance(other, CustomClass): @@ -34,10 +55,12 @@ def __str__(self) -> str: class Parent(AvroBaseModel): + model_config = ConfigDict(json_encoders={CustomClass: str}, 
arbitrary_types_allowed=True) custom_class: CustomClass - class Config: - json_encoders = {CustomClass: str} + @field_serializer("custom_class") + def serialize_custom_class(self, custom_class: CustomClass, _info): + return str(custom_class) parent_under_test = Parent(custom_class=CustomClass("custom class value")) @@ -88,3 +111,126 @@ def test_custom_class_type_deserialize(serialization_type: str, data: bytes): def test_custom_class_deserialize_invalid(): with pytest.raises(ValidationError): Parent.deserialize(b'{"custom_class": 1}', serialization_type="avro-json") + + +def test_primitive_types_with_defaults(): + class User(AvroBaseModel): + name: str = "marcos" + age: int = 20 + has_pets: bool = False + money: float = 100.0 + encoded: bytes = b"hola" + height: types.Int32 = 184 + + data = {"name": "marcos", "age": 20, "has_pets": False, "money": 100.0, "encoded": b"hola", "height": 184} + data_json = {"name": "marcos", "age": 20, "has_pets": False, "money": 100.0, "encoded": "hola", "height": 184} + + user = User() + avro_binary = user.serialize() + avro_json = user.serialize(serialization_type="avro-json") + + assert user.deserialize(avro_binary, create_instance=False) == data + assert user.deserialize(avro_json, serialization_type="avro-json", create_instance=False) == data + assert user.deserialize(avro_binary) == user + assert user.deserialize(avro_json, serialization_type="avro-json") == user + + assert user.to_dict() == data + assert user.to_json() == json.dumps(data_json) + + # check that works with schema evolution + user = User(name="Juan", age=30) + avro_json = user.serialize(serialization_type="avro-json") + + data = {"name": "Juan", "age": 30, "has_pets": False, "money": 100.0, "encoded": b"hola", "height": 184} + data_json = {"name": "Juan", "age": 30, "has_pets": False, "money": 100.0, "encoded": "hola", "height": 184} + + # assert user.deserialize(avro_binary, create_instance=False) == data + assert user.deserialize(avro_json, 
serialization_type="avro-json", create_instance=False) == data + + # assert user.deserialize(avro_binary) == user + assert user.deserialize(avro_json, serialization_type="avro-json") == user + assert user.to_dict() == data + assert user.to_json() == json.dumps(data_json) + + +def test_primitive_types_with_nulls(): + class User(AvroBaseModel): + name: Optional[str] = None + age: Optional[int] = 20 + has_pets: Optional[bool] = False + money: Optional[float] = None + encoded: Optional[bytes] = None + height: Optional[types.Int32] = None + + data = {"name": None, "age": 20, "has_pets": False, "money": 100.0, "encoded": b"hola", "height": 184} + data_json = {"name": None, "age": 20, "has_pets": False, "money": 100.0, "encoded": "hola", "height": 184} + + user = User(**data) + avro_binary = user.serialize() + avro_json = user.serialize(serialization_type="avro-json") + + assert user.deserialize(avro_binary, create_instance=False) == data + assert user.deserialize(avro_json, serialization_type="avro-json", create_instance=False) == data + + assert user.deserialize(avro_binary) == user + assert user.deserialize(avro_json, serialization_type="avro-json") == user + + assert user.to_dict() == data + assert user.to_json() == json.dumps(data_json) + + data = {"name": None, "age": 20, "has_pets": False, "money": None, "encoded": None, "height": None} + + user = User() + avro_binary = user.serialize() + avro_json = user.serialize(serialization_type="avro-json") + + assert user.deserialize(avro_binary, create_instance=False) == data + assert user.deserialize(avro_json, serialization_type="avro-json", create_instance=False) == data + assert user.deserialize(avro_binary) == user + assert user.deserialize(avro_json, serialization_type="avro-json") == user + + assert user.to_dict() == data + assert user.to_json() == json.dumps(data) + + +def test_float32_primitive_type(): + class User(AvroBaseModel): + height: Optional[types.Float32] = None + + data = {"height": 178.3} + + user = 
User(**data) + avro_binary = user.serialize() + avro_json = user.serialize(serialization_type="avro-json") + + # Floating point error expected + res = user.deserialize(avro_binary, create_instance=False) + assert res["height"] != data["height"] + assert math.isclose(res["height"], data["height"], abs_tol=1e-5) + + res = user.deserialize(avro_json, serialization_type="avro-json", create_instance=False) + assert res["height"] == data["height"] + + # Floating point error expected + res = user.deserialize(avro_binary) + assert res.height != user.height + assert math.isclose(res.height, user.height, abs_tol=1e-5) + + res = user.deserialize(avro_json, serialization_type="avro-json") + assert res.height == user.height + + res = user.to_dict() + assert res["height"] == data["height"] + + data = {"height": None} + + user = User() + avro_binary = user.serialize() + avro_json = user.serialize(serialization_type="avro-json") + + assert user.deserialize(avro_binary, create_instance=False) == data + assert user.deserialize(avro_json, serialization_type="avro-json", create_instance=False) == data + assert user.deserialize(avro_binary) == user + assert user.deserialize(avro_json, serialization_type="avro-json") == user + + assert user.to_dict() == data diff --git a/tests/serialization/test_pydantic_v1_fields.py b/tests/serialization/test_pydantic_v1_fields.py new file mode 100644 index 00000000..5458e9e8 --- /dev/null +++ b/tests/serialization/test_pydantic_v1_fields.py @@ -0,0 +1,217 @@ +import json +import math + +import pytest +from pydantic import Field, conint +from pydantic.v1.error_wrappers import ValidationError + +from dataclasses_avroschema import types +from dataclasses_avroschema.pydantic.v1.main import AvroBaseModel +from dataclasses_avroschema.schema_generator import AVRO, AVRO_JSON + + +class CustomClass: + def __init__(self, value: str) -> None: + self.value = value + + @classmethod + def __get_validators__(cls): + yield cls.validate + + @classmethod + def 
validate(cls, value): + if isinstance(value, CustomClass): + return value + elif not isinstance(value, str): + raise ValueError(f"Value must be a string or CustomClass - not {type(value)}") + + return cls(value) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CustomClass): + return NotImplemented + + return self.value == other.value + + def __str__(self) -> str: + return f"{self.value}" + + +class Parent(AvroBaseModel): + custom_class: CustomClass + + class Config: + json_encoders = {CustomClass: str} + + +parent_under_test = Parent(custom_class=CustomClass("custom class value")) +parent_avro_binary = b"$custom class value" +parent_avro_json = b'{"custom_class": "custom class value"}' + + +def test_int_constrained_type_serialize(): + class ConstrainedType(AvroBaseModel): + value: conint(gt=0) + + c = ConstrainedType(value=1) + serialized = c.serialize(serialization_type="avro-json") + assert serialized == b'{"value": 1}' + + +def test_int_constrained_type_deserialize(): + class ConstrainedType(AvroBaseModel): + value: conint(gt=0) + + c = ConstrainedType(value=1) + deserialized = ConstrainedType.deserialize(b'{"value": 1}', serialization_type="avro-json") + assert deserialized == c + + +def test_int_constrained_type_deserialize_invalid(): + class ConstrainedType(AvroBaseModel): + value: int = Field(gt=0) + + with pytest.raises(AssertionError): + ConstrainedType.deserialize(b'{"value": 0}', serialization_type="avro-json") + + +@pytest.mark.parametrize( + "serialization_type, expected_result", [(AVRO, parent_avro_binary), (AVRO_JSON, parent_avro_json)] +) +def test_custom_class_type_serialize(serialization_type: str, expected_result: bytes): + serialized = parent_under_test.serialize(serialization_type) + assert serialized == expected_result + + +@pytest.mark.parametrize("serialization_type, data", [(AVRO, parent_avro_binary), (AVRO_JSON, parent_avro_json)]) +def test_custom_class_type_deserialize(serialization_type: str, data: bytes): + 
deserialized = Parent.deserialize(data, serialization_type) + assert deserialized == parent_under_test + + +def test_custom_class_deserialize_invalid(): + with pytest.raises(ValidationError): + Parent.deserialize(b'{"custom_class": 1}', serialization_type="avro-json") + + +def test_primitive_types_with_defaults(): + class User(AvroBaseModel): + name: str = "marcos" + age: int = 20 + has_pets: bool = False + money: float = 100.0 + encoded: bytes = b"hola" + height: types.Int32 = 184 + + data = {"name": "marcos", "age": 20, "has_pets": False, "money": 100.0, "encoded": b"hola", "height": 184} + data_json = {"name": "marcos", "age": 20, "has_pets": False, "money": 100.0, "encoded": "hola", "height": 184} + + user = User() + avro_binary = user.serialize() + avro_json = user.serialize(serialization_type="avro-json") + + assert user.deserialize(avro_binary, create_instance=False) == data + assert user.deserialize(avro_json, serialization_type="avro-json", create_instance=False) == data + assert user.deserialize(avro_binary) == user + assert user.deserialize(avro_json, serialization_type="avro-json") == user + + assert user.to_dict() == data + assert user.to_json() == json.dumps(data_json) + + # check that works with schema evolution + user = User(name="Juan", age=30) + avro_json = user.serialize(serialization_type="avro-json") + + data = {"name": "Juan", "age": 30, "has_pets": False, "money": 100.0, "encoded": b"hola", "height": 184} + data_json = {"name": "Juan", "age": 30, "has_pets": False, "money": 100.0, "encoded": "hola", "height": 184} + + # assert user.deserialize(avro_binary, create_instance=False) == data + assert user.deserialize(avro_json, serialization_type="avro-json", create_instance=False) == data + + # assert user.deserialize(avro_binary) == user + assert user.deserialize(avro_json, serialization_type="avro-json") == user + assert user.to_dict() == data + assert user.to_json() == json.dumps(data_json) + + +def test_primitive_types_with_nulls(): + class 
User(AvroBaseModel): + name: str = None + age: int = 20 + has_pets: bool = False + money: float = None + encoded: bytes = None + height: types.Int32 = None + + data = {"name": None, "age": 20, "has_pets": False, "money": 100.0, "encoded": b"hola", "height": 184} + data_json = {"name": None, "age": 20, "has_pets": False, "money": 100.0, "encoded": "hola", "height": 184} + + user = User(**data) + avro_binary = user.serialize() + avro_json = user.serialize(serialization_type="avro-json") + + assert user.deserialize(avro_binary, create_instance=False) == data + assert user.deserialize(avro_json, serialization_type="avro-json", create_instance=False) == data + + assert user.deserialize(avro_binary) == user + assert user.deserialize(avro_json, serialization_type="avro-json") == user + + assert user.to_dict() == data + assert user.to_json() == json.dumps(data_json) + + data = {"name": None, "age": 20, "has_pets": False, "money": None, "encoded": None, "height": None} + + user = User() + avro_binary = user.serialize() + avro_json = user.serialize(serialization_type="avro-json") + + assert user.deserialize(avro_binary, create_instance=False) == data + assert user.deserialize(avro_json, serialization_type="avro-json", create_instance=False) == data + assert user.deserialize(avro_binary) == user + assert user.deserialize(avro_json, serialization_type="avro-json") == user + + assert user.to_dict() == data + assert user.to_json() == json.dumps(data) + + +def test_float32_primitive_type(): + class User(AvroBaseModel): + height: types.Float32 = None + + data = {"height": 178.3} + + user = User(**data) + avro_binary = user.serialize() + avro_json = user.serialize(serialization_type="avro-json") + + # Floating point error expected + res = user.deserialize(avro_binary, create_instance=False) + assert res["height"] != data["height"] + assert math.isclose(res["height"], data["height"], abs_tol=1e-5) + + res = user.deserialize(avro_json, serialization_type="avro-json", 
create_instance=False) + assert res["height"] == data["height"] + + # Floating point error expected + res = user.deserialize(avro_binary) + assert res.height != user.height + assert math.isclose(res.height, user.height, abs_tol=1e-5) + + res = user.deserialize(avro_json, serialization_type="avro-json") + assert res.height == user.height + + res = user.to_dict() + assert res["height"] == data["height"] + + data = {"height": None} + + user = User() + avro_binary = user.serialize() + avro_json = user.serialize(serialization_type="avro-json") + + assert user.deserialize(avro_binary, create_instance=False) == data + assert user.deserialize(avro_json, serialization_type="avro-json", create_instance=False) == data + assert user.deserialize(avro_binary) == user + assert user.deserialize(avro_json, serialization_type="avro-json") == user + + assert user.to_dict() == data