From 95b5170ab2ea820ec9bbf884563c669d9bb8a48d Mon Sep 17 00:00:00 2001
From: Chris Modzelewski
Date: Sat, 13 Feb 2021 20:33:39 -0500
Subject: [PATCH 01/24] Established Python 2.7 test dependencies.

---
 setup.py | 6 ++++--
 tox.ini  | 3 +++
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/setup.py b/setup.py
index a297eb4..2d03237 100644
--- a/setup.py
+++ b/setup.py
@@ -172,14 +172,16 @@
                  'sphinx-tabs',
                  'readme-renderer',
                  'restview',
-                 'Flask-SQLAlchemy'],
+                 'Flask-SQLAlchemy',
+                 'pydantic;python_version>="3.6"'],
        'test': ['coverage',
                 'pytest',
                 'pytest-benchmark',
                 'pytest-cov',
                 'tox',
                 'codecov',
-                'Flask-SQLAlchemy'],
+                'Flask-SQLAlchemy',
+                'pydantic;python_version>="3.6"'],
     },
     python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4',
diff --git a/tox.ini b/tox.ini
index 605c6b6..f80ec05 100644
--- a/tox.ini
+++ b/tox.ini
@@ -11,6 +11,9 @@ deps =
     coverage
     codecov
     pytest-cov
+    .python36: pydantic
+    .python37: pydantic
+    .python38: pydantic
     .python34: pyrsistent==0.14.0
     validator-collection
     Flask-SQLAlchemy

From a889157d7b24cbe3bf911d6eb6872663fa8394d9 Mon Sep 17 00:00:00 2001
From: Chris Modzelewski
Date: Sat, 13 Feb 2021 20:34:27 -0500
Subject: [PATCH 02/24] Added Pydantic support to AttributeConfiguration.

---
 sqlathanor/attributes.py                      | 400 +++++++++++++++++-
 .../declarative/_base_configuration_mixin.py  |   1 +
 sqlathanor/errors.py                          |   6 +
 tests/test_attributes.py                      |  53 +++
 4 files changed, 459 insertions(+), 1 deletion(-)

diff --git a/sqlathanor/attributes.py b/sqlathanor/attributes.py
index 04f8f4d..1277f40 100644
--- a/sqlathanor/attributes.py
+++ b/sqlathanor/attributes.py
@@ -12,6 +12,7 @@

 from sqlathanor._serialization_support import SerializationMixin
 from sqlathanor.utilities import bool_to_tuple, callable_to_dict
+from sqlathanor import errors


 BLANK_ON_SERIALIZE = {
@@ -172,11 +173,26 @@ def __init__(self,
           provided in ``name``. Defaults to :obj:`None `
         :type display_name: :class:`str ` / :obj:`None `

+        :param pydantic_field: An optional Pydantic
+          :class:`ModelField ` which can be used to
+          validate the attribute on serialization. Defaults to :obj:`None `.
+
+          .. note::
+
+            If present, values will be validated against the Pydantic field *after* any
+            ``on_serialize`` function is executed.
+
+        :type pydantic_field: :class:`pydantic.fields.ModelField `
+          / :class:`pydantic.fields.FieldInfo `
+          / :obj:`None `
+
         """
         object.__setattr__(self, '_dict_proxy', {})

         self._current = -1
+        self._pydantic_field = None
         self._name = None

         self.name = kwargs.pop('name', None)
+        self.pydantic_field = kwargs.pop('pydantic_field', None)
         attribute = kwargs.pop('attribute', None)

         super(AttributeConfiguration, self).__init__(*args, **kwargs)
@@ -401,6 +417,27 @@ def name(self, value):
         value = validators.string(value, allow_empty = True)
         self._name = value

+    @property
+    def pydantic_field(self):
+        """A Pydantic :class:`ModelField` object that can be used to validate the
+        attribute. Defaults to :obj:`None `.
+
+        :rtype: :class:`pydantic.fields.ModelField ` /
+          :class:`pydantic.fields.FieldInfo ` /
+          :obj:`None `
+        """
+        return self._pydantic_field
+
+    @pydantic_field.setter
+    def pydantic_field(self, value):
+        if not value:
+            value = None
+        elif not checkers.is_type(value, ('ModelField', 'FieldInfo')):
+            raise ValueError('value must be a Pydantic ModelField or FieldInfo object. 
' + 'Was: %s' % type(value)) + + self._pydantic_field = value + @classmethod def from_attribute(cls, attribute): """Return an instance of :class:`AttributeConfiguration` configured for a @@ -423,9 +460,365 @@ def copy(self): return new_instance + @classmethod + def from_pydantic_model(cls, + model, + name, + **kwargs): + """Return a new :class:`AttributeConfiguration` instance produced from a + Pydantic model's field definition for ``name``. + + :param model: The :term:`Pydantic model` whose field should be used. + :type model: Pydantic :class:`ModelMetaclass ` + + :param name: The name of the field to convert to convert to an + :class:`AttributeConfiguration` + :type name: :class:`str ` + + :param supports_csv: Determines whether the column can be serialized to or + de-serialized from CSV format. + + If ``True``, can be serialized to CSV and de-serialized from CSV. If + ``False``, will not be included when serialized to CSV and will be ignored + if present in a de-serialized CSV. + + Can also accept a 2-member :class:`tuple ` (inbound / outbound) + which determines de-serialization and serialization support respectively. + + Defaults to ``False``, which means the column will not be serialized to CSV + or de-serialized from CSV. + + :type supports_csv: :class:`bool ` / :class:`tuple ` of + form (inbound: :class:`bool `, outbound: :class:`bool `) + + :param supports_json: Determines whether the column can be serialized to or + de-serialized from JSON format. + + If ``True``, can be serialized to JSON and de-serialized from JSON. + If ``False``, will not be included when serialized to JSON and will be + ignored if present in a de-serialized JSON. + + Can also accept a 2-member :class:`tuple ` (inbound / outbound) + which determines de-serialization and serialization support respectively. + + Defaults to ``False``, which means the column will not be serialized to JSON + or de-serialized from JSON. + + :type supports_json: :class:`bool ` / :class:`tuple ` of + form (inbound: :class:`bool `, outbound: :class:`bool `) + + :param supports_yaml: Determines whether the column can be serialized to or + de-serialized from YAML format. + + If ``True``, can be serialized to YAML and de-serialized from YAML. + If ``False``, will not be included when serialized to YAML and will be + ignored if present in a de-serialized YAML. + + Can also accept a 2-member :class:`tuple ` (inbound / outbound) + which determines de-serialization and serialization support respectively. + + Defaults to ``False``, which means the column will not be serialized to YAML + or de-serialized from YAML. + + :type supports_yaml: :class:`bool ` / :class:`tuple ` of + form (inbound: :class:`bool `, outbound: :class:`bool `) + + :param supports_dict: Determines whether the column can be serialized to or + de-serialized to a Python :class:`dict `. + + If ``True``, can be serialized to :class:`dict ` and de-serialized + from a :class:`dict `. If ``False``, will not be included + when serialized to :class:`dict ` and will be ignored if + present in a de-serialized :class:`dict `. + + Can also accept a 2-member :class:`tuple ` (inbound / outbound) + which determines de-serialization and serialization support respectively. + + Defaults to ``False``, which means the column will not be serialized to a + :class:`dict ` or de-serialized from a :class:`dict `. 
+ + :type supports_dict: :class:`bool ` / :class:`tuple ` of + form (inbound: :class:`bool `, outbound: :class:`bool `) + + :param on_deserialize: A function that will be called when attempting to + assign a de-serialized value to the column. This is intended to either coerce + the value being assigned to a form that is acceptable by the column, or + raise an exception if it cannot be coerced. If :obj:`None `, the data + type's default ``on_deserialize`` function will be called instead. + + .. tip:: + + If you need to execute different ``on_deserialize`` functions for + different formats, you can also supply a :class:`dict `: + + .. code-block:: python + + on_deserialize = { + 'csv': csv_on_deserialize_callable, + 'json': json_on_deserialize_callable, + 'yaml': yaml_on_deserialize_callable, + 'dict': dict_on_deserialize_callable + } + + Defaults to :obj:`None `. + + :type on_deserialize: callable / :class:`dict ` with formats + as keys and values as callables + + :param on_serialize: A function that will be called when attempting to + serialize a value from the column. If :obj:`None `, the data + type's default ``on_serialize`` function will be called instead. + + .. tip:: + + If you need to execute different ``on_serialize`` functions for + different formats, you can also supply a :class:`dict `: + + .. code-block:: python + + on_serialize = { + 'csv': csv_on_serialize_callable, + 'json': json_on_serialize_callable, + 'yaml': yaml_on_serialize_callable, + 'dict': dict_on_serialize_callable + } + + Defaults to :obj:`None `. + + :type on_serialize: callable / :class:`dict ` with formats + as keys and values as callables + + :param csv_sequence: Indicates the numbered position that the column should be in + in a valid CSV-version of the object. Defaults to :obj:`None `. + + .. note:: + + If not specified, the column will go after any columns that *do* have a + ``csv_sequence`` assigned, sorted alphabetically. + + If two columns have the same ``csv_sequence``, they will be sorted + alphabetically. + + :type csv_sequence: :class:`int ` / :obj:`None ` + + :returns: An :class:`AttributeConfiguration` for attribute ``name`` derived from + the Pydantic model. + :rtype: :class:`AttributeConfiguration` + + :raises ValueError: if ``model`` is not a Pydantic + :class:`ModelMetaclass ` + + :raises validator_collection.errors.InvalidVariableName: if ``name`` is not a + valid variable name + :raises FieldNotFoundError: if a field with ``name`` is not found within ``model`` + + """ + if not checkers.is_type(model, ('BaseModel', 'ModelMetaclass')): + raise ValueError('model must be a Pydantic Model. Was: %s' % type(model)) + + name = validators.variable_name(name, allow_empty = False) + if name not in model.__fields__: + raise errors.FieldNotFoundError( + 'name ("%s") not found in the Pydantic model' % name + ) + + field = model.__fields__[name] + + if not kwargs: + kwargs = {} + kwargs['name'] = name + kwargs['pydantic_field'] = field + + return_value = cls(**kwargs) + + return return_value + + +def convert_pydantic_model(model, + **kwargs): + """Convert a Pydantic model to a collection of :class:`AttributeConfiguration` objects. + + :param model: The Pydantic + :class:`ModelMetaclass ` to + convert. + + .. caution:: + + This parameter is expected to be a **class** object, not an **instance** object. + In a Pydantic context, it is the class that you define which inherits from + Pydantic ``BaseModel``. + + Thus, if your Pydantic model definition looks like this: + + .. 
code-block:: python + + from pydantic import BaseModel + + class User(BaseModel): + id: int + username: str + email: str + + user = User(id = 123, username = 'test_username', email = 'email@domain.dev') + + you would pass convert ``User`` and not ``user``: + + .. code-block:: python + + attribute_configurations = convert_pydantic_model(User) + + :type model: :class:`pydantic.main.ModelMetaclass ` + + :param supports_csv: Determines whether the column can be serialized to or + de-serialized from CSV format. + + If ``True``, can be serialized to CSV and de-serialized from CSV. If + ``False``, will not be included when serialized to CSV and will be ignored + if present in a de-serialized CSV. + + Can also accept a 2-member :class:`tuple ` (inbound / outbound) + which determines de-serialization and serialization support respectively. + + Defaults to ``False``, which means the column will not be serialized to CSV + or de-serialized from CSV. + + :type supports_csv: :class:`bool ` / :class:`tuple ` of + form (inbound: :class:`bool `, outbound: :class:`bool `) + + :param supports_json: Determines whether the column can be serialized to or + de-serialized from JSON format. + + If ``True``, can be serialized to JSON and de-serialized from JSON. + If ``False``, will not be included when serialized to JSON and will be + ignored if present in a de-serialized JSON. + + Can also accept a 2-member :class:`tuple ` (inbound / outbound) + which determines de-serialization and serialization support respectively. + + Defaults to ``False``, which means the column will not be serialized to JSON + or de-serialized from JSON. + + :type supports_json: :class:`bool ` / :class:`tuple ` of + form (inbound: :class:`bool `, outbound: :class:`bool `) + + :param supports_yaml: Determines whether the column can be serialized to or + de-serialized from YAML format. + + If ``True``, can be serialized to YAML and de-serialized from YAML. + If ``False``, will not be included when serialized to YAML and will be + ignored if present in a de-serialized YAML. + + Can also accept a 2-member :class:`tuple ` (inbound / outbound) + which determines de-serialization and serialization support respectively. + + Defaults to ``False``, which means the column will not be serialized to YAML + or de-serialized from YAML. + + :type supports_yaml: :class:`bool ` / :class:`tuple ` of + form (inbound: :class:`bool `, outbound: :class:`bool `) + + :param supports_dict: Determines whether the column can be serialized to or + de-serialized to a Python :class:`dict `. + + If ``True``, can be serialized to :class:`dict ` and de-serialized + from a :class:`dict `. If ``False``, will not be included + when serialized to :class:`dict ` and will be ignored if + present in a de-serialized :class:`dict `. + + Can also accept a 2-member :class:`tuple ` (inbound / outbound) + which determines de-serialization and serialization support respectively. + + Defaults to ``False``, which means the column will not be serialized to a + :class:`dict ` or de-serialized from a :class:`dict `. + + :type supports_dict: :class:`bool ` / :class:`tuple ` of + form (inbound: :class:`bool `, outbound: :class:`bool `) + + :param on_deserialize: A function that will be called when attempting to + assign a de-serialized value to the column. This is intended to either coerce + the value being assigned to a form that is acceptable by the column, or + raise an exception if it cannot be coerced. 
If :obj:`None `, the data + type's default ``on_deserialize`` function will be called instead. + + .. tip:: + + If you need to execute different ``on_deserialize`` functions for + different formats, you can also supply a :class:`dict `: + + .. code-block:: python + + on_deserialize = { + 'csv': csv_on_deserialize_callable, + 'json': json_on_deserialize_callable, + 'yaml': yaml_on_deserialize_callable, + 'dict': dict_on_deserialize_callable + } + + Defaults to :obj:`None `. + + :type on_deserialize: callable / :class:`dict ` with formats + as keys and values as callables + + :param on_serialize: A function that will be called when attempting to + serialize a value from the column. If :obj:`None `, the data + type's default ``on_serialize`` function will be called instead. + + .. tip:: + + If you need to execute different ``on_serialize`` functions for + different formats, you can also supply a :class:`dict `: + + .. code-block:: python + + on_serialize = { + 'csv': csv_on_serialize_callable, + 'json': json_on_serialize_callable, + 'yaml': yaml_on_serialize_callable, + 'dict': dict_on_serialize_callable + } + + Defaults to :obj:`None `. + + :type on_serialize: callable / :class:`dict ` with formats + as keys and values as callables + + :param csv_sequence: Indicates the numbered position that the column should be in + in a valid CSV-version of the object. Defaults to :obj:`None `. + + .. note:: + + If not specified, the column will go after any columns that *do* have a + ``csv_sequence`` assigned, sorted alphabetically. + + If two columns have the same ``csv_sequence``, they will be sorted + alphabetically. + + :type csv_sequence: :class:`int ` / :obj:`None ` + + :returns: A collection of :class:`AttributeConfiguration` objects. + :rtype: :class:`list ` of :class:`AttributeConfiguration` objects + + :raises ValueError: if ``model`` is not a Pydantic + :class:`BaseModel ` + + """ + if not checkers.is_type(model, ('ModelMetaclass')): + raise ValueError('model must be a Pydantic ModelMetaclass object. ' + 'Was: %s' % model.__class__.__name__) + + attribute_names = [x for x in model.__fields__] + + return_value = [AttributeConfiguration.from_pydantic_model(model, + name = x, + **kwargs) + for x in attribute_names] + + return return_value + def validate_serialization_config(config): - """Validate that ``config`` contains :class:`AttributeConfiguration` objects. + """Validate that ``config`` contains or can be converted to + :class:`AttributeConfiguration` objects. 
:param config: Object or iterable of objects that represent :class:`AttributeConfigurations ` @@ -433,6 +826,8 @@ def validate_serialization_config(config): :class:`dict ` objects corresponding to a :class:`AttributeConfiguration` / :class:`AttributeConfiguration` / :class:`dict ` object corresponding to a :class:`AttributeConfiguration` + / :class:`pydantic.main.ModelMetaclass ` + object whose fields correspond to :class:`AttributeConfiguration` objects :rtype: :class:`list ` of :class:`AttributeConfiguration` objects """ @@ -446,6 +841,9 @@ def validate_serialization_config(config): if not config: return [] + if checkers.is_type(config[0], ('BaseModel', 'ModelMetaclass')): + config = convert_pydantic_model(config[0]) + return_value = [] for item in config: if isinstance(item, AttributeConfiguration) and item not in return_value: diff --git a/sqlathanor/declarative/_base_configuration_mixin.py b/sqlathanor/declarative/_base_configuration_mixin.py index a57a0fd..e027642 100644 --- a/sqlathanor/declarative/_base_configuration_mixin.py +++ b/sqlathanor/declarative/_base_configuration_mixin.py @@ -1013,6 +1013,7 @@ def configure_serialization(cls, objects to apply to the class. Defaults to :obj:`None `. :type configs: iterable of :class:`AttributeConfiguration ` / + :class:`pydantic.main.ModelMetaclass` / :obj:`None ` :param attributes: Collection of :term:`model attribute` names to which diff --git a/sqlathanor/errors.py b/sqlathanor/errors.py index 2b90295..8f2e047 100644 --- a/sqlathanor/errors.py +++ b/sqlathanor/errors.py @@ -97,3 +97,9 @@ class ExtraKeyError(DeserializationError): class UnsupportedValueTypeError(DeserializationError): """Error raised when a value type found in a serialized string is not supported.""" pass + +class FieldNotFoundError(SQLAthanorError): + """Error raised when a Pydantic :class:`Field ` name + is not found within a Pydantic :class:`BaseModel `. + """ + pass diff --git a/tests/test_attributes.py b/tests/test_attributes.py index 0b13d5f..37a3e91 100644 --- a/tests/test_attributes.py +++ b/tests/test_attributes.py @@ -8,12 +8,31 @@ Tests for the schema extensions written in :ref:`sqlathanor.attributes`. 
""" +from typing import Any import pytest +from sqlathanor._compat import is_py36 + +if is_py36: + from pydantic import BaseModel + from pydantic.fields import Field, ModelField + + class PydanticModel(BaseModel): + field_1: int + field_2: str + field_3: Any +else: + def Field(*args, **kwargs): + return None + PydanticModel = 'Python <3.6' + + from sqlathanor.attributes import AttributeConfiguration, validate_serialization_config, \ BLANK_ON_SERIALIZE from sqlathanor.utilities import bool_to_tuple, callable_to_dict +from sqlathanor.errors import FieldNotFoundError + @pytest.mark.parametrize('kwargs', [ (None), @@ -48,6 +67,18 @@ 'on_deserialize': None, 'display_name': 'some_display_name' }), + ({ + 'name': 'test_pydantic_field', + 'supports_csv': False, + 'csv_sequence': None, + 'supports_json': False, + 'supports_yaml': False, + 'supports_dict': False, + 'on_serialize': None, + 'on_deserialize': None, + 'pydantic_field': Field(alias = 'test_pydantic_field', + title = 'Test Pydantic Field') + }) ]) def test_AttributeConfiguration__init__(kwargs): if kwargs is None: @@ -58,6 +89,7 @@ def test_AttributeConfiguration__init__(kwargs): assert result.name == kwargs.get('name', None) assert result.display_name == kwargs.get('display_name', None) + assert result.pydantic_field == kwargs.get('pydantic_field', None) assert result.supports_csv == bool_to_tuple(kwargs.get('supports_csv', (False, False))) assert result.csv_sequence == kwargs.get('csv_sequence', None) @@ -145,6 +177,7 @@ def test_AttributeConfiguration_keys(): assert keys is not None assert len(keys) == len(config) + def test_AttributeConfiguration__iterate__(): config = AttributeConfiguration() length = len(config) @@ -157,6 +190,22 @@ def test_AttributeConfiguration__iterate__(): assert len(config.values()) == len(config) == len(config.keys()) == index +if is_py36: + @pytest.mark.parametrize('model, name, error', [ + (PydanticModel, 'field_1', None), + (PydanticModel, 'missing_field', FieldNotFoundError), + ]) + def test_AttributeConfiguration_from_pydantic_model(model, name, error): + if not error: + result = AttributeConfiguration.from_pydantic_model(model, name) + assert result.name == name + assert result.pydantic_field is not None + assert isinstance(result.pydantic_field, ModelField) + else: + with pytest.raises(error): + result = AttributeConfiguration.from_pydantic_model(model, name) + + @pytest.mark.parametrize('config, expected_length', [ ([], 0), (None, 0), @@ -165,8 +214,12 @@ def test_AttributeConfiguration__iterate__(): ([AttributeConfiguration(), AttributeConfiguration()], 1), ({ 'name': 'test_1' }, 1), ([{ 'name': 'test_2' }, {'name': 'test_3'}], 2), + (PydanticModel, 3), ]) def test_validate_serialization_config(config, expected_length): + if config == 'Python <3.6': + config = None + expected_length = 0 result = validate_serialization_config(config) assert len(result) == expected_length if len(result) > 0: From b89c54e00802feabdf5eb7a501aeceeee9b5b5f1 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Sat, 13 Feb 2021 21:30:45 -0500 Subject: [PATCH 03/24] Added Pydantic field serialization as a pre-processor to de-serializing values. 
--- sqlathanor/attributes.py | 2 +- .../declarative/_base_configuration_mixin.py | 9 +++- sqlathanor/declarative/base_model.py | 14 +++++- tests/test_deserializers.py | 49 +++++++++++++++++++ 4 files changed, 69 insertions(+), 5 deletions(-) diff --git a/sqlathanor/attributes.py b/sqlathanor/attributes.py index 1277f40..8cc7035 100644 --- a/sqlathanor/attributes.py +++ b/sqlathanor/attributes.py @@ -180,7 +180,7 @@ def __init__(self, .. note:: If present, values will be validated against the Pydantic field *after* any - ``on_serialize`` function is executed. + ``on_deserialize`` function is executed. :type pydantic_field: :class:`pydantic.fields.ModelField ` / :class:`pydantic.fields.FieldInfo ` diff --git a/sqlathanor/declarative/_base_configuration_mixin.py b/sqlathanor/declarative/_base_configuration_mixin.py index e027642..31798a6 100644 --- a/sqlathanor/declarative/_base_configuration_mixin.py +++ b/sqlathanor/declarative/_base_configuration_mixin.py @@ -698,7 +698,8 @@ def set_attribute_serialization_config(cls, :class:`AttributeConfiguration ` to apply. If :obj:`None `, will set particular values based on their corresponding keyword arguments. - :type config: :class:`AttributeConfiguration ` + :type config: :class:`AttributeConfiguration ` / + / Pydantic :class:`ModelField ` object / / :obj:`None ` :param supports_csv: Determines whether the column can be serialized to or @@ -921,7 +922,11 @@ def set_attribute_serialization_config(cls, if config is None: new_config = AttributeConfiguration(name = attribute) else: - new_config = config + if checkers.is_type(config, 'ModelField'): + new_config = AttributeConfiguration(name = attribute, + pydantic_field = config) + else: + new_config = config if attribute != new_config.name: raise ValueError( diff --git a/sqlathanor/declarative/base_model.py b/sqlathanor/declarative/base_model.py index a198ddd..5e47ba1 100644 --- a/sqlathanor/declarative/base_model.py +++ b/sqlathanor/declarative/base_model.py @@ -285,10 +285,20 @@ def _get_deserialized_value(cls, return_value = on_deserialize(value) except Exception: raise ValueDeserializationError( - "attribute '%s' failed de-serialization to format '%s'" % (attribute, - format) + "attribute '%s' failed de-serialization from format '%s'" % (attribute, + format) ) + if config.pydantic_field: + return_value, error_list = config.pydantic_field.validate(return_value) + if error_list: + error_message = ( + "attribute '%s' failed de-serialization from format '%s'" % (attribute, + format) + ) + error_message += '\n-- Pydantic Validation Errors:\n{%s}' % error_list + + raise ValueDeserializationError(error_list) return return_value diff --git a/tests/test_deserializers.py b/tests/test_deserializers.py index 2eb39ef..73ef422 100644 --- a/tests/test_deserializers.py +++ b/tests/test_deserializers.py @@ -19,6 +19,21 @@ from sqlathanor.errors import InvalidFormatError, ValueDeserializationError, \ UnsupportedDeserializationError +from sqlathanor._compat import is_py36 + +if is_py36: + from pydantic import BaseModel + from pydantic.fields import Field, ModelField + + class PydanticModel(BaseModel): + id: datetime.timedelta + + pydantic_field = PydanticModel.__fields__.get('id', None) + +else: + pydantic_field = None + PydanticModel = 'Python <3.6' + @pytest.mark.parametrize('attribute, format, input_value, expected_result, error', [ ('name', 'csv', 'serialized', 'deserialized', None), @@ -50,3 +65,37 @@ def test__get_deserialized_value(request, else: with pytest.raises(error): result = 
target._get_deserialized_value(input_value, format, attribute) + + +if is_py36: + @pytest.mark.parametrize('attribute, format, input_value, pydantic_field, expected_result, error', [ + ('id', 'csv', '1', pydantic_field, 1, None), + ('id', 'csv', 'invalid', pydantic_field, None, ValueDeserializationError), + ('id', 'yaml', '1', pydantic_field, 1, UnsupportedDeserializationError), + + ]) + def test__get_deserialized_value_pydantic(request, + model_complex_postgresql, + instance_postgresql, + attribute, + format, + input_value, + pydantic_field, + expected_result, + error): + model = model_complex_postgresql[0] + model.set_attribute_serialization_config('id', + config = pydantic_field, + supports_csv = True, + supports_json = True, + supports_yaml = False, + supports_dict = True) + instance_values = instance_postgresql[1][0] + target = model(**instance_values) + + if not error: + result = target._get_deserialized_value(input_value, format, attribute) + assert result == expected_result + else: + with pytest.raises(error): + result = target._get_deserialized_value(input_value, format, attribute) From da45b234f97d1d41a67e8304993d74d5c15cab85 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Sat, 13 Feb 2021 22:14:29 -0500 Subject: [PATCH 04/24] Added documentation pf the config_set parameter for BaseModel.set_attribute_serialization_config(). Closes #100. --- sqlathanor/declarative/_base_configuration_mixin.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/sqlathanor/declarative/_base_configuration_mixin.py b/sqlathanor/declarative/_base_configuration_mixin.py index 31798a6..30af5a2 100644 --- a/sqlathanor/declarative/_base_configuration_mixin.py +++ b/sqlathanor/declarative/_base_configuration_mixin.py @@ -889,6 +889,17 @@ def set_attribute_serialization_config(cls, :type csv_sequence: :class:`int ` / :obj:`None ` / ``False`` + :param config_set: The name of the :term:`configuration set` where the + serialization/de-serialization configuration for ``attribute`` should be + updated. Defaults to :obj:`None ` + + .. warning:: + + If the ``config_set`` is not defined on the model, then a + :exc:`ValueError ` will be raised. + + :type config_set: :class:`str ` / :obj:`None ` + :raises ConfigurationError: if ``config_set`` is not empty and there are no configuration sets defined on ``cls`` or if there are configuration sets defined but no ``config_set`` is specified From 8a7e6a444a91594678823eb631188facfca2792b Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Mon, 15 Feb 2021 16:46:49 -0500 Subject: [PATCH 05/24] Implemented column consolidation and config set generation from one or more Pydantic models with unit tests. --- sqlathanor/default_deserializers.py | 115 +++++++++++++++++++++ sqlathanor/utilities.py | 155 ++++++++++++++++++++++++++++ tests/test_utilities.py | 70 ++++++++++++- 3 files changed, 339 insertions(+), 1 deletion(-) diff --git a/sqlathanor/default_deserializers.py b/sqlathanor/default_deserializers.py index 399db3a..0195725 100644 --- a/sqlathanor/default_deserializers.py +++ b/sqlathanor/default_deserializers.py @@ -6,8 +6,11 @@ # there as needed. 
import datetime +from decimal import Decimal import io +from typing import get_origin, get_args, Any, Optional, Union, Mapping, List + from validator_collection import validators, checkers from sqlathanor._compat import json @@ -235,6 +238,118 @@ def get_type_mapping(value, return column_type +def get_pydantic_type_mapping(field, + type_mapping = None, + skip_nested = True, + default_to_str = False): + """Retrieve the SQL type mapping for ``field``. + + :param field: The Pydantic :class:`ModelField ` + whose SQL type will be returned. + :type field: :class:`ModelField ` + + :param type_mapping: Determines how the value type of ``value`` map to + SQL column data types. To add a new mapping or override a default, set a + key to the name of the value type in Python, and set the value to a + :doc:`SQLAlchemy Data Type `. The following are the + default mappings applied: + + .. list-table:: + :widths: 30 30 + :header-rows: 1 + + * - Python Literal + - SQL Column Type + * - ``bool`` + - :class:`Boolean ` + * - ``str`` + - :class:`Text ` + * - ``int`` + - :class:`Integer ` + * - ``float`` + - :class:`Float ` + * - ``date`` + - :class:`Date ` + * - ``datetime`` + - :class:`DateTime ` + * - ``time`` + - :class:`Time ` + * - ``timedelta`` + - :class:`Interval ` + + :type type_mapping: :class:`dict ` with type names as keys and + column data types as values / :obj:`None ` + + :param skip_nested: If ``True`` then if ``value`` is a nested item (e.g. + iterable, :class:`dict ` objects, etc.) it will return + :obj:`None `. If ``False``, will treat nested items as + :class:`str `. Defaults to ``True``. + :type skip_nested: :class:`bool ` + + :param default_to_str: If ``True``, will automatically set a ``value`` whose + value type cannot be determined to ``str`` + (:class:`Text `). If ``False``, will + use the value type's ``__name__`` attribute and attempt to find a mapping. + Defaults to ``False``. + :type default_to_str: :class:`bool ` + + :returns: The :doc:`SQLAlchemy Data Type ` for ``value``, or + :obj:`None ` if the value should be skipped + :rtype: :doc:`SQLAlchemy Data Type ` / :obj:`None` + + :raises UnsupportedValueTypeError: when ``value`` does not have corresponding + :doc:`SQLAlchemy Data Type ` + + """ + if not checkers.is_type(field, 'ModelField'): + raise UnsupportedValueTypeError('field must be a Pydantic ModelField. 
' + 'Was: %s' % type(field)) + + if not type_mapping: + type_mapping = DEFAULT_PYTHON_SQL_TYPE_MAPPING + + for key in DEFAULT_PYTHON_SQL_TYPE_MAPPING: + if key not in type_mapping: + type_mapping[key] = DEFAULT_PYTHON_SQL_TYPE_MAPPING[key] + + if field.type_ in (list, set, frozenset, Mapping, dict) and skip_nested: + return None + elif field.type_ in (list, set, frozenset, Any) and default_to_str: + target_type = 'str' + elif field.type_ is None and default_to_str: + target_type = 'str' + elif field.type_ == bool or (get_origin(field.type_) in (Union, Optional, List) and + get_args(field.type_)[0] == str): + target_type = 'bool' + elif field.type_ == int or (get_origin(field.type_) in (Union, Optional, List) and + get_args(field.type_)[0] == int): + target_type = 'int' + elif field.type_ in (float, Decimal) or (get_origin(field.type_) in (Union, Optional, List) and + get_args(field.type_)[0] in (float, Decimal)): + target_type = 'float' + elif field.type_ == datetime.time or (get_origin(field.type_) in (Union, Optional, List) and + get_args(field.type_)[0] == datetime.time): + target_type = 'time' + elif field.type_ == datetime.datetime or (get_origin(field.type_) in (Union, Optional, List) and + get_args(field.type_)[0] == datetime.datetime): + target_type = 'datetime' + elif field.type_ == datetime.date or (get_origin(field.type_) in (Union, Optional, List) and + get_args(field.type_)[0] == datetime.date): + target_type = 'date' + elif field.type_ == Any: + target_type = 'str' + elif default_to_str: + target_type = 'str' + else: + target_type = field.type_.__name__ + + column_type = type_mapping.get(target_type, None) + if not column_type: + raise UnsupportedValueTypeError( + 'field type (%s) is not a supported type (%s)' % (field.type_, target_type) + ) + + return column_type DEFAULT_DESERIALIZERS = { diff --git a/sqlathanor/utilities.py b/sqlathanor/utilities.py index 4782de4..a6665ec 100644 --- a/sqlathanor/utilities.py +++ b/sqlathanor/utilities.py @@ -831,3 +831,158 @@ def read_csv_data(input_data, input_data = parsed_data[1] return input_data + + +def columns_from_pydantic(config_sets, + primary_key, + skip_nested = True, + default_to_str = False, + type_mapping = None, + **kwargs): + """Generate a set of :class:`Column ` instances and a + corresponding collection of + :class:`AttributeConfiguration ` objects + from a set of :term:`Pydantic Models `. + + :param config_sets: A collection of :term:`Pydantic Models ` organized + into a :class:`dict ` whose keys correspond to the name of a + :term:`configuration set` and whose values are an iterable of + :term:`pydantic.BaseModel ` classes. + :type config_sets: :class:`dict ` with :class:`str ` keys + and whose values are an iterable of + :term:`pydantic.BaseModel ` classes. + + :param primary_key: The name of the column/key that should be used as the table's + primary key. + :type primary_key: :class:`str ` + + :param cls: The base class to use when generating a new :term:`model class`. + Defaults to :class:`BaseModel` to provide serialization/de-serialization + support. + + If a :class:`tuple ` of classes, will include :class:`BaseModel` + in that list of classes to mixin serialization/de-serialization support. + + If not :obj:`None ` and not a :class:`tuple `, will mixin + :class:`BaseModel` with the value passed to provide + serialization/de-serialization support. 
+ :type cls: :obj:`None ` / :class:`tuple ` of classes / + class object + + :param skip_nested: If ``True`` then any keys in ``serialized_dict`` that + feature nested items (e.g. iterables, :class:`dict ` objects, + etc.) will be ignored. If ``False``, will treat serialized items as + :class:`str `. Defaults to ``True``. + :type skip_nested: :class:`bool ` + + :param default_to_str: If ``True``, will automatically set a key/column whose + value type cannot be determined to ``str`` + (:class:`Text `). If ``False``, will + use the value type's ``__name__`` attribute and attempt to find a mapping. + Defaults to ``False``. + :type default_to_str: :class:`bool ` + + :param type_mapping: Determines how value types in ``pydantic_models`` map to + SQL column data types. To add a new mapping or override a default, set a + key to the name of the value type in Python, and set the value to a + :doc:`SQLAlchemy Data Type `. The following are the + default mappings applied: + + .. list-table:: + :widths: 30 30 + :header-rows: 1 + + * - Python Literal + - SQL Column Type + * - ``bool`` + - :class:`Boolean ` + * - ``str`` + - :class:`Text ` + * - ``int`` + - :class:`Integer ` + * - ``float`` + - :class:`Float ` + * - ``date`` + - :class:`Date ` + * - ``datetime`` + - :class:`DateTime ` + * - ``time`` + - :class:`Time ` + + :type type_mapping: :class:`dict ` with type names as keys and + column data types as values. + + :returns: A collection of :class:`Column ` instances and + a meta-style serialization configuration with one or more configuration sets. + :rtype: 2-member :class:`tuple ` where the first member is a + :class:`list ` of :class:`Column ` instances + and the second member is a meta-style serialization configuration + (:class:`list ` OR + :class:`dict ` of config sets) + """ + from sqlathanor.attributes import AttributeConfiguration + from sqlathanor.schema import Column + from sqlathanor.default_deserializers import get_pydantic_type_mapping + + serialization_config = {} + has_multiple_config_sets = True + if len(config_sets.keys()) == 1: + serialization_config = [] + has_multiple_config_sets = False + + column_names = [] + columns = [] + + for key in config_sets: + models = config_sets[key] + set_attribute_configs = [] + for model in models: + attribute_names = [validators.variable_name(x, allow_empty = False) + for x in model.__fields__ + if x not in column_names] + + attribute_configs = [AttributeConfiguration.from_pydantic_model(model, + name = x, + **kwargs) + for x in attribute_names] + + model_columns = [] + for field_name in attribute_names: + if field_name in column_names: + continue + + field = model.__fields__[field_name] + column_type = get_pydantic_type_mapping(field, + type_mapping = type_mapping, + skip_nested = skip_nested, + default_to_str = default_to_str) + if column_type is None: + continue + + if field_name == primary_key: + column = Column(name = field_name, + type_ = column_type, + primary_key = True) + else: + column = Column(name = field_name, + type_ = column_type) + + column.required = bool(field.required) + column.nullable = bool(field.allow_none) + + if field.default: + column.default = field.default + + model_columns.append(column) + column_names.append(field_name) + + columns.extend(model_columns) + + set_attribute_configs.extend(attribute_configs) + if has_multiple_config_sets: + serialization_config[key] = set_attribute_configs + else: + serialization_config.extend(set_attribute_configs) + + return columns, serialization_config diff --git 
a/tests/test_utilities.py b/tests/test_utilities.py index 423e4ea..93ad0e3 100644 --- a/tests/test_utilities.py +++ b/tests/test_utilities.py @@ -9,6 +9,7 @@ """ import os +from typing import Any, Union, Optional import pytest import sqlalchemy @@ -16,17 +17,44 @@ from validator_collection import checkers from validator_collection.errors import NotAnIterableError +from sqlathanor._compat import is_py36 + from tests.fixtures import db_engine, tables, base_model, db_session, \ model_complex_postgresql, instance_postgresql, input_files, check_input_file from sqlathanor.utilities import bool_to_tuple, callable_to_dict, format_to_tuple, \ get_class_type_key, raise_UnsupportedSerializationError, \ raise_UnsupportedDeserializationError, iterable__to_dict, parse_yaml, parse_json, \ - get_attribute_names, is_an_attribute, parse_csv, read_csv_data + get_attribute_names, is_an_attribute, parse_csv, read_csv_data, columns_from_pydantic from sqlathanor.errors import InvalidFormatError, UnsupportedSerializationError, \ UnsupportedDeserializationError, MaximumNestingExceededError, \ MaximumNestingExceededWarning, DeserializationError, CSVStructureError +if is_py36: + from pydantic import BaseModel + from pydantic.fields import Field, ModelField + + class PydanticModel(BaseModel): + id: int + field_1: str + field_2: str + + class PydanticModel2(BaseModel): + id: int + field_1: str + field_2: str + field_3: Any + + class PydanticModel3(BaseModel): + id: int + field_4: Optional[str] + field_5: bool + field_6: Union[str, int] +else: + def Field(*args, **kwargs): + return None + PydanticModel = 'Python <3.6' + def sample_callable(): @@ -450,3 +478,43 @@ def test_read_csv_data(input_files, input_data, single_record, expected_result): assert result == expected_result else: assert result.strip() == expected_result.strip() + + +if is_py36: + @pytest.mark.parametrize('kwargs, expected_columns, expected_config_sets, error', [ + ({'config_sets': { + '_single': [PydanticModel] + }, + 'primary_key': 'id'}, 3, 1, None), + ({'config_sets': { + 'set_one': [PydanticModel], + 'set_two': [PydanticModel2, PydanticModel3] + }, + 'primary_key': 'id'}, 7, 2, None), + ({'config_sets': { + 'set_one': [PydanticModel], + 'set_two': [PydanticModel2] + }, + 'primary_key': 'id'}, 4, 2, None), + ({'config_sets': { + 'set_one': [PydanticModel], + 'set_two': [PydanticModel2], + 'set_three': [PydanticModel3] + }, + 'primary_key': 'id'}, 7, 3, None), + ]) + def test_columns_from_pydantic(kwargs, expected_columns, expected_config_sets, error): + if not error: + columns, serialization_config = columns_from_pydantic(**kwargs) + assert len(columns) == expected_columns + if expected_config_sets > 1: + assert isinstance(serialization_config, dict) + assert len(serialization_config.keys()) == expected_config_sets + for config_set in serialization_config: + assert len(serialization_config[config_set]) <= expected_columns + else: + assert isinstance(serialization_config, list) + assert len(serialization_config) == expected_columns + else: + with pytest.raises(error): + columns, serialization_config = columns_from_pydantic(**kwargs) From 5e8463db251f27e227efdfc992aa11ea7592e3f2 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Mon, 15 Feb 2021 17:41:46 -0500 Subject: [PATCH 06/24] Implemented generate_model_from_pydantic() with unit tests. 
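
A minimal usage sketch (the ``UserSchema`` model here is an illustrative
assumption, not part of this patch):

    from pydantic import BaseModel

    from sqlathanor.declarative import generate_model_from_pydantic

    class UserSchema(BaseModel):
        id: int
        username: str
        email: str

    # All supplied Pydantic models are coalesced into a single declarative
    # model class with SQLAthanor serialization/de-serialization support.
    DatabaseUser = generate_model_from_pydantic([UserSchema],
                                                tablename = 'users',
                                                primary_key = 'id')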
--- sqlathanor/declarative/__init__.py | 6 +- sqlathanor/declarative/generate_model.py | 193 ++++++++++++++++++++++- tests/test_model_generation.py | 89 ++++++++++- 3 files changed, 283 insertions(+), 5 deletions(-) diff --git a/sqlathanor/declarative/__init__.py b/sqlathanor/declarative/__init__.py index 2f92f54..fca3d06 100644 --- a/sqlathanor/declarative/__init__.py +++ b/sqlathanor/declarative/__init__.py @@ -8,7 +8,8 @@ from sqlathanor.declarative.base_model import BaseModel from sqlathanor.declarative.declarative_base import declarative_base, as_declarative from sqlathanor.declarative.generate_model import generate_model_from_csv, \ - generate_model_from_json, generate_model_from_yaml, generate_model_from_dict + generate_model_from_json, generate_model_from_yaml, generate_model_from_dict, \ + generate_model_from_pydantic __all__ = [ @@ -18,5 +19,6 @@ 'generate_model_from_csv', 'generate_model_from_json', 'generate_model_from_yaml', - 'generate_model_from_dict' + 'generate_model_from_dict', + 'generate_model_from_pydantic' ] diff --git a/sqlathanor/declarative/generate_model.py b/sqlathanor/declarative/generate_model.py index 6df789f..0cf1406 100644 --- a/sqlathanor/declarative/generate_model.py +++ b/sqlathanor/declarative/generate_model.py @@ -5,15 +5,17 @@ # extension, and its member function documentation is automatically incorporated # there as needed. -from validator_collection import checkers +from validator_collection import checkers, validators from sqlathanor.declarative.base_model import BaseModel from sqlathanor.declarative.declarative_base import declarative_base from sqlathanor.attributes import AttributeConfiguration, validate_serialization_config -from sqlathanor.utilities import parse_yaml, parse_json, parse_csv, read_csv_data +from sqlathanor.utilities import parse_yaml, parse_json, parse_csv, read_csv_data, \ + columns_from_pydantic from sqlathanor.default_deserializers import get_type_mapping from sqlathanor.schema import Column + def generate_model_from_dict(serialized_dict, tablename, primary_key, @@ -723,3 +725,190 @@ def generate_model_from_csv(serialized, **kwargs) return generated_model + + +def generate_model_from_pydantic(pydantic_models, + tablename, + primary_key, + cls = BaseModel, + skip_nested = True, + default_to_str = False, + type_mapping = None, + base_model_attrs = None, + **kwargs): + """Generate a :term:`model class` from one or more + :term:`Pydantic models `. + + .. versionadded: 0.8.0 + + .. note:: + + This function *cannot* programmatically create + :term:`relationships `, :term:`hybrid properties `, + or :term:`association proxies `. + + :param pydantic_models: The :term:`Pydantic Model(s) ` which will + determine the ORM columns/attributes that will be present within the generated + :term:`model class`. + + .. hint:: + + If supplying an iterable of :term:`Pydantic models `, then all + models will be coalesced into a single :term:`model class`. If supplying a + :class:`dict ` whose values are separate + :term:`Pydantic models `, then each :class:`dict ` + key will correspond to a single serialization/de-serialization + :term:`configuration set` in the resulting :term:`model class`. + + :type pydantic_models: :class:`pydantic.BaseModel ` + / iterable of :class:`pydantic.BaseModel ` / + :class:`dict ` whose keys correspond to :term:`configuration set` names + and whose value must be a + :class:`pydantic.BaseModel ` + + :param tablename: The name of the SQL table to which the model corresponds. 
+ :type tablename: :class:`str ` + + :param primary_key: The name of the column/key that should be used as the table's + primary key. + :type primary_key: :class:`str ` + + :param cls: The base class to use when generating a new :term:`model class`. + Defaults to :class:`BaseModel` to provide serialization/de-serialization + support. + + If a :class:`tuple ` of classes, will include :class:`BaseModel` + in that list of classes to mixin serialization/de-serialization support. + + If not :obj:`None ` and not a :class:`tuple `, will mixin + :class:`BaseModel` with the value passed to provide + serialization/de-serialization support. + :type cls: :obj:`None ` / :class:`tuple ` of classes / + class object + + :param skip_nested: If ``True`` then any keys in ``serialized_dict`` that + feature nested items (e.g. iterables, :class:`dict ` objects, + etc.) will be ignored. If ``False``, will treat serialized items as + :class:`str `. Defaults to ``True``. + :type skip_nested: :class:`bool ` + + :param default_to_str: If ``True``, will automatically set a key/column whose + value type cannot be determined to ``str`` + (:class:`Text `). If ``False``, will + use the value type's ``__name__`` attribute and attempt to find a mapping. + Defaults to ``False``. + :type default_to_str: :class:`bool ` + + :param type_mapping: Determines how value types in ``pydantic_models`` map to + SQL column data types. To add a new mapping or override a default, set a + key to the name of the value type in Python, and set the value to a + :doc:`SQLAlchemy Data Type `. The following are the + default mappings applied: + + .. list-table:: + :widths: 30 30 + :header-rows: 1 + + * - Python Literal + - SQL Column Type + * - ``bool`` + - :class:`Boolean ` + * - ``str`` + - :class:`Text ` + * - ``int`` + - :class:`Integer ` + * - ``float`` + - :class:`Float ` + * - ``date`` + - :class:`Date ` + * - ``datetime`` + - :class:`DateTime ` + * - ``time`` + - :class:`Time ` + + :type type_mapping: :class:`dict ` with type names as keys and + column data types as values. + + :param base_model_attrs: Optional :class:`dict ` of special + attributes that will be applied to the generated + :class:`BaseModel ` (e.g. + ``__table_args__``). Keys will correspond to the attribute name, while the + value is the value that will be applied. Defaults to :obj:`None `. + :type base_model_attrs: :class:`dict ` / :obj:`None ` + + :param kwargs: Any additional keyword arguments will be passed to + :func:`declarative_base() ` when + generating the programmatic :class:`BaseModel `. 
+ + :returns: :term:`Model class` whose structure matches ``pydantic_models`` and whose + serialization/de-serialization configuration corresponds to the pattern implied by + ``pydantic_models`` structure + :rtype: :class:`BaseModel` + + :raises UnsupportedValueTypeError: when a value in ``pydantic_models`` does not + have a corresponding key in ``type_mapping`` + :raises ValueError: if ``pydantic_models`` is not a supported type or is empty + :raises ValueError: if ``tablename`` is empty + :raises ValueError: if ``primary_key`` is empty + + """ + # pylint: disable=too-many-branches + if not pydantic_models: + raise ValueError('pydantic_models cannot be empty') + if not tablename: + raise ValueError('tablename cannot be empty') + if not primary_key: + raise ValueError('primary_key cannot be empty') + + config_sets = {} + if checkers.is_type(pydantic_models, 'ModelMetaclass'): + config_sets['_single'] = pydantic_models + elif checkers.is_iterable(pydantic_models, forbid_literals = (str, bytes, dict)): + config_sets['_single'] = [x for x in pydantic_models] + elif checkers.is_dict(pydantic_models): + for key in pydantic_models: + value = pydantic_models[key] + key = validators.string(key, allow_empty = False) + if not checkers.is_iterable(value, forbid_literals = (str, bytes, dict)): + value = [value] + for item in value: + if not checkers.is_type(item, 'ModelMetaclass'): + raise ValueError('pydantic_models must contain Pydantic BaseModel ' + 'values. Contained: %s' % type(value)) + config_sets[key] = [x for x in value] + else: + raise ValueError('pydantic_models must either be a Pydantic BaseModel, an ' + 'iterable of Pydantic BaseModels, or a dict whose value is a ' + 'Pydantic BaseModel or an iterable of Pydantic BaseModels') + + GeneratedBaseModel = declarative_base(cls = cls, **kwargs) + + class InterimGeneratedModel(object): + # pylint: disable=too-few-public-methods,missing-docstring,invalid-variable-name + __tablename__ = tablename + + columns, serialization_config = columns_from_pydantic(config_sets, + primary_key = primary_key, + type_mapping = type_mapping, + skip_nested = skip_nested, + default_to_str = default_to_str) + + for column in columns: + setattr(InterimGeneratedModel, column.name, column) + + if base_model_attrs: + for key in base_model_attrs: + setattr(InterimGeneratedModel, key, base_model_attrs[key]) + + class GeneratedModel(GeneratedBaseModel, InterimGeneratedModel): + # pylint: disable=missing-docstring,too-few-public-methods + pass + + if isinstance(serialization_config, list): + GeneratedModel.configure_serialization(configs = serialization_config) + else: + for config_set in serialization_config: + GeneratedModel.configure_serialization(configs = serialization_config[config_set], + config_set = config_set) + + return GeneratedModel diff --git a/tests/test_model_generation.py b/tests/test_model_generation.py index 6753e8c..d30a6e8 100644 --- a/tests/test_model_generation.py +++ b/tests/test_model_generation.py @@ -9,6 +9,7 @@ """ from datetime import datetime +from typing import Any, Union, Optional import pytest @@ -18,8 +19,10 @@ from sqlalchemy.types import Integer, Text, Float, DateTime, Date, Time, Boolean from validator_collection import checkers +from sqlathanor._compat import is_py36 + from sqlathanor.declarative import generate_model_from_dict, generate_model_from_json, \ - generate_model_from_yaml, generate_model_from_csv + generate_model_from_yaml, generate_model_from_csv, generate_model_from_pydantic from sqlathanor.attributes import 
AttributeConfiguration from sqlathanor.errors import UnsupportedValueTypeError, CSVStructureError @@ -27,6 +30,33 @@ # pylint: disable=line-too-long + +if is_py36: + from pydantic import BaseModel + from pydantic.fields import Field, ModelField + + class PydanticModel(BaseModel): + id: int + field_1: str + field_2: str + + class PydanticModel2(BaseModel): + id: int + field_1: str + field_2: str + field_3: Any + + class PydanticModel3(BaseModel): + id: int + field_4: Optional[str] + field_5: bool + field_6: Union[str, int] +else: + def Field(*args, **kwargs): + return None + PydanticModel = 'Python <3.6' + + def test_func(): pass @@ -646,3 +676,60 @@ def test_generate_model_from_csv(input_files, for key in base_model_attrs: assert hasattr(result, key) is True assert getattr(result, key) == base_model_attrs[key] + + +@pytest.mark.parametrize('kwargs, error', [ + ({}, (TypeError, ValueError)), + ('invalid-type', (TypeError, ValueError)), + ({'primary_key': 'id'}, (TypeError, ValueError)), + ({'tablename': 'some_tablename'}, (TypeError, ValueError)), + ({'pydantic_models': 'invalid-type', + 'tablename': 'some_tablename', + 'primary_key': 'id'}, (TypeError, ValueError)), + ({'pydantic_models': { + '_single': [PydanticModel] + }, + 'tablename': 'some_tablename', + 'primary_key': 'id'}, None), + ({'pydantic_models': { + 'set_one': [PydanticModel], + 'set_two': [PydanticModel2, PydanticModel3] + }, + 'tablename': 'some_tablename', + 'primary_key': 'id'}, None), + ({'pydantic_models': { + 'set_one': [PydanticModel], + 'set_two': [PydanticModel2] + }, + 'tablename': 'some_tablename', + 'primary_key': 'id'}, None), + ({'pydantic_models': { + 'set_one': [PydanticModel], + 'set_two': [PydanticModel2], + 'set_three': [PydanticModel3] + }, + 'tablename': 'some_tablename', + 'primary_key': 'id'}, None), +]) +def test_generate_model_from_pydantic(kwargs, error): + if not error: + tablename = kwargs.get('tablename', None) + primary_key = kwargs.get('primary_key', None) + base_model_attrs = kwargs.get('base_model_attrs', None) + + result = generate_model_from_pydantic(**kwargs) + + assert hasattr(result, 'to_json') is True + assert hasattr(result, 'new_from_json') is True + assert hasattr(result, 'update_from_json') is True + assert hasattr(result, '__serialization__') is True + + assert result.__tablename__ == tablename + + if base_model_attrs: + for key in base_model_attrs: + assert hasattr(result, key) is True + assert getattr(result, key) == base_model_attrs[key] + else: + with pytest.raises(error): + result = generate_model_from_pydantic(**kwargs) From a33985191bdb69264d28913b0bdc3b27263e2e66 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Mon, 15 Feb 2021 17:51:32 -0500 Subject: [PATCH 07/24] Implemented Table.from_pydantic() with unit tests. 
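
A minimal usage sketch (the ``UserSchema`` model and the ``'_single'``
configuration set key are illustrative assumptions, mirroring the unit tests):

    from pydantic import BaseModel

    from sqlalchemy import MetaData
    from sqlathanor import Table

    class UserSchema(BaseModel):
        id: int
        username: str

    metadata = MetaData()

    # Columns are derived from the Pydantic model's fields, with ``id`` used
    # as the table's primary key.
    users_table = Table.from_pydantic({'_single': [UserSchema]},
                                      tablename = 'users',
                                      metadata = metadata,
                                      primary_key = 'id')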
--- sqlathanor/schema.py | 150 +++++++++++++++++++++++++++- tests/test_table_deserialization.py | 81 +++++++++++++++ 2 files changed, 230 insertions(+), 1 deletion(-) diff --git a/sqlathanor/schema.py b/sqlathanor/schema.py index 91ce0e0..da8e9af 100644 --- a/sqlathanor/schema.py +++ b/sqlathanor/schema.py @@ -23,7 +23,8 @@ from sqlathanor import attributes from sqlathanor._serialization_support import SerializationMixin from sqlathanor.default_deserializers import get_type_mapping -from sqlathanor.utilities import parse_json, parse_yaml, parse_csv, read_csv_data +from sqlathanor.utilities import parse_json, parse_yaml, parse_csv, read_csv_data, \ + columns_from_pydantic from sqlathanor.errors import SQLAthanorError @@ -1029,3 +1030,150 @@ def from_csv(cls, **kwargs) return table + + @classmethod + def from_pydantic(cls, + models, + tablename, + metadata, + primary_key, + skip_nested = True, + default_to_str = False, + type_mapping = None, + **kwargs): + """Generate a :class:`Table` object from a one of more + :term:`Pydantic Models `. + + .. versionadded: 0.8.0 + + :param models: The :term:`Pydantic Model(s) ` which will + determine the columns/attributes that will be present within the generated + table. + + .. hint:: + + If supplying an iterable of :term:`Pydantic models `, then all + models will be coalesced into a single + :class:`Table `. If supplying a + :class:`dict ` whose values are separate + :term:`Pydantic models `, then each :class:`dict ` + key will correspond to a single serialization/de-serialization + :term:`configuration set` in the resulting :term:`model class`. + + :type models: :class:`pydantic.BaseModel ` + / iterable of :class:`pydantic.BaseModel ` / + :class:`dict ` whose keys correspond to :term:`configuration set` names + and whose value must be a + :class:`pydantic.BaseModel ` + + :type serialized: :class:`dict ` + + :param tablename: The name of the SQL table to which the model corresponds. + :type tablename: :class:`str ` + + :param metadata: a :class:`MetaData ` + object which will contain this table. The metadata is used as a point of + association of this table with other tables which are referenced via foreign + key. It also may be used to associate this table with a particular + :class:`Connectable `. + :type metadata: :class:`MetaData ` + + :param primary_key: The name of the column/key that should be used as the table's + primary key. + :type primary_key: :class:`str ` + + :param skip_nested: If ``True`` then any keys in ``serialized`` that + feature nested items (e.g. iterables, :class:`dict ` objects, + etc.) will be ignored. If ``False``, will treat nested items as + :class:`str `. Defaults to ``True``. + :type skip_nested: :class:`bool ` + + :param default_to_str: If ``True``, will automatically set a key/column whose + value type cannot be determined to ``str`` + (:class:`Text `). If ``False``, will + use the value type's ``__name__`` attribute and attempt to find a mapping. + Defaults to ``False``. + :type default_to_str: :class:`bool ` + + :param type_mapping: Determines how value types in ``serialized`` map to + SQL column data types. To add a new mapping or override a default, set a + key to the name of the value type in Python, and set the value to a + :doc:`SQLAlchemy Data Type `. The following are the + default mappings applied: + + .. 
list-table:: + :widths: 30 30 + :header-rows: 1 + + * - Python Literal + - SQL Column Type + * - ``bool`` + - :class:`Boolean ` + * - ``str`` + - :class:`Text ` + * - ``int`` + - :class:`Integer ` + * - ``float`` + - :class:`Float ` + * - ``date`` + - :class:`Date ` + * - ``datetime`` + - :class:`DateTime ` + * - ``time`` + - :class:`Time ` + + :type type_mapping: :class:`dict ` with type names as keys and + column data types as values. + + :param kwargs: Any additional keyword arguments will be passed to the + :class:`Table` constructor. For a full list of options, please see + :class:`sqlalchemy.schema.Table `. + + :returns: A :class:`Table ` object. + :rtype: :class:`Table ` + + :raises UnsupportedValueTypeError: when a value in ``serialized`` does not + have a corresponding key in ``type_mapping`` + :raises ValueError: if ``models`` is empty or is not an acceptable type + :raises ValueError: if ``tablename`` is empty + :raises ValueError: if ``primary_key`` is empty + + """ + if not tablename: + raise ValueError('tablename cannot be empty') + if not primary_key: + raise ValueError('primary_key cannot be empty') + + if not models: + raise ValueError('models cannot be empty') + if checkers.is_iterable(models, forbid_literals = (str, bytes, dict)): + for item in models: + if not checkers.is_type(item, 'ModelMetaclass'): + raise ValueError('models must contain Pydantic BaseModel classes. ' + 'Found: %s' % type(item)) + elif not checkers.is_type(models, ('ModelMetaclass', dict)): + raise ValueError('models must be a Pydantic BaseModel or a dict. ' + 'Was: %s' % type(models)) + elif isinstance(models, dict): + for key in models: + value = models[key] + if checkers.is_iterable(value, forbid_literals = (str, bytes, dict)): + for item in value: + if not checkers.is_type(item, 'ModelMetaclass'): + raise ValueError('models must contain Pydantic BaseModel classes. ' + 'Found: %s' % type(item)) + elif not checkers.is_type(value, 'ModelMetaclass'): + raise ValueError('models must contain Pydantic BaseModel classes. 
' + 'Found: %s' % type(value)) + + columns, serialization_config = columns_from_pydantic(models, + primary_key = primary_key, + skip_nested = skip_nested, + default_to_str = default_to_str, + type_mapping = type_mapping, + **kwargs) + + return cls(tablename, + metadata, + *columns, + **kwargs) diff --git a/tests/test_table_deserialization.py b/tests/test_table_deserialization.py index 9ebd0cc..20ba5f2 100644 --- a/tests/test_table_deserialization.py +++ b/tests/test_table_deserialization.py @@ -9,6 +9,7 @@ """ from datetime import datetime +from typing import Any, Union, Optional import pytest @@ -23,8 +24,36 @@ from sqlathanor import Table from sqlathanor.errors import UnsupportedValueTypeError, CSVStructureError +from sqlathanor._compat import is_py36 + from tests.fixtures import check_input_file, input_files +if is_py36: + from pydantic import BaseModel + from pydantic.fields import Field, ModelField + + class PydanticModel(BaseModel): + id: int + field_1: str + field_2: str + + class PydanticModel2(BaseModel): + id: int + field_1: str + field_2: str + field_3: Any + + class PydanticModel3(BaseModel): + id: int + field_4: Optional[str] + field_5: bool + field_6: Union[str, int] +else: + def Field(*args, **kwargs): + return None + PydanticModel = 'Python <3.6' + + # pylint: disable=line-too-long def test_func(): @@ -691,3 +720,55 @@ def test_from_csv(input_files, assert isinstance(item_column.type, item[1]) is True assert item_column.primary_key is (item[0] == primary_key) + + +@pytest.mark.parametrize('kwargs, expected_columns, error', [ + ({}, 0, (TypeError, ValueError)), + ('invalid-type', 0, (TypeError, ValueError)), + ({'primary_key': 'id'}, 0, (TypeError, ValueError)), + ({'tablename': 'some_tablename'}, 0, (TypeError, ValueError)), + ({'models': 'invalid-type', + 'tablename': 'some_tablename', + 'primary_key': 'id'}, 0, (TypeError, ValueError)), + ({'models': { + '_single': [PydanticModel] + }, + 'tablename': 'some_tablename', + 'primary_key': 'id'}, 3, None), + ({'models': { + 'set_one': [PydanticModel], + 'set_two': [PydanticModel2, PydanticModel3] + }, + 'tablename': 'some_tablename', + 'primary_key': 'id'}, 7, None), + ({'models': { + 'set_one': [PydanticModel], + 'set_two': [PydanticModel2] + }, + 'tablename': 'some_tablename', + 'primary_key': 'id'}, 4, None), + ({'models': { + 'set_one': [PydanticModel], + 'set_two': [PydanticModel2], + 'set_three': [PydanticModel3] + }, + 'tablename': 'some_tablename', + 'primary_key': 'id'}, 7, None), +]) +def test_from_pydantic(kwargs, expected_columns, error): + if not error: + tablename = kwargs.get('tablename', None) + primary_key = kwargs.get('primary_key', None) + + kwargs['metadata'] = MetaData() + + result = Table.from_pydantic(**kwargs) + + assert isinstance(result, Table) + assert result.name == tablename + + assert len(result.c) == expected_columns + + else: + with pytest.raises(error): + result = Table.from_pydantic(**kwargs) From ace929b99bda9e8ba3bf1a6025f9cbf49c18c187 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 14:43:34 -0500 Subject: [PATCH 08/24] Closes #99. Updated documentation to reflect Pydantic support. 
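For reviewers, the headline workflow these docs describe can be condensed into the sketch below (class and table names are illustrative only; the fuller, authoritative examples are the ones added to docs/pydantic.rst in this patch):

from pydantic import BaseModel as PydanticBaseModel
from sqlathanor import generate_model_from_pydantic

class ReadUser(PydanticBaseModel):
    id: int
    username: str
    email: str

class WriteUser(ReadUser):
    password: str

# Each dict key becomes a named serialization/de-serialization
# configuration set ('read' and 'write') on the generated model class.
User = generate_model_from_pydantic({'read': ReadUser, 'write': WriteUser},
                                    tablename = 'users',
                                    primary_key = 'id')
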
--- README.rst | 6 +- docs/_import_sqlathanor.rst | 3 + docs/_versus_alternatives.rst | 69 +++ docs/api.rst | 11 + docs/conf.py | 3 +- docs/glossary.rst | 37 ++ docs/index.rst | 5 +- docs/pydantic.rst | 411 ++++++++++++++++++ docs/quickstart.rst | 53 ++- docs/using.rst | 90 +++- .../declarative/_base_configuration_mixin.py | 7 +- sqlathanor/schema.py | 6 +- sqlathanor/utilities.py | 4 +- 13 files changed, 682 insertions(+), 23 deletions(-) create mode 100644 docs/pydantic.rst diff --git a/README.rst b/README.rst index 17d4a3b..9bcb5e2 100644 --- a/README.rst +++ b/README.rst @@ -266,8 +266,10 @@ Key SQLAthanor Features * Customize the validation used when de-serializing particular columns to match your needs. * Works with Declarative Reflection and the SQLAlchemy Automap extension. -* Programmatically generate Declarative Base Models from serialized data. -* Programmatically generate SQLAlchemy ``Table`` objects from serialized data. +* Programmatically generate Declarative Base Models from serialized data or Pydantic + models. +* Programmatically generate SQLAlchemy ``Table`` objects from serialized data or Pydantic + models. **SQLAthanor** vs Alternatives diff --git a/docs/_import_sqlathanor.rst b/docs/_import_sqlathanor.rst index 581cb6a..d5024f3 100644 --- a/docs/_import_sqlathanor.rst +++ b/docs/_import_sqlathanor.rst @@ -164,3 +164,6 @@ The table below shows how `SQLAlchemy`_ classes and functions map to their .. code-block:: python from sqlathanor.automap import automap_base + +.. _SQLAlchemy: http://www.sqlalchemy.org +.. _Flask-SQLAlchemy: http://flask-sqlalchemy.pocoo.org/2.3/ diff --git a/docs/_versus_alternatives.rst b/docs/_versus_alternatives.rst index d49e837..ddcba7a 100644 --- a/docs/_versus_alternatives.rst +++ b/docs/_versus_alternatives.rst @@ -28,6 +28,72 @@ it might be helpful to compare **SQLAthanor** to some commonly-used alternatives find that I never really roll my own serialization/de-serialization approach when working `SQLAlchemy`_ models any more. + .. tab:: Pydantic + + .. tip:: + + Because `Pydantic`_ is growing in popularity, we have decided to integrate `Pydantic`_ + support within **SQLAthanor**. + + Using + :func:`generate_model_from_pydantic() `, + you can now programmatically generate a **SQLAthanor** + :class:`BaseModel ` from your + :term:`Pydantic models `. + + This allows you to only maintain *one* representation of your data model (the + `Pydantic`_ one), while still being able to use SQLAthanor's rich serialization / + de-serialization configuration functionality. + + `Pydantic`_ is an amazing object parsing library that leverages native Python typing + to provide simple syntax and rich functionality. While I have philosophical quibbles + about some of its API semantics and architectural choices, I cannot deny that it is + elegant, extremely performant, and all around excellent. + + Since `FastAPI`_, one of the fastest-growing web application frameworks in the Python + ecosystem is tightly coupled with `Pydantic`_, it has gained significant ground within + the community. + + However, when compared to **SQLAthanor** it has a number of architectural limitations: + + While `Pydantic`_ has excellent serialization and deserialization functionality to + JSON, it is extremely limited with its serialization/deserialization support for other + common data formats like CSV or YAML. + + Second, by its design `Pydantic`_ forces you to maintain **multiple** representations + of your data model. 
On the one hand, you will need your `SQLAlchemy`_ ORM + representation, but then you will *also* need one or more `Pydantic`_ models that will + be used to serialize/de-serialize your model instances. + + Third, by its design, `Pydantic`_ tends to lead to significant amounts of duplicate + code, maintaining similar-but-not-quite-identical versions of your data models (with + one `Pydantic`_ schema for each context in which you might serialize/de-serialize your + data model). + + Fourth, its API semantics can get extremely complicated when trying to use it as a + true serialization/de-serialization library. + + While `Pydantic`_ has made efforts to integrate ORM support into its API, that + functionality is relatively limited in its current form. + + .. tip:: + + **When to use it?** + + `Pydantic`_ is easy to use when building simple web applications due to its + reliance on native Python typing. The need to maintain multiple representations + of your data models is a trivial burden with small applications or relatively + simple data models. + + `Pydantic`_ is also practically required when building applications using the + excellent `FastAPI`_ framework. + + So given these two things, we recommend using `Pydantic`_ *in combination* with + **SQLAthanor** to get the best of both words: native Python typing for validation + against your Python model (via `Pydantic`_) with rich configurable + serialization/de-serialization logic (via **SQLAthanor**), all integrated into + the underlying `SQLAlchemy`_ ORM. + .. tab:: Marshmallow The `Marshmallow`_ library and its `Marshmallow-SQLAlchemy`_ extension are @@ -161,6 +227,9 @@ it might be helpful to compare **SQLAthanor** to some commonly-used alternatives .. _Marshmallow: https://marshmallow.readthedocs.io/en/3.0/ .. _Marshmallow-SQLAlchemy: https://marshmallow-sqlalchemy.readthedocs.io/en/latest/ +.. _Pydantic: https://pydantic-docs.helpmanual.io/ +.. _FastAPI: https://fastapi.tiangolo.com/ .. _Colander: https://docs.pylonsproject.org/projects/colander/en/latest/ .. _ColanderAlchemy: https://colanderalchemy.readthedocs.io/en/latest/ .. _pandas: http://pandas.pydata.org/ +.. _SQLAlchemy: http://www.sqlalchemy.org diff --git a/docs/api.rst b/docs/api.rst index 146828b..bcc5b4b 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -79,6 +79,13 @@ generate_model_from_dict() ---------------------------------------- +generate_model_from_pydantic() +------------------------------------- + +.. autofunction:: generate_model_from_pydantic + +-------------------------------------------------------- + .. module:: sqlathanor.schema Schema @@ -197,6 +204,8 @@ Table .. automethod:: Table.from_yaml + .. automethod:: Table.from_pydantic + ---------------------------- .. module:: sqlathanor.attributes @@ -220,6 +229,8 @@ AttributeConfiguration .. automethod:: AttributeConfiguration.__init__ + .. 
automethod:: from_pydantic_model + ---------------------- validate_serialization_config() diff --git a/docs/conf.py b/docs/conf.py index c567ae7..c4447d4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -227,10 +227,11 @@ intersphinx_mapping = { 'python': ('https://docs.python.org/3.6', None), 'python27': ('https://docs.python.org/2.7', None), - 'sqlalchemy': ('http://docs.sqlalchemy.org/en/latest/', None), + 'sqlalchemy': ('http://docs.sqlalchemy.org/en/13/', None), 'simplejson': ('http://simplejson.readthedocs.io/en/latest/', None), 'validator-collection': ('http://validator-collection.readthedocs.io/en/latest/', None), 'flask_sqlalchemy': ('http://flask-sqlalchemy.pocoo.org/2.3/', None), + #'pydantic': ('https://pydantic-docs.helpmanual.io/', None), } # -- Options for todo extension ---------------------------------------------- diff --git a/docs/glossary.rst b/docs/glossary.rst index 1aeeaaf..43de009 100644 --- a/docs/glossary.rst +++ b/docs/glossary.rst @@ -31,6 +31,22 @@ Glossary with fields (columns) separated by a delimiter character (typically a comma ``,`` or pipe ``|``). + Configuration Set + A named set of attribute configurations which determine which :term:`model class` + attributes get :term:`serialized ` / + :term:`de-serialized ` under given circumstances (when indicating + the configuration set during the serialization / de-serialization operation). + + .. tip:: + + Think of a configuration set as a set of "rules" that determine what gets + processed when serializing or de-serializing a :term:`model class`. + + .. note:: + + It is possible for a single :term:`model class` to have many different configuration + sets, so long as each set has a unique name. + Declarative Configuration A way of configuring :term:`serialization` and :term:`de-serialization` for particular :term:`model attributes ` when defining @@ -187,6 +203,27 @@ Glossary module from the standard Python library, or an outside pickling library like `dill `_. + Pydantic Model + A Pydantic Model is a representation of a class which contains data and some + attributes that inherits from the + :class:`pydantic.BaseModel ` class. This model is + used by the `Pydantic `_ library to either + validate the typing of data upon deserialization or to serialize data to appropriate + types when needed. + + By definition, each Pydantic model is self-contained (though they may inherit across + models). Pydantic models are inherently reliant on Python's native typing support, + relying on type hints and annotations to provide the canonical instructions against + which to validate or based on which to serialize. + + .. seealso:: + + * :doc:`SQLAthanor and Pydantic ` + * :func:`generate_model_from_pydantic() ` + * :meth:`Table.from_pydantic() ` + * :meth:`AttributeConfiguration.from_pydantic_model() ` + + Relationship A connection between two database tables or their corresponding :term:`model classes ` defined using a foreign key constraint. diff --git a/docs/index.rst b/docs/index.rst index 854a24c..964cae1 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -42,6 +42,7 @@ SQLAthanor Home Quickstart: Patterns and Best Practices Using SQLAthanor + SQLAthanor and Pydantic API Reference Default Serialization Functions Default De-serialization Functions @@ -175,9 +176,9 @@ Key SQLAthanor Features your needs. * Works with :ref:`Declarative Reflection ` and the :ref:`Automap Extension `. -* Programmatically :ref:`generate Declarative Base Models from serialized data `. 
+* Programmatically :ref:`generate Declarative Base Models from serialized data or Pydantic models `. * Programmatically create :ref:`SQLAlchemy Table objects ` from - serialized data. + serialized data or :term:`Pydantic models `. | diff --git a/docs/pydantic.rst b/docs/pydantic.rst new file mode 100644 index 0000000..009ccdf --- /dev/null +++ b/docs/pydantic.rst @@ -0,0 +1,411 @@ +****************************************** +SQLAthanor and Pydantic +****************************************** + +.. |strong| raw:: html + + + +.. |/strong| raw:: html + + + +.. contents:: + :local: + :depth: 3 + :backlinks: entry + +---------- + +.. versionadded:: 0.8.0 + +SQLAthanor, Pydantic, and FastAPI +===================================== + +.. sidebar:: Some Caveats + + I'm a huge fan of the `Pydantic`_ library. Its authors have made many choices that I + would not have made, but that does not change my immense respect for the work they have + done. My interpretation of "priorities" below reflects my subjective evaluation of the + library and its API semantics, and from my experience using the library in a number of + web application projects of varying complexity. + + My observations are in no way meant to be a criticism: They are merely an observation of + where the libraries have taken different philosophical and stylistic paths. + + My respect for `Pydantic`_ is one of the main reasons why I have decided to extend + **SQLAthanor** with built-in support for the `Pydantic`_ library. + +**SQLAthanor** and `Pydantic`_ are both concerned with the serialization and +deserialization of data. However, they approach the topic from different angles and have +made a variety of (very different) architectural and stylistic choices. These choices +reflect a different set of priorities: + +.. list-table:: + :widths: 50 50 + :header-rows: 1 + + * - SQLAthanor Priorities + - Pydantic Priorities + * - Database/ORM compatibility with `SQLAlchemy`_ + - Database/ORM agnosticism + * - The maintenance of a single representation of your data model, tied to its database implementation + - Multiple representations of your data model, each of which is tied to its usage in your code + * - Explicit reference and conceptual documentation + - Documentation by example / in code + * - Explicit APIs for the data model's lifecycle + - Implicit APIs relying on the Python standard library + +Both libraries have their place: in general terms, if I were working on a simple web +application, on a microservice, or on a relatively simple data model I would consider +`Pydantic`_ as a perfectly viable "quick-and-dirty" option. Its use of Python's native +typing hints/annotation is a beautifully elegant solution. + +However, if I need to build a robust API with complex data model representations, tables +with multiple relationships, or complicated business logic? Then I would prefer the +robust and extensible capabilities afforded by the `SQLAlchemy`_ Delarative ORM and +the **SQLAthanor** library. + +If that were it, I would consider `Pydantic`_ to be equivalent to `Marshmallow`_ and +`Colander`_: an interesting tool for serialization/deserialization, and one that has its +place, but not one that **SQLAthanor** need be concerned with. + +But there's one major difference: `FastAPI`_. + +`FastAPI`_ is an amazing microframework, and is rapidly rising in popularity across the +Python ecosystem. 
That's for very good reason: It is blisteringly fast, its API is +relatively simple, and it has the ability to automatically generate OpenAPI/Swagger +schemas of your API endpoints. What's not to love? + +Well, its tight coupling with `Pydantic`_, for one thing. When building an application +using the `FastAPI`_ framework, I am practically forced to use +:term:`Pydantic models ` as my API inputs, outputs, and validators. If I +choose not to use Pydantic models, then I lose many of the valuable features (besides +performance) which make `FastAPI`_ so attractive for writing API applications. + +But using `FastAPI`_ and `Pydantic`_ in a complex API application may require a lot of +"extra" code: the repetition of object models, the replication of business logic, +the duplication of context, etc. All of these are concerns that **SQLAthanor** was +explicitly designed to minimize. + +So what to do? Most patterns, documentation, and best practices found on the internet for +authoring `FastAPI`_ applications explicitly suggest that you (manually, in your code): + + * Create a `SQLAlchemy`_ :term:`model class` for the database interface for each data + model + * Create one `Pydantic`_ :term:`model class ` for *each* "version" of + your data model's output/input. So if you need one read version and a different write + version? You need two :term:`Pydantic models `. + * Use your :term:`Pydantic models ` as the validators for your API + endpoints, as needed. + +This is all fine and dandy, but now what happens if you need to add an attribute to your +data model? You have to make a change to your `SQLAlchemy`_ model class, and to one or +more `Pydantic`_ models, and possibly to your API endpoints. And let's not get started on +changes to your data model's underlying business logic! + +There has to be a better way. + +Which is why I added `Pydantic`_ support to **SQLAthanor**. With this added support, you +can effectively use your :term:`Pydantic models ` as the "canonical +definition" of your data model. Think of the lifecycle this way: + + * You define your data model in one or more :term:`Pydantic models `. + * You programmatically create a `SQLAlchemy`_ :term:`model class` whose columns are + *automatically* derived from the underlying :term:`Pydantic models ` + and for whom each :term:`Pydantic Model` serves as a serialization/deserialization + :term:`configuration set`. + +Thus, you remove one of the (more complicated) steps in the process of writing your +`FastAPI`_ application. Now all you have to do is create your `Pydantic`_ models, and then +generate your **SQLAthanor** :term:`model classes `. Your `FastAPI`_ can +still validate based on your `Pydantic`_ models, even if you choose to drive +serialization/deserialization from your `SQLAlchemy`_ :term:`model classes `. + +In other words: It saves you code! Just look at the example below: + +.. tabs:: + + .. tab:: FastAPI with Pydantic only + + .. todo:: + + Add an example + + .. tab:: FastAPI with SQLAthanor/Pydantic + + .. todo:: + + Add an example + +---------------- + +.. 
_generating_and_configuring_model_classes_using_pydantic: + +Generating and Configuring Model Classes Using Pydantic +========================================================== + +As **SQLAthanor** relies on the creation of :term:`model classes ` which +both define your database representation and provide serialization/deserialization +configuration instructions, the first step to using `Pydantic`_ with **SQLAthanor** is +to generate your :term:`model classes ` based on your +:term:`Pydantic models `. + +You can do this in **SQLAthanor** using the +:func:`generate_model_from_pydantic() ` +function. This function takes your :term:`Pydantic models ` as an input, +and creates a **SQLAthanor** :term:`model class` (which is a subclass of +:class:`sqlathanor.declarative.BaseModel`). + +When generating your model classes from :term:`Pydantic models `, you can +supply multiple models which will then get consolidated into a single **SQLAthanor** +:class:`BaseModel `. For example: + +.. tabs:: + + .. tab:: 1 Model + + This example shows how you would generate a single + :class:`sqlathanor.BaseModel ` from a single + :class:`pydantic.BaseModel`. Since it only has one model, it would have only one + serialization/deserialization :term:`configuration set` by default: + + .. code-block:: python + + from pydantic import BaseModel as PydanticBaseModel + from sqlathanor import generate_model_from_pydantic + + class SinglePydanticModel(PydanticBaseModel): + id: int + username: str + email: str + + SingleSQLAthanorModel = generate_model_from_pydantic(SinglePydanticModel, + tablename = 'my_tablename', + primary_key = 'id') + + This code will generate a single **SQLAthanor** :term:`model class` named + ``SingleSQLAthanorModel``, which will contain three columns: ``id``, ``username``, + and ``email``. The column types will be set to correspond to the data types annotated + in the ``SinglePydanticModel`` class definition. + + .. tab:: 2 Models (shared config set) + + This example shows how you would combine multiple + :term:`Pydantic models ` into a single + :class:`sqlathanor.BaseModel `. A typical use case + would be if one :term:`Pydantic model` represents the output when + you are retrieving/viewing a user's data (which does not have a ``password`` field for + security reasons) and hte other :term:`Pydantic model` represents the input when + you are writing/creating a new user (which does need the password field). + + .. note:: + + Because both :term:`Pydantic models ` are passed to the function in + a single :class:`list `, they will receive a single **SQLAthanor** + :term:`configuration set`. + + .. code-block:: python + + from pydantic import BaseModel as PydanticBaseModel + from sqlathanor import generate_model_from_pydantic + + class ReadUserModel(PydanticBaseModel): + id: int + username: str + email: str + + class WriteUserModel(ReadUserModel): + password: str + + SingleSQLAthanorModel = generate_model_from_pydantic([ReadUserModel, + WriteUserModel], + tablename = 'my_tablename', + primary_key = 'id') + + This code will generate a single **SQLAthanor** :term:`model class` named + ``SingleSQLAthanorModel`` with four columns (``id``, ``username``, ``email``, and + ``password``). However, because all models were passed in as a single list, the + columns will be consolidated with only *one* :term:`configuration set`. + + .. caution:: + + In my experience, it is very rare that you would want to consolidate multiple + :term:`Pydantic models ` with only one :term:`configuration set`. 
+ Most of the type, each :term:`Pydantic model` will actually represent its own + :term:`configuration set` as documented in the next example. + + .. tab:: 2 Models (independent config sets) + + This example shows how you would combine multiple + :term:`Pydantic models ` into a single + :class:`sqlathanor.BaseModel `, but configure + multiple serialization/deserialization + :term:`configuration sets ` based on those + :term:`Pydantic models `. + + This is the most-common use case, and is fairly practical. To define multiple + :term:`configuration sets `, simply pass the + :term:`Pydantic models ` as key/value pairs in the first argument: + + .. code-block:: python + + from pydantic import BaseModel as PydanticBaseModel + from sqlathanor import generate_model_from_pydantic + + class ReadUserModel(PydanticBaseModel): + id: int + username: str + email: str + + class WriteUserModel(ReadUserModel): + password: str + + SQLAthanorModel = generate_model_from_pydantic({ 'read': ReadUserModel, + 'write': WriteUserModel + }, + tablename = 'my_tablename', + primary_key = 'id') + + This code will generate a single **SQLAthanor** :term:`model class` + (``SQLAthanorModel``, with four columns - ``id``, ``username``, ``email``, and + ``password``), but that model class will have two configuration sets: ``read`` which + will serialize/de-serialize only three columns (``id``, ``username``, and ``email``) and + ``write`` which will serialize/de-serialize four columns (``id``, ``username``, + ``email``, and ``password``). + + This ``SQLAthanorModel`` then becomes useful when serializing your + :term:`model instances ` to :class:`dict ` or de-serializing + them from :class:`dict ` using the context-appropriate + :term:`configuration set`: + + .. code-block:: python + + # Assumes that "as_dict" contains a string JSON representation with attributes as + # defined in your "WriteUserModel" Pydantic model. + model_instance = SQLAthanorModel.new_from_json(as_json, config_set = 'write') + + # Produces a dict representation of the object with three attributes, corresponding + # to your "ReadUserModel" Pydantic model. + readable_as_dict = model_instance.to_dict(config_set = 'read') + +.. tip:: + + When generating your **SQLAthanor** :term:`model classes ` from your + :term:`Pydantic models `, it is important to remember that serialization + and de-serialization is disabled by default for security reasons. Therefore a best + practice is to + :ref:`enable/disable your serialization and de-serialization at runtime `. + + .. seealso:: + + * :meth:`BaseModel.configure_serialization() ` + * :meth:`BaseModel.set_attribute_serialization_config() ` + +.. caution:: + + This functionality *does not* support more complex table structures, including + relationships, hybrid properties, or association proxies. + +------------------------- + +Generating Tables from Pydantic Models +========================================== + +Just as you can +:ref:`generate SQLAthanor model classes from Pydantic models `, +you can also create :class:`Table ` objects from +:term:`Pydantic models `, consolidating their attributes into standard +SQL :class:`Column ` definitions. + +.. 
code-block:: python + + from pydantic import BaseModel + from sqlathanor import Table + + # Define Your Pydantic Models + class UserWriteModel(BaseModel): + id: int + username: str + email: str + password: str + + class UserReadModel(BaseModel): + id: int + username: str + email: str + + # Create Your Table + pydantic_table = Table.from_pydantic([UserWriteModel, UserReadModel], + tablename = 'my_tablename_goes_here', + primary_key = 'id') + +This code will generate a single :class:`Table ` instance +(``pydantic_table``) which will have four columns: ``id``, ``username``, ``email``, and +``password``. Their column types will correspond to the type hints defined in the Pydantic +models. + +.. seealso:: + + * :class:`Table ` + * :meth:`Table.from_pydantic() ` + +---------------------- + +.. _configuring_attributes_from_pydantic_models: + +Configuring Attributes from Pydantic Models +=============================================== + +There may be times when you wish to configure the serialization / de-serialization of +:term:`model class` attributes based on a related :term:`Pydantic model`. You can +programmatically create a new +:class:`AttributeConfiguration ` instance +from a :term:`Pydantic model` by calling the +:meth:`AttributeConfiguration.from_pydantic_model() ` +class method: + +.. code-block:: python + + from pydantic import BaseModel + from sqlathanor import Table + + # Define Your Pydantic Models + class UserWriteModel(BaseModel): + id: int + username: str + email: str + password: str + + class UserReadModel(BaseModel): + id: int + username: str + email: str + + password_config = AttributeConfiguration.from_pydantic_model(UserWriteModel, + field = 'password', + supports_csv = (True, False), + supports_json = (True, False), + supports_yaml = (True, False), + supports_dict = (True, False), + on_deserialize = my_encryption_function) + +This code will produce a single +:class:`AttributeConfiguration ` instance +named ``password_config``. It will support the de-serialization of data, but will never be +serialized (a typical pattern for password fields!). Furthermore, it will execute the +``my_encryption_function`` during the de-serialization process. + +A very common use case is to configure the serialization/de-serialization profile for +attributes that were programmatically derived from +:term:`Pydantic models `. + +.. seealso:: + + * :meth:`AttributeConfiguration.from_pydantic_model() ` + +.. _Pydantic: https://pydantic-docs.helpmanual.io/ +.. _FastAPI: https://fastapi.tiangolo.com/ +.. _SQLAlchemy: http://www.sqlalchemy.org +.. _Marshmallow: https://marshmallow.readthedocs.io/en/3.0/ +.. _Colander: https://docs.pylonsproject.org/projects/colander/en/latest/ diff --git a/docs/quickstart.rst b/docs/quickstart.rst index dde5a5d..8853363 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -347,7 +347,8 @@ Password De-serialization Programmatically Generating Models ===================================== -.. versionadded:: 0.3.0 +.. versionadded:: 0.3.0 generation from CSV, JSON, YAML, or :class:`dict ` +.. versionadded:: 0.8.0 generation from Pydantic models .. seealso:: @@ -355,6 +356,7 @@ Programmatically Generating Models * :func:`generate_model_from_json() ` * :func:`generate_model_from_yaml() ` * :func:`generate_model_from_dict() ` + * :func:`generate_model_from_pydantic() ` .. tabs:: @@ -403,6 +405,17 @@ Programmatically Generating Models tablename = 'my_table_name', primary_key = 'id') + .. tab:: Pydantic + + .. 
code-block:: python + + from sqlathanor import generate_model_from_pydantic + + # Assumes that "PydanticReadModel" and "PydanticWriteModel" contain the Pydantic + # models for the object/resource you are representing. + PydanticModel = generate_model_from_pydantic([PydanticReadModel, PydanticWriteModel], + tablename = 'my_table_name', + primary_key = 'id') ---------------------------- @@ -672,10 +685,11 @@ Using SQLAthanor with Flask-SQLAlchemy ---------------------------- -Generating SQLAlchemy Tables from Serialized Data +Generating SQLAlchemy Tables Programmatically ==================================================== -.. versionadded:: 0.3.0 +.. versionadded:: 0.3.0 CSV, JSON, YAML, and :class:`dict ` support +.. versionadded:: 0.8.0 Pydantic model support .. seealso:: @@ -683,6 +697,7 @@ Generating SQLAlchemy Tables from Serialized Data * :meth:`Table.from_json() ` * :meth:`Table.from_yaml() ` * :meth:`Table.from_dict() ` + * :meth:`Table.from_pydantic() ` .. tabs:: @@ -749,3 +764,35 @@ Generating SQLAlchemy Tables from Serialized Data skip_nested = True, default_to_str = False, type_mapping = None) + + .. tab:: Pydantic + + .. versionadded:: 0.8.0 + + .. code-block:: python + + from pydantic import BaseModel + from sqlathanor import Table + + # Define Your Pydantic Models + class UserWriteModel(BaseModel): + id: int + username: str + email: str + password: str + + class UserReadModel(BaseModel): + id: int + username: str + email: str + + # Create Your Table + pydantic_table = Table.from_pydantic([UserWriteModel, UserReadModel], + tablename = 'my_tablename_goes_here', + primary_key = 'id') + + .. seealso:: + + * :class:`Table ` + * :meth:`Table.from_pydantic() ` + * :doc:`SQLAthanor and Pydantic ` diff --git a/docs/using.rst b/docs/using.rst index a1e5c26..691e192 100644 --- a/docs/using.rst +++ b/docs/using.rst @@ -148,6 +148,8 @@ SQLAthanor Features :doc:`SQLAlchemy ORM `. * Maintain all of the existing APIs, methods, functions, and functionality of :doc:`SQLAlchemy Declarative ORM `. +* Drive the definition and configuration of your data model from your + :term:`Pydantic models `. --------------- @@ -260,7 +262,7 @@ Dependencies .. tabs:: - .. tab:: Standard Approach + .. tab:: Normally Because **SQLAthanor** is a drop-in replacement for `SQLAlchemy`_ and its :doc:`Declarative ORM `, you can @@ -272,7 +274,36 @@ Dependencies * :doc:`SQLAlchemy ORM Tutorial ` * :doc:`Flask-SQLAlchemy: Declaring Models ` - .. tab:: Declarative Reflection + .. tab:: from Pydantic + + .. versionadded:: 0.8.0 + + If your application is using `Pydantic`_ as a parsing/validation library, then you + can programmatically generate a pre-configured + :doc:`SQLAlchemy Declarative ` + :term:`model class` using **SQLAthanor** with the syntax + ``generate_model_from_pydantic()``. + + This function can accept one or more :term:`Pydantic models `, and + will consolidate them into a single **SQLAthanor** :term:`model class`, with + each underlying :term:`Pydantic model` corresponding to a :term:`configuration set` + for easy serialization / deserialization: + + .. code-block:: python + + from sqlathanor import generate_model_from_pydantic + + # Assuming that "PydanticBaseModel" is a + PydanticDBModel = generate_model_from_pydantic([PydanticReadModel, PydanticWriteModel], + tablename = 'my_table_name', + primary_key = 'id') + + .. seealso:: + + * :func:`generate_model_from_pydantic() ` + * :doc:`SQLAthanor and Pydantic Support ` + + .. 
tab:: via Reflection `SQLAlchemy`_ supports the use of `reflection`_ with the :doc:`SQLAlchemy Declarative ORM `. @@ -293,7 +324,7 @@ Dependencies * **SQLAlchemy**: :doc:`Reflecting Database Objects ` * **SQLAlchemy**: `Using Reflection with Declarative `_ - .. tab:: Using Automap + .. tab:: via Automap .. versionadded:: 0.2.0 @@ -314,7 +345,7 @@ Dependencies * :ref:`Using Automap with SQLAthanor ` * **SQLAlchemy**: :doc:`Automap Extension ` - .. tab:: Programmatically + .. tab:: from Serialized Data .. versionadded:: 0.3.0 @@ -1054,6 +1085,8 @@ Why Two Configuration Approaches? and "data scientists", I've tried to design an interface that will feel "natural" to both communities. +.. _configuring_at_runtime: + Configuring at Runtime ------------------------------------- @@ -1632,20 +1665,22 @@ expect it in inbound data to de-serialize. * **SQLAlchemy**: :doc:`Automap Extension ` * :ref:`Using Declarative Reflection with SQLAthanor ` -.. _SQLAlchemy: http://www.sqlalchemy.org -.. _Flask-SQLAlchemy: http://flask-sqlalchemy.pocoo.org/ .. _reflection: http://docs.sqlalchemy.org/en/latest/orm/extensions/declarative/table_config.html#using-reflection-with-declarative ---------------------------- .. _generating_tables: -Generating SQLAlchemy Tables from Serialized Data +Generating SQLAlchemy Tables... ==================================================== +...from Serialized Data +------------------------------- + .. versionadded:: 0.3.0 -If you are **not** using `SQLAlchemy`_'s :doc:`Declarative ORM ` +If you are **not** using `SQLAlchemy`_'s +:doc:`Declarative ORM ` but would like to generate SQLAlchemy :class:`Table ` objects programmatically based on serialized data, you can do so by importing the **SQLAthanor** :class:`Table ` object and calling a @@ -1724,3 +1759,42 @@ objects programmatically based on serialized data, you can do so by importing th * :meth:`Table.from_json() ` * :meth:`Table.from_yaml() ` * :meth:`Table.from_dict() ` + +... from Pydantic Models +-------------------------------- + +If you are **not** using `SQLAlchemy`_'s +:doc:`Declarative ORM ` +but would like to generate SQLAlchemy :class:`Table ` +objects programmatically based on :term:`Pydantic models `, you can do so +by importing the **SQLAthanor** :class:`Table ` class and calling +the :meth:`from_pydantic() ` class +method: + +.. code-block:: python + + from pydantic import BaseModel + from sqlathanor import Table + + # Define Your Pydantic Models + class PydanticWriteModel(BaseModel): + id: int + username: str + email: str + password: str + + class PydanticReadModel(BaseModel): + id: int + username: str + email: str + + # Create Your Table + pydantic_table = Table.from_pydantic([PydanticWriteModel, PydanticReadModel], + tablename = 'my_tablename_goes_here', + primary_key = 'id') + +.. seealso:: + + * :class:`Table ` + * :meth:`Table.from_pydantic() ` + * :doc:`SQLAthanor and Pydantic ` diff --git a/sqlathanor/declarative/_base_configuration_mixin.py b/sqlathanor/declarative/_base_configuration_mixin.py index 30af5a2..484201b 100644 --- a/sqlathanor/declarative/_base_configuration_mixin.py +++ b/sqlathanor/declarative/_base_configuration_mixin.py @@ -1013,8 +1013,11 @@ def configure_serialization(cls, on_serialize = None, on_deserialize = None, config_set = None): - """Apply configuration settings to the :term:`model class` (overwrites - entire configuration). + """Apply configuration settings to the :term:`model class`. + + .. 
caution:: + This method either overwrites the entire configuration (if no ``config_set`` is + supplied), or overwrites the entire ``config_set`` indicated. .. tip:: diff --git a/sqlathanor/schema.py b/sqlathanor/schema.py index da8e9af..aa3a3a5 100644 --- a/sqlathanor/schema.py +++ b/sqlathanor/schema.py @@ -1041,12 +1041,12 @@ def from_pydantic(cls, default_to_str = False, type_mapping = None, **kwargs): - """Generate a :class:`Table` object from a one of more - :term:`Pydantic Models `. + """Generate a :class:`Table` object from one or more + :term:`Pydantic models `. .. versionadded: 0.8.0 - :param models: The :term:`Pydantic Model(s) ` which will + :param models: The :term:`Pydantic model(s) ` which will determine the columns/attributes that will be present within the generated table. diff --git a/sqlathanor/utilities.py b/sqlathanor/utilities.py index a6665ec..55f2a83 100644 --- a/sqlathanor/utilities.py +++ b/sqlathanor/utilities.py @@ -842,9 +842,9 @@ def columns_from_pydantic(config_sets, """Generate a set of :class:`Column ` instances and a corresponding collection of :class:`AttributeConfiguration ` objects - from a set of :term:`Pydantic Models `. + from a set of :term:`Pydantic models `. - :param config_sets: A collection of :term:`Pydantic Models ` organized + :param config_sets: A collection of :term:`Pydantic models ` organized into a :class:`dict ` whose keys correspond to the name of a :term:`configuration set` and whose values are an iterable of :term:`pydantic.BaseModel ` classes. From 2bc10aac991c3749aff761eee2a3bb740f23d812 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 14:53:37 -0500 Subject: [PATCH 09/24] Closes #100. Updated config set documentation in BaseModel class methods. --- .../declarative/_base_configuration_mixin.py | 48 ++++++++++--------- 1 file changed, 25 insertions(+), 23 deletions(-) diff --git a/sqlathanor/declarative/_base_configuration_mixin.py b/sqlathanor/declarative/_base_configuration_mixin.py index 484201b..15addef 100644 --- a/sqlathanor/declarative/_base_configuration_mixin.py +++ b/sqlathanor/declarative/_base_configuration_mixin.py @@ -568,8 +568,8 @@ def get_serialization_config(cls, :param config_set: If not :obj:`None `, will return those :class:`AttributeConfiguration ` - objects that are contained within the named set. Defaults to - :obj:`None `. + objects that are contained within the named + :term:`configuration set`. Defaults to :obj:`None `. :type config_set: :class:`str ` / :obj:`None ` :returns: List of attribute configurations. @@ -648,8 +648,8 @@ def get_attribute_serialization_config(cls, :param config_set: If not :obj:`None `, will return the :class:`AttributeConfiguration ` - object for ``attribute`` that is contained within the named set. Defaults to - :obj:`None `. + object for ``attribute`` that is contained within the named + :term:`configuration set`. Defaults to :obj:`None `. :type config_set: :class:`str ` / :obj:`None ` :returns: The @@ -896,7 +896,8 @@ def set_attribute_serialization_config(cls, .. warning:: If the ``config_set`` is not defined on the model, then a - :exc:`ValueError ` will be raised. + :exc:`ConfigurationError ` will be + raised. :type config_set: :class:`str ` / :obj:`None ` @@ -1017,7 +1018,8 @@ def configure_serialization(cls, .. caution:: This method either overwrites the entire configuration (if no ``config_set`` is - supplied), or overwrites the entire ``config_set`` indicated. + supplied) set for the :term:`model class`, or overwrites the ``config_set`` + indicated. 
.. tip:: @@ -1203,10 +1205,10 @@ def configure_serialization(cls, as keys and values as callables / :obj:`None ` / ``False`` :param config_set: If not :obj:`None `, will apply ``configs`` to the - configuration set named. If the class does not use pre-existing configuration sets, - will switch the class' meta configuration to use configuration sets, with any - pre-existing configuration set assigned to a set named ``_original``. Defaults - to :obj:`None `. + :term:`configuration set` named. If the class does not use pre-existing + configuration sets, will switch the class' meta configuration to use + configuration sets, with any pre-existing configuration assigned to a set named + ``_original``. Defaults to :obj:`None `. :type config_set: :class:`str ` / :obj:`None ` """ @@ -1317,7 +1319,7 @@ def does_support_serialization(cls, :type from_dict: :class:`bool ` / :obj:`None ` :param config_set: If not :obj:`None `, will determine serialization - support within the indicated configuration set. Defaults to + support within the indicated :term:`configuration set`. Defaults to :obj:`None `. :type config_set: :class:`str ` / :obj:`None ` @@ -1433,9 +1435,9 @@ def get_csv_serialization_config(cls, Defaults to :obj:`None `. :type serialize: :class:`bool ` / :obj:`None ` - :param config_set: If not :obj:`None `, the named configuration set - whose CSV serialization configuration should be returned. Defaults to - :obj:`None `. + :param config_set: If not :obj:`None `, the named + :term:`configuration set` whose CSV serialization configuration should be + returned. Defaults to :obj:`None `. :type config_set: :class:`str ` / :obj:`None ` :returns: Set of attribute serialization configurations that match the @@ -1486,9 +1488,9 @@ def get_json_serialization_config(cls, Defaults to :obj:`None `. :type serialize: :class:`bool ` / :obj:`None ` - :param config_set: If not :obj:`None `, the named configuration set - whose serialization configuration should be returned. Defaults to - :obj:`None `. + :param config_set: If not :obj:`None `, the named + :term:`configuration set` whose JSON serialization configuration should be + returned. Defaults to :obj:`None `. :type config_set: :class:`str ` / :obj:`None ` :returns: Set of attribute serialization configurations that match the @@ -1524,9 +1526,9 @@ def get_yaml_serialization_config(cls, Defaults to :obj:`None `. :type serialize: :class:`bool ` / :obj:`None ` - :param config_set: If not :obj:`None `, the named configuration set - whose serialization configuration should be returned. Defaults to - :obj:`None `. + :param config_set: If not :obj:`None `, the named + :term:`configuration set` whose YAML serialization configuration should be + returned. Defaults to :obj:`None `. :type config_set: :class:`str ` / :obj:`None ` :returns: Set of attribute serialization configurations that match the @@ -1564,9 +1566,9 @@ def get_dict_serialization_config(cls, Defaults to :obj:`None `. :type serialize: :class:`bool ` / :obj:`None ` - :param config_set: If not :obj:`None `, the named configuration set - whose serialization configuration should be returned. Defaults to - :obj:`None `. + :param config_set: If not :obj:`None `, the named + :term:`configuration set` whose :class:`dict ` serialization + configuration should be returned. Defaults to :obj:`None `. 
:type config_set: :class:`str ` / :obj:`None ` :returns: Set of attribute serialization configurations that match the From e017071980fb26b2c62f0a1eaf10be3947f8d7fc Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 14:58:19 -0500 Subject: [PATCH 10/24] Updated documentation on heap-caching class methods. --- .../declarative/_base_configuration_mixin.py | 28 ++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/sqlathanor/declarative/_base_configuration_mixin.py b/sqlathanor/declarative/_base_configuration_mixin.py index 15addef..2a91afe 100644 --- a/sqlathanor/declarative/_base_configuration_mixin.py +++ b/sqlathanor/declarative/_base_configuration_mixin.py @@ -1581,7 +1581,25 @@ def get_dict_serialization_config(cls, config_set = config_set)], config_set) @classmethod - def _heapable(cls, name, value, config_set=None): + def _heapable(cls, + name, + value, + config_set = None): + """Cache the serialization configuration in a + ``__serialization_cache___`` class attribute. + + :param name: The name to set for the heap. + :type name: :class:`str ` + + :param value: The serialization/de-serialization configuration to cache in the + heap. + + :param config_set: If not :obj:`None `, the name of the named + :term:`configuration set` for which the serialization/deserialization + confiugration should be cached. Defaults to :obj:`None `. + :type config_set: :class:`str ` / :obj:`None ` + """ + _heap_name = '__serialization_cache__%s{name}__%s__' % (name, "default" if config_set is None else config_set) if not hasattr(cls, _heap_name): @@ -1594,6 +1612,14 @@ def _heapable(cls, name, value, config_set=None): @classmethod def clear_serialization_cache(cls): + """Clears any cached serialization/de-serialization configurations. + + .. note:: + + Does not affect the configuration itself - merely clears the heap caches if + present. + + """ for n in list(cls.__dict__.keys()): if n.startswith('__serialization_cache__'): delattr(cls, n) From 57decb730884177368f9f573c1e85639535150ac Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 18:22:39 -0500 Subject: [PATCH 11/24] Fixed typo in setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 2d03237..c4d1bdd 100644 --- a/setup.py +++ b/setup.py @@ -181,7 +181,7 @@ 'tox', 'codecov', 'Flask-SQLAlchemy', - 'pydantic;pytnon_version>="3.6"'], + 'pydantic;python_version>="3.6"'], }, python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4', From fe6f75177d52bf248f04fa2c41fe732cd7ca8d3e Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 20:41:14 -0500 Subject: [PATCH 12/24] Made code resilient to typing introspection support in earlier versions of Python. 
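For context, the incompatibility this patch works around can be summarised with the rough sketch below (this is an illustration, not the shipped helper): typing.get_origin() and typing.get_args() only exist on Python 3.8 and later, while older releases expose the same information through dunder attributes that moved between 3.6 and 3.7, which is what the try/except chains added to sqlathanor/utilities.py handle.

from typing import List, Optional

def origin_of(annotation):
    try:
        from typing import get_origin   # available on Python 3.8 and later
        return get_origin(annotation)
    except ImportError:
        # Older interpreters: read the attribute directly. Python 3.6 also
        # exposed __extra__ for container types such as List[int].
        return getattr(annotation, '__origin__', None)

print(origin_of(Optional[str]))   # typing.Union on 3.6 and later
print(origin_of(List[int]))       # list on 3.7+, typing.List on 3.6
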
--- .gitignore | 1 + sqlathanor/default_deserializers.py | 32 +++++++++++------ sqlathanor/utilities.py | 55 ++++++++++++++++++++++++++++- 3 files changed, 76 insertions(+), 12 deletions(-) diff --git a/.gitignore b/.gitignore index db29281..03d23a2 100644 --- a/.gitignore +++ b/.gitignore @@ -93,6 +93,7 @@ celerybeat-schedule # virtualenv .venv venv/ +.py35 .python38 .py38 .python37 diff --git a/sqlathanor/default_deserializers.py b/sqlathanor/default_deserializers.py index 0195725..1da3aca 100644 --- a/sqlathanor/default_deserializers.py +++ b/sqlathanor/default_deserializers.py @@ -9,13 +9,17 @@ from decimal import Decimal import io -from typing import get_origin, get_args, Any, Optional, Union, Mapping, List +from typing import Any, Optional, Union, Mapping, List +try: + from typing import UnionMeta +except ImportError: + from typing import _GenericAlias as UnionMeta from validator_collection import validators, checkers from sqlathanor._compat import json -from sqlathanor.utilities import format_to_tuple, get_class_type_key, \ - raise_UnsupportedSerializationError, raise_UnsupportedDeserializationError +from sqlathanor.utilities import format_to_tuple, get_class_type_key, get_origin, \ + get_args, raise_UnsupportedSerializationError, raise_UnsupportedDeserializationError from sqlathanor.errors import UnsupportedValueTypeError from sqlalchemy.types import Boolean, Date, DateTime, Float, Integer, Text, Time, Interval @@ -318,22 +322,25 @@ def get_pydantic_type_mapping(field, target_type = 'str' elif field.type_ is None and default_to_str: target_type = 'str' - elif field.type_ == bool or (get_origin(field.type_) in (Union, Optional, List) and - get_args(field.type_)[0] == str): + elif (get_origin(field.type_) in (Union, Optional, List, UnionMeta) and + get_args(field.type_)[0] == str): + target_type = 'str' + elif field.type_ == bool or (get_origin(field.type_) in (Union, Optional, List, UnionMeta) and + get_args(field.type_)[0] == bool): target_type = 'bool' - elif field.type_ == int or (get_origin(field.type_) in (Union, Optional, List) and + elif field.type_ == int or (get_origin(field.type_) in (Union, Optional, List, UnionMeta) and get_args(field.type_)[0] == int): target_type = 'int' - elif field.type_ in (float, Decimal) or (get_origin(field.type_) in (Union, Optional, List) and + elif field.type_ in (float, Decimal) or (get_origin(field.type_) in (Union, Optional, List, UnionMeta) and get_args(field.type_)[0] in (float, Decimal)): target_type = 'float' - elif field.type_ == datetime.time or (get_origin(field.type_) in (Union, Optional, List) and + elif field.type_ == datetime.time or (get_origin(field.type_) in (Union, Optional, List, UnionMeta) and get_args(field.type_)[0] == datetime.time): target_type = 'time' - elif field.type_ == datetime.datetime or (get_origin(field.type_) in (Union, Optional, List) and + elif field.type_ == datetime.datetime or (get_origin(field.type_) in (Union, Optional, List, UnionMeta) and get_args(field.type_)[0] == datetime.datetime): target_type = 'datetime' - elif field.type_ == datetime.date or (get_origin(field.type_) in (Union, Optional, List) and + elif field.type_ == datetime.date or (get_origin(field.type_) in (Union, Optional, List, UnionMeta) and get_args(field.type_)[0] == datetime.date): target_type = 'date' elif field.type_ == Any: @@ -341,7 +348,10 @@ def get_pydantic_type_mapping(field, elif default_to_str: target_type = 'str' else: - target_type = field.type_.__name__ + try: + target_type = get_args(field.type_)[0] + except 
(AttributeError, IndexError): + target_type = field.type_.__name__ column_type = type_mapping.get(target_type, None) if not column_type: diff --git a/sqlathanor/utilities.py b/sqlathanor/utilities.py index 55f2a83..a860e9d 100644 --- a/sqlathanor/utilities.py +++ b/sqlathanor/utilities.py @@ -22,7 +22,7 @@ from validator_collection import validators, checkers from validator_collection.errors import NotAnIterableError -from sqlathanor._compat import json, is_py2, is_py36, is_py35, dict as dict_ +from sqlathanor._compat import json, is_py2, is_py34, is_py36, is_py35, dict as dict_ from sqlathanor.errors import InvalidFormatError, UnsupportedSerializationError, \ UnsupportedDeserializationError, MaximumNestingExceededError, \ MaximumNestingExceededWarning, DeserializationError, CSVStructureError @@ -986,3 +986,56 @@ class object serialization_config.extend(set_attribute_configs) return columns, serialization_config + + +def get_origin(type_): + """Retrieve the base type annotation of ``type_``. + + :param type_: The full generic type annotation to parse. + + :returns: The type origin. :obj:`None ` if on Python 3.4 or 2.7. + """ + if is_py2 or is_py34: + return None + + try: + from typing import get_origin as get_origin_ + return_value = get_origin_(type_) + except ImportError: + try: + return_value = type_.__extra__ + except AttributeError: + try: + return_value = type_.__origin__ + except AttributeError: + return_value = type_.__class__ + + return return_value + + +def get_args(type_): + """Retrieve the arguments passed to a generic type annotation of ``type_``. + + :param type_: The full generic type annotation to parse. + + :returns: The arguments passed to the generic type annotation. + :obj:`None ` if on Python 3.4 or 2.7. + """ + if is_py2 or is_py34: + return None + + try: + from typing import get_args as get_args_ + return_value = get_args_(type_) + except ImportError: + try: + return_value = type_.__args__ + except AttributeError: + try: + return_value = type_.__parameters__ + except AttributeError: + for attr in dir(type_): + if '_params__' in attr and '_set_params__' not in attr: + return_value = getattr(type_, attr, None) + + return return_value From 10d15f0d70925c78922a71764cbe1c1276c92b66 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 20:52:23 -0500 Subject: [PATCH 13/24] Fixed minor bug. --- sqlathanor/utilities.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sqlathanor/utilities.py b/sqlathanor/utilities.py index a860e9d..1cc0f5d 100644 --- a/sqlathanor/utilities.py +++ b/sqlathanor/utilities.py @@ -1034,8 +1034,10 @@ def get_args(type_): try: return_value = type_.__parameters__ except AttributeError: + return_value = None for attr in dir(type_): if '_params__' in attr and '_set_params__' not in attr: return_value = getattr(type_, attr, None) + break return return_value From 129551c4cbd22a33bcc865184229dbbe72cb9f91 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 21:26:21 -0500 Subject: [PATCH 14/24] Fixed edge case for type identification in earlier Python versions. 
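A minimal, hypothetical illustration of the edge case (one reading of the change below, not the shipped code): on interpreters where the compatibility get_args() helper returns None, indexing the result with [0] raises a TypeError that the existing except (AttributeError, IndexError) clause never caught, so the result is now checked before it is indexed.

def first_arg(args, fallback):
    # 'args' stands in for whatever the get_args() compatibility shim returned:
    # a populated tuple on modern interpreters, None on Python 2.7/3.4/3.5.
    if args:
        return args[0]
    return fallback

print(first_arg((int, str), 'str'))   # <class 'int'>
print(first_arg(None, 'str'))         # 'str' (unguarded None[0] raises TypeError)
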
--- sqlathanor/default_deserializers.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/sqlathanor/default_deserializers.py b/sqlathanor/default_deserializers.py index 1da3aca..b5fbfa9 100644 --- a/sqlathanor/default_deserializers.py +++ b/sqlathanor/default_deserializers.py @@ -348,10 +348,14 @@ def get_pydantic_type_mapping(field, elif default_to_str: target_type = 'str' else: - try: - target_type = get_args(field.type_)[0] - except (AttributeError, IndexError): - target_type = field.type_.__name__ + prelim_target_type = get_args(field.type_) + if prelim_target_type: + try: + target_type = get_args(field.type_)[0] + except (AttributeError, IndexError): + target_type = field.type_.__name__ + else: + target_type = prelim_target_type column_type = type_mapping.get(target_type, None) if not column_type: From 24f1a22c09ee3d10e067d6191cd041d030027ea6 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 21:41:46 -0500 Subject: [PATCH 15/24] Added additional edge case support. --- sqlathanor/default_deserializers.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sqlathanor/default_deserializers.py b/sqlathanor/default_deserializers.py index b5fbfa9..c084532 100644 --- a/sqlathanor/default_deserializers.py +++ b/sqlathanor/default_deserializers.py @@ -325,6 +325,9 @@ def get_pydantic_type_mapping(field, elif (get_origin(field.type_) in (Union, Optional, List, UnionMeta) and get_args(field.type_)[0] == str): target_type = 'str' + elif field.type_ == str or (get_origin(field.type_) in (Union, Optional, List, UnionMeta) and + get_args(field.type_)[0] == str): + target_type = 'str' elif field.type_ == bool or (get_origin(field.type_) in (Union, Optional, List, UnionMeta) and get_args(field.type_)[0] == bool): target_type = 'bool' @@ -355,7 +358,7 @@ def get_pydantic_type_mapping(field, except (AttributeError, IndexError): target_type = field.type_.__name__ else: - target_type = prelim_target_type + return None column_type = type_mapping.get(target_type, None) if not column_type: From 4538346691fc81705ecbaae022f775703e3e8dd0 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 22:19:49 -0500 Subject: [PATCH 16/24] Added Union parent class for Python 3.6 support. --- sqlathanor/default_deserializers.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sqlathanor/default_deserializers.py b/sqlathanor/default_deserializers.py index c084532..4cf514d 100644 --- a/sqlathanor/default_deserializers.py +++ b/sqlathanor/default_deserializers.py @@ -13,7 +13,10 @@ try: from typing import UnionMeta except ImportError: - from typing import _GenericAlias as UnionMeta + try: + from typing import _GenericAlias as UnionMeta + except ImportError: + UnionMeta = Union from validator_collection import validators, checkers From 949d748a3eb42b44b19c4fdaab433da97f33d14c Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 22:26:16 -0500 Subject: [PATCH 17/24] Removed implicit Pydantic support for Python 3.5. --- sqlathanor/utilities.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sqlathanor/utilities.py b/sqlathanor/utilities.py index 1cc0f5d..87b3ff7 100644 --- a/sqlathanor/utilities.py +++ b/sqlathanor/utilities.py @@ -995,7 +995,7 @@ def get_origin(type_): :returns: The type origin. :obj:`None ` if on Python 3.4 or 2.7. 
""" - if is_py2 or is_py34: + if is_py2 or is_py34 or is_py35: return None try: @@ -1021,7 +1021,7 @@ def get_args(type_): :returns: The arguments passed to the generic type annotation. :obj:`None ` if on Python 3.4 or 2.7. """ - if is_py2 or is_py34: + if is_py2 or is_py34 or is_py35: return None try: From b48e538d6037e9ade580fee02cde90d7d3884590 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 22:29:22 -0500 Subject: [PATCH 18/24] Fixed typing import to support Python 3.4 and 2.7. --- sqlathanor/default_deserializers.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/sqlathanor/default_deserializers.py b/sqlathanor/default_deserializers.py index 4cf514d..b7aeff9 100644 --- a/sqlathanor/default_deserializers.py +++ b/sqlathanor/default_deserializers.py @@ -9,7 +9,15 @@ from decimal import Decimal import io -from typing import Any, Optional, Union, Mapping, List +try: + from typing import Any, Optional, Union, Mapping, List +except ImportError: + Any = 'Any' + Optional = 'Optional' + Union = 'Union' + Mapping = 'Mapping' + List = 'List' + try: from typing import UnionMeta except ImportError: From 2ce5ae0dbe0a2de4431e04201c269d1c986327be Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 23:01:07 -0500 Subject: [PATCH 19/24] Attempted to address SyntaxError in Python 3.5 --- tests/test_table_deserialization.py | 39 +++++++++++++++++------------ tests/test_utilities.py | 39 +++++++++++++++++------------ 2 files changed, 46 insertions(+), 32 deletions(-) diff --git a/tests/test_table_deserialization.py b/tests/test_table_deserialization.py index 20ba5f2..62a8865 100644 --- a/tests/test_table_deserialization.py +++ b/tests/test_table_deserialization.py @@ -32,26 +32,33 @@ from pydantic import BaseModel from pydantic.fields import Field, ModelField - class PydanticModel(BaseModel): - id: int - field_1: str - field_2: str - - class PydanticModel2(BaseModel): - id: int - field_1: str - field_2: str - field_3: Any - - class PydanticModel3(BaseModel): - id: int - field_4: Optional[str] - field_5: bool - field_6: Union[str, int] + try: + class PydanticModel(BaseModel): + id: int + field_1: str + field_2: str + + class PydanticModel2(BaseModel): + id: int + field_1: str + field_2: str + field_3: Any + + class PydanticModel3(BaseModel): + id: int + field_4: Optional[str] + field_5: bool + field_6: Union[str, int] + except SyntaxError: + PydanticModel = 'Python <3.6' + PydanticModel2 = 'Python <3.6' + PydanticModel3 = 'Python <3.6' else: def Field(*args, **kwargs): return None PydanticModel = 'Python <3.6' + PydanticModel2 = 'Python <3.6' + PydanticModel3 = 'Python <3.6' # pylint: disable=line-too-long diff --git a/tests/test_utilities.py b/tests/test_utilities.py index 93ad0e3..a010726 100644 --- a/tests/test_utilities.py +++ b/tests/test_utilities.py @@ -34,26 +34,33 @@ from pydantic import BaseModel from pydantic.fields import Field, ModelField - class PydanticModel(BaseModel): - id: int - field_1: str - field_2: str - - class PydanticModel2(BaseModel): - id: int - field_1: str - field_2: str - field_3: Any - - class PydanticModel3(BaseModel): - id: int - field_4: Optional[str] - field_5: bool - field_6: Union[str, int] + try: + class PydanticModel(BaseModel): + id: int + field_1: str + field_2: str + + class PydanticModel2(BaseModel): + id: int + field_1: str + field_2: str + field_3: Any + + class PydanticModel3(BaseModel): + id: int + field_4: Optional[str] + field_5: bool + field_6: Union[str, int] + except SyntaxError: + 
PydanticModel = 'Python <3.6' + PydanticModel2 = 'Python <3.6' + PydanticModel3 = 'Python <3.6' else: def Field(*args, **kwargs): return None PydanticModel = 'Python <3.6' + PydanticModel2 = 'Python <3.6' + PydanticModel3 = 'Python <3.6' From 1e4eefc534fada56c68a5ce5d571aea97a04c9d7 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Tue, 16 Feb 2021 23:43:21 -0500 Subject: [PATCH 20/24] Addressed syntax error in Python 3.5, 3.4, and 2.7. --- tests/test_table_deserialization.py | 43 ++++++++++++++++------------- 1 file changed, 24 insertions(+), 19 deletions(-) diff --git a/tests/test_table_deserialization.py b/tests/test_table_deserialization.py index 62a8865..baca748 100644 --- a/tests/test_table_deserialization.py +++ b/tests/test_table_deserialization.py @@ -29,27 +29,32 @@ from tests.fixtures import check_input_file, input_files if is_py36: - from pydantic import BaseModel - from pydantic.fields import Field, ModelField - + class_def = """ +from pydantic import BaseModel +from pydantic.fields import Field, ModelField + +class PydanticModel(BaseModel): + id: int + field_1: str + field_2: str + +class PydanticModel2(BaseModel): + id: int + field_1: str + field_2: str + field_3: Any + +class PydanticModel3(BaseModel): + id: int + field_4: Optional[str] + field_5: bool + field_6: Union[str, int] +""" try: - class PydanticModel(BaseModel): - id: int - field_1: str - field_2: str - - class PydanticModel2(BaseModel): - id: int - field_1: str - field_2: str - field_3: Any - - class PydanticModel3(BaseModel): - id: int - field_4: Optional[str] - field_5: bool - field_6: Union[str, int] + exec(class_def) except SyntaxError: + def Field(*args, **kwargs): + return None PydanticModel = 'Python <3.6' PydanticModel2 = 'Python <3.6' PydanticModel3 = 'Python <3.6' From 294fa063a7f08da1ee037d8a84fc0a981802cbd8 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Wed, 17 Feb 2021 00:22:42 -0500 Subject: [PATCH 21/24] Addressed syntax errors in a number of unit test modules for Python 2.7 - 3.5. 
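
Variable annotations (PEP 526) are rejected by the parser itself on
Python < 3.6, so the Pydantic test models cannot appear as literal
source in these modules at all; wrapping them in an "if is_py36:"
block does not help, because the whole module is still parsed at
import time. Moving the class bodies into a string and exec()-ing
that string defers parsing to runtime, where the failure surfaces as
a catchable SyntaxError. A minimal sketch of the idea (the names
below are illustrative only, not the actual test fixtures):

    source = "class M:\n    x: int\n"   # PEP 526 annotation

    try:
        exec(source)                    # string is only parsed at runtime
    except SyntaxError:                 # raised by the parser on Python < 3.6
        M = 'Python <3.6'               # sentinel value the tests check for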
--- tests/test_attributes.py | 22 ++++++++++----- tests/test_deserializers.py | 17 ++++++++---- tests/test_model_generation.py | 50 +++++++++++++++++++++------------- tests/test_utilities.py | 43 ++++++++++++++++------------- 4 files changed, 82 insertions(+), 50 deletions(-) diff --git a/tests/test_attributes.py b/tests/test_attributes.py index 37a3e91..4d4094d 100644 --- a/tests/test_attributes.py +++ b/tests/test_attributes.py @@ -15,13 +15,21 @@ from sqlathanor._compat import is_py36 if is_py36: - from pydantic import BaseModel - from pydantic.fields import Field, ModelField - - class PydanticModel(BaseModel): - field_1: int - field_2: str - field_3: Any + class_def = """ +from pydantic import BaseModel +from pydantic.fields import Field, ModelField + +class PydanticModel(BaseModel): + field_1: int + field_2: str + field_3: Any +""" + try: + exec(class_def) + except SyntaxError: + def Field(*args, **kwargs): + return None + PydanticModel = 'Python <3.6' else: def Field(*args, **kwargs): return None diff --git a/tests/test_deserializers.py b/tests/test_deserializers.py index 73ef422..f879b78 100644 --- a/tests/test_deserializers.py +++ b/tests/test_deserializers.py @@ -22,13 +22,20 @@ from sqlathanor._compat import is_py36 if is_py36: - from pydantic import BaseModel - from pydantic.fields import Field, ModelField + class_def = """ +from pydantic import BaseModel +from pydantic.fields import Field, ModelField - class PydanticModel(BaseModel): - id: datetime.timedelta +class PydanticModel(BaseModel): + id: datetime.timedelta - pydantic_field = PydanticModel.__fields__.get('id', None) +pydantic_field = PydanticModel.__fields__.get('id', None) +""" + try: + exec(class_def) + except SyntaxError: + pydantic_field = None + PydanticModel = 'Python <3.6' else: pydantic_field = None diff --git a/tests/test_model_generation.py b/tests/test_model_generation.py index d30a6e8..b439254 100644 --- a/tests/test_model_generation.py +++ b/tests/test_model_generation.py @@ -32,29 +32,41 @@ if is_py36: - from pydantic import BaseModel - from pydantic.fields import Field, ModelField - - class PydanticModel(BaseModel): - id: int - field_1: str - field_2: str - - class PydanticModel2(BaseModel): - id: int - field_1: str - field_2: str - field_3: Any - - class PydanticModel3(BaseModel): - id: int - field_4: Optional[str] - field_5: bool - field_6: Union[str, int] + class_def = """ +from pydantic import BaseModel +from pydantic.fields import Field, ModelField + +class PydanticModel(BaseModel): + id: int + field_1: str + field_2: str + +class PydanticModel2(BaseModel): + id: int + field_1: str + field_2: str + field_3: Any + +class PydanticModel3(BaseModel): + id: int + field_4: Optional[str] + field_5: bool + field_6: Union[str, int] +""" + try: + exec(class_def) + except SyntaxError: + def Field(*args, **kwargs): + return None + PydanticModel = 'Python <3.6' + PydanticModel2 = 'Python <3.6' + PydanticModel3 = 'Python <3.6' else: def Field(*args, **kwargs): return None PydanticModel = 'Python <3.6' + PydanticModel2 = 'Python <3.6' + PydanticModel3 = 'Python <3.6' def test_func(): diff --git a/tests/test_utilities.py b/tests/test_utilities.py index a010726..af05624 100644 --- a/tests/test_utilities.py +++ b/tests/test_utilities.py @@ -31,27 +31,32 @@ MaximumNestingExceededWarning, DeserializationError, CSVStructureError if is_py36: - from pydantic import BaseModel - from pydantic.fields import Field, ModelField - + class_def = """ +from pydantic import BaseModel +from pydantic.fields import Field, ModelField + 
+class PydanticModel(BaseModel): + id: int + field_1: str + field_2: str + +class PydanticModel2(BaseModel): + id: int + field_1: str + field_2: str + field_3: Any + +class PydanticModel3(BaseModel): + id: int + field_4: Optional[str] + field_5: bool + field_6: Union[str, int] +""" try: - class PydanticModel(BaseModel): - id: int - field_1: str - field_2: str - - class PydanticModel2(BaseModel): - id: int - field_1: str - field_2: str - field_3: Any - - class PydanticModel3(BaseModel): - id: int - field_4: Optional[str] - field_5: bool - field_6: Union[str, int] + exec(class_def) except SyntaxError: + def Field(*args, **kwargs): + return None PydanticModel = 'Python <3.6' PydanticModel2 = 'Python <3.6' PydanticModel3 = 'Python <3.6' From 507aa52ed4f6c5fbccca18a6cb16eccd44ae3151 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Wed, 17 Feb 2021 01:09:08 -0500 Subject: [PATCH 22/24] Debugged unit tests in Python 3.5, 3.4, and 2.7. --- tests/test_model_generation.py | 4 +++- tests/test_table_deserialization.py | 5 +++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/test_model_generation.py b/tests/test_model_generation.py index b439254..5dce236 100644 --- a/tests/test_model_generation.py +++ b/tests/test_model_generation.py @@ -724,7 +724,7 @@ def test_generate_model_from_csv(input_files, 'primary_key': 'id'}, None), ]) def test_generate_model_from_pydantic(kwargs, error): - if not error: + if not error and not isinstance(PydanticModel, str): tablename = kwargs.get('tablename', None) primary_key = kwargs.get('primary_key', None) base_model_attrs = kwargs.get('base_model_attrs', None) @@ -742,6 +742,8 @@ def test_generate_model_from_pydantic(kwargs, error): for key in base_model_attrs: assert hasattr(result, key) is True assert getattr(result, key) == base_model_attrs[key] + elif not error: + pass else: with pytest.raises(error): result = generate_model_from_pydantic(**kwargs) diff --git a/tests/test_table_deserialization.py b/tests/test_table_deserialization.py index baca748..7293918 100644 --- a/tests/test_table_deserialization.py +++ b/tests/test_table_deserialization.py @@ -768,7 +768,7 @@ def test_from_csv(input_files, 'primary_key': 'id'}, 7, None), ]) def test_from_pydantic(kwargs, expected_columns, error): - if not error: + if not error and not isinstance(PydanticModel, str): tablename = kwargs.get('tablename', None) primary_key = kwargs.get('primary_key', None) @@ -780,7 +780,8 @@ def test_from_pydantic(kwargs, expected_columns, error): assert result.name == tablename assert len(result.c) == expected_columns - + elif not error: + pass else: with pytest.raises(error): result = Table.from_pydantic(**kwargs) From 31963c97aeccf6e8284fad97550dc55c39453d61 Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Wed, 17 Feb 2021 01:44:08 -0500 Subject: [PATCH 23/24] Fixed ImportErrors in test modules for Python 3.4 and 2.7. --- tests/test_attributes.py | 5 ++++- tests/test_model_generation.py | 7 ++++++- tests/test_table_deserialization.py | 7 ++++++- tests/test_utilities.py | 7 ++++++- 4 files changed, 22 insertions(+), 4 deletions(-) diff --git a/tests/test_attributes.py b/tests/test_attributes.py index 4d4094d..58d5889 100644 --- a/tests/test_attributes.py +++ b/tests/test_attributes.py @@ -8,7 +8,10 @@ Tests for the schema extensions written in :ref:`sqlathanor.attributes`. 
""" -from typing import Any +try: + from typing import Any +except ImportError: + Any = 'Any' import pytest diff --git a/tests/test_model_generation.py b/tests/test_model_generation.py index 5dce236..f03bbc7 100644 --- a/tests/test_model_generation.py +++ b/tests/test_model_generation.py @@ -9,7 +9,12 @@ """ from datetime import datetime -from typing import Any, Union, Optional +try: + from typing import Any, Union, Optional +except ImportError: + Any = 'Any' + Union = 'Union' + Optional = 'Optional' import pytest diff --git a/tests/test_table_deserialization.py b/tests/test_table_deserialization.py index 7293918..5c3e3c4 100644 --- a/tests/test_table_deserialization.py +++ b/tests/test_table_deserialization.py @@ -9,7 +9,12 @@ """ from datetime import datetime -from typing import Any, Union, Optional +try: + from typing import Any, Union, Optional +except ImportError: + Any = 'Any' + Union = 'Union' + Optional = 'Optional' import pytest diff --git a/tests/test_utilities.py b/tests/test_utilities.py index af05624..8fc12f8 100644 --- a/tests/test_utilities.py +++ b/tests/test_utilities.py @@ -9,7 +9,12 @@ """ import os -from typing import Any, Union, Optional +try: + from typing import Any, Union, Optional +except ImportError: + Any = 'Any' + Union = 'Union' + Optional = 'Optional' import pytest import sqlalchemy From 4e595eb611a1e98f0303155e022cfd30fdbddf4c Mon Sep 17 00:00:00 2001 From: Chris Modzelewski Date: Wed, 17 Feb 2021 07:47:57 -0500 Subject: [PATCH 24/24] Updated tox.ini for Pydantic support in coverage test environment. --- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index f80ec05..7ac7895 100644 --- a/tox.ini +++ b/tox.ini @@ -72,6 +72,7 @@ deps = simplejson validator-collection Flask-SQLAlchemy + pydantic commands = {[testenv]commands} coverage report