Merge pull request #113 from NowanIlfideme/15-support-pydantic-v2
Support installations of Pydantic v2 (but only with the backported v1 models, i.e. `pydantic.v1`)
NowanIlfideme committed Apr 1, 2024
2 parents 7e6de55 + 6786ef2 commit 04a9ea7
Showing 25 changed files with 76 additions and 26 deletions.
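
In practice, "v1 backported models" means that code running against a Pydantic v2 installation keeps using the v1 API through the `pydantic.v1` backport, as the README and docs changes below illustrate. A minimal sketch of the intended usage, mirroring the documentation examples in this diff (not part of the commit itself):

```python
from tempfile import TemporaryDirectory

from pydantic.v1 import BaseModel  # the v1 API bundled inside Pydantic v2

from pydantic_kedro import load_model, save_model


class MyModel(BaseModel):
    x: str


with TemporaryDirectory() as tmpdir:
    # Round-trip the model through pydantic-kedro's standalone helpers.
    save_model(MyModel(x="example"), f"{tmpdir}/model")
    obj = load_model(f"{tmpdir}/model")
    assert isinstance(obj, MyModel)
    assert obj.x == "example"
```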
1 change: 1 addition & 0 deletions .github/workflows/python-testing.yml
@@ -20,6 +20,7 @@ jobs:
- "3.10"
lib-pydantic:
- "1.10.0"
- "2.6.4"
deps:
- dev,docs
steps:
3 changes: 2 additions & 1 deletion README.md
@@ -32,6 +32,7 @@ This example works for "pure", JSON-safe Pydantic models via

```python
from pydantic import BaseModel
# from pydantic.v1 import BaseModel # Pydantic V2
from pydantic_kedro import PydanticJsonDataset


@@ -61,7 +62,7 @@ Pydantic models:
```python
from tempfile import TemporaryDirectory

from pydantic import BaseModel
from pydantic.v1 import BaseModel
from pydantic_kedro import load_model, save_model

class MyModel(BaseModel):
2 changes: 2 additions & 0 deletions docs/arbitrary_types.md
@@ -11,6 +11,7 @@ You can't save/load these via JSON, but you can use the other dataset types:
```python
from tempfile import TemporaryDirectory
from pydantic import BaseModel
# from pydantic.v1 import BaseModel # Pydantic V2
from pydantic_kedro import PydanticZipDataset


@@ -92,6 +93,7 @@ Here's a example for [pandas](https://pandas.pydata.org/) and Pydantic V1:
import pandas as pd
from kedro_datasets.pandas import ParquetDataset
from pydantic import validator
# from pydantic.v1 import validator # Pydantic V2
from pydantic_kedro import ArbModel, PydanticZipDataset


2 changes: 2 additions & 0 deletions docs/index.md
@@ -49,6 +49,7 @@ Then use it as usual within your Kedro pipelines:

```python
from pydantic import BaseModel
# from pydantic.v1 import BaseModel # Pydantic V2
from kedro.pipeline import node

class SomeModel(BaseModel):
@@ -85,6 +86,7 @@ to save your model to any `fsspec`-supported location:

```python
from pydantic import BaseModel
# from pydantic.v1 import BaseModel # Pydantic V2
from pydantic_kedro import PydanticJsonDataset


1 change: 1 addition & 0 deletions docs/standalone_usage.md
@@ -8,6 +8,7 @@ You can use `pydantic-kedro` to save and load your Pydantic models without invok
from tempfile import TemporaryDirectory

from pydantic import BaseModel
# from pydantic.v1 import BaseModel # Pydantic V2
from pydantic_kedro import load_model, save_model

class MyModel(BaseModel):
10 changes: 10 additions & 0 deletions env-v1.yml
@@ -0,0 +1,10 @@
# This is an example Conda environment, for local development
name: pyd-kedro-v1
channels:
- conda-forge
dependencies:
- python=3.9 # minimum targeted version is currently 3.9
- pip
- pip:
- pydantic<2
- -e ".[dev,docs]"
1 change: 1 addition & 0 deletions environment.yml
@@ -6,4 +6,5 @@ dependencies:
- python=3.9 # minimum targeted version is currently 3.9
- pip
- pip:
- pydantic>2,<3
- -e ".[dev,docs]"
12 changes: 9 additions & 3 deletions pyproject.toml
@@ -21,8 +21,8 @@ classifiers = [
"Typing :: Typed",
]
dependencies = [
"pydantic>=1.10.0,<2", # TODO
"pydantic-yaml>=1.1.2",
"pydantic>=1.10.0,<3", # WIP
"pydantic-yaml>=1.3.0",
"ruamel-yaml<0.18", # Current limitation
"kedro>=0.19.3,<0.20",
"kedro-datasets>=2.1.0",
@@ -72,6 +72,8 @@ version = { attr = "pydantic_kedro.version.__version__" }
[tool.ruff]
line-length = 105
src = ["src"]

[tool.ruff.lint]
select = [
"E", # pycodestyle
"F", # pyflakes
@@ -80,7 +82,7 @@ select = [
]
ignore = ["D203", "D213"] # conflicting

[tool.ruff.pydocstyle]
[tool.ruff.lint.pydocstyle]
convention = "numpy"


@@ -122,3 +124,7 @@ module = [
"kedro_datasets.*",
]
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = ["pydantic.v1"]
ignore_missing_imports = true
2 changes: 1 addition & 1 deletion src/pydantic_kedro/_dict_io.py
@@ -4,7 +4,7 @@
from types import TracebackType
from typing import Any, Dict, List, Optional, Type, Union

from pydantic import BaseModel
from pydantic_kedro._pydantic import BaseModel

from ._internals import import_string

7 changes: 4 additions & 3 deletions src/pydantic_kedro/_internals.py
@@ -2,9 +2,10 @@

from typing import Any, Callable, Dict, Type

from kedro_datasets.pickle.pickle_dataset import PickleDataset
from kedro.io.core import AbstractDataset
from pydantic import BaseModel, create_model
from kedro_datasets.pickle.pickle_dataset import PickleDataset

from ._pydantic import BaseModel, create_model

KLS_MARK_STR = "class"

@@ -122,7 +123,7 @@ def get_kedro_default(kls: Type[BaseModel]) -> Callable[[str], AbstractDataset]:
def create_expanded_model(model: BaseModel) -> BaseModel:
"""Create an 'expanded' model with additional metadata."""
pyd_kls = type(model)
if KLS_MARK_STR in pyd_kls.__fields__.keys():
if KLS_MARK_STR in pyd_kls.__fields__.keys(): # type: ignore
raise ValueError(f"Marker {KLS_MARK_STR!r} already exists as a field; can't dump model.")
pyd_kls_path = f"{pyd_kls.__module__}.{pyd_kls.__qualname__}"

23 changes: 23 additions & 0 deletions src/pydantic_kedro/_pydantic.py
@@ -0,0 +1,23 @@
"""Pydantic import, depending on the version."""

# mypy: ignore_errors

__all__ = [
"BaseConfig",
"BaseModel",
"BaseSettings",
"Extra",
"Field",
"create_model",
]

import pydantic

PYDANTIC_VERSION = pydantic.version.VERSION

if PYDANTIC_VERSION > "2" and PYDANTIC_VERSION < "3":
from pydantic.v1 import BaseConfig, BaseModel, BaseSettings, Extra, Field, create_model
elif PYDANTIC_VERSION < "2":
from pydantic import BaseConfig, BaseModel, BaseSettings, Extra, Field, create_model # noqa
else:
raise ImportError("Unknown version of Pydantic.")
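
Downstream modules in this PR import their Pydantic symbols from this shim rather than from `pydantic` directly (see the dataset, model, and test changes below). A rough sketch of the effect, assuming either Pydantic v1 or Pydantic v2 (with its bundled `pydantic.v1` backport) is installed:

```python
from pydantic_kedro._pydantic import BaseModel, Field


class Record(BaseModel):
    """Defined via the shim, so the v1-style API is used on either Pydantic major version."""

    name: str = Field(..., description="a v1-style field")


# v1-style models expose `__fields__` as a dict of field definitions on both installs.
assert "name" in Record.__fields__
```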
3 changes: 2 additions & 1 deletion src/pydantic_kedro/datasets/auto.py
@@ -5,7 +5,8 @@
import fsspec
from fsspec import AbstractFileSystem
from kedro.io.core import AbstractDataset, get_protocol_and_path
from pydantic import BaseModel

from pydantic_kedro._pydantic import BaseModel

from .folder import PydanticFolderDataset
from .json import PydanticJsonDataset
2 changes: 1 addition & 1 deletion src/pydantic_kedro/datasets/folder.py
@@ -14,11 +14,11 @@
from fsspec.core import strip_protocol
from fsspec.implementations.local import LocalFileSystem
from kedro.io.core import AbstractDataset, parse_dataset_definition
from pydantic import BaseConfig, BaseModel, Extra, Field

from pydantic_kedro._dict_io import PatchPydanticIter, dict_to_model
from pydantic_kedro._internals import get_kedro_default, get_kedro_map, import_string
from pydantic_kedro._local_caching import get_cache_dir
from pydantic_kedro._pydantic import BaseConfig, BaseModel, Extra, Field

__all__ = ["PydanticFolderDataset"]

2 changes: 1 addition & 1 deletion src/pydantic_kedro/datasets/json.py
@@ -8,9 +8,9 @@
import fsspec
from fsspec import AbstractFileSystem
from kedro.io.core import AbstractDataset, get_filepath_str, get_protocol_and_path
from pydantic import BaseModel

from pydantic_kedro._dict_io import PatchPydanticIter, dict_to_model
from pydantic_kedro._pydantic import BaseModel


class PydanticJsonDataset(AbstractDataset[BaseModel, BaseModel]):
2 changes: 1 addition & 1 deletion src/pydantic_kedro/datasets/yaml.py
@@ -8,10 +8,10 @@
import ruamel.yaml as yaml
from fsspec import AbstractFileSystem
from kedro.io.core import AbstractDataset, get_filepath_str, get_protocol_and_path
from pydantic import BaseModel
from pydantic_yaml import to_yaml_file

from pydantic_kedro._dict_io import PatchPydanticIter, dict_to_model
from pydantic_kedro._pydantic import BaseModel


class PydanticYamlDataset(AbstractDataset[BaseModel, BaseModel]):
2 changes: 1 addition & 1 deletion src/pydantic_kedro/datasets/zip.py
@@ -8,9 +8,9 @@
import fsspec
from fsspec.implementations.zip import ZipFileSystem
from kedro.io.core import AbstractDataset
from pydantic import BaseModel

from pydantic_kedro._local_caching import get_cache_dir
from pydantic_kedro._pydantic import BaseModel

from .folder import PydanticFolderDataset

5 changes: 3 additions & 2 deletions src/pydantic_kedro/models.py
@@ -2,9 +2,10 @@

from typing import Callable, Dict, Type

from kedro_datasets.pickle.pickle_dataset import PickleDataset
from kedro.io import AbstractDataset
from pydantic import BaseConfig, BaseModel
from kedro_datasets.pickle.pickle_dataset import PickleDataset

from pydantic_kedro._pydantic import BaseConfig, BaseModel


def _kedro_default(x: str) -> PickleDataset:
2 changes: 1 addition & 1 deletion src/pydantic_kedro/utils.py
@@ -3,8 +3,8 @@
from typing import Literal, Type, TypeVar

from kedro.io.core import AbstractDataset
from pydantic import BaseModel

from pydantic_kedro._pydantic import BaseModel
from pydantic_kedro.datasets.auto import PydanticAutoDataset
from pydantic_kedro.datasets.folder import PydanticFolderDataset
from pydantic_kedro.datasets.json import PydanticJsonDataset
2 changes: 1 addition & 1 deletion src/test/catalogs/test_basic_catalog.py
@@ -5,9 +5,9 @@

from kedro.config import OmegaConfigLoader
from kedro.io import DataCatalog
from pydantic import BaseModel

from pydantic_kedro import PydanticJsonDataset
from pydantic_kedro._pydantic import BaseModel

local_dir = Path(__file__).parent

3 changes: 1 addition & 2 deletions src/test/test_auto.py
@@ -1,7 +1,6 @@
"""Specialized tests for `PydanticAutoDataset`."""

from pydantic import BaseModel

from pydantic_kedro._pydantic import BaseModel
from pydantic_kedro import PydanticAutoDataset, PydanticJsonDataset, PydanticZipDataset


2 changes: 1 addition & 1 deletion src/test/test_ds_simple.py
@@ -4,7 +4,7 @@

import pytest
from kedro.io.core import AbstractDataset
from pydantic import BaseModel
from pydantic_kedro._pydantic import BaseModel

from pydantic_kedro import (
PydanticAutoDataset,
2 changes: 1 addition & 1 deletion src/test/test_inheritance.py
@@ -8,9 +8,9 @@
from kedro_datasets.pandas.csv_dataset import CSVDataset
from kedro_datasets.pandas.parquet_dataset import ParquetDataset
from kedro_datasets.pickle.pickle_dataset import PickleDataset
from pydantic import BaseModel

from pydantic_kedro import PydanticFolderDataset
from pydantic_kedro._pydantic import BaseModel

dfx = pd.DataFrame([[1, 2, 3]], columns=["a", "b", "c"])

4 changes: 2 additions & 2 deletions src/test/test_nested_subtypes.py
@@ -4,8 +4,8 @@
from typing import Dict, List, Literal

import pytest
from pydantic import BaseModel

from pydantic_kedro._pydantic import BaseModel
from pydantic_kedro import load_model, save_model


@@ -60,7 +60,7 @@ def test_nested_subclass(
"""Test round-trip of objects with a nested subclass."""
# Initial round-trip (should always work)
save_model(obj, f"{tmpdir}/obj", format=format)
obj2 = load_model(f"{tmpdir}/obj", AbstractBaz)
obj2 = load_model(f"{tmpdir}/obj", AbstractBaz) # type: ignore[type-abstract]
assert obj.foo == obj2.foo
assert obj.get_baz() == obj2.get_baz()
# Nested round-trip (should also always work, but is more diffictult)
2 changes: 1 addition & 1 deletion src/test/test_strict.py
@@ -1,10 +1,10 @@
"""Test strict models and BaseSettings subclasses."""

import pytest
from pydantic import BaseModel, BaseSettings
from typing_extensions import Literal

from pydantic_kedro import load_model, save_model
from pydantic_kedro._pydantic import BaseModel, BaseSettings


class ExSettings(BaseSettings):
5 changes: 3 additions & 2 deletions src/test/test_utils.py
@@ -1,8 +1,9 @@
"""Test utility functions, loading and saving models."""

from pydantic import BaseModel
from typing import Any

from pydantic_kedro import load_model, save_model
from pydantic_kedro._pydantic import BaseModel


class MyModel(BaseModel):
@@ -16,6 +17,6 @@ def test_utils_load_save(tmpdir: str):
# using memory to avoid tempfile
save_model(MyModel(x="example"), f"{tmpdir}/model")

obj = load_model(f"{tmpdir}/model")
obj: Any = load_model(f"{tmpdir}/model")
assert isinstance(obj, MyModel)
assert obj.x == "example"
