Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Pydantic V2 API Migration #148

Draft
wants to merge 3 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
102 changes: 54 additions & 48 deletions fhir/resources/core/fhirabstractmodel.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,11 @@
from enum import Enum
from functools import lru_cache

from pydantic.v1 import BaseModel, Extra, Field
from pydantic import ValidationError, BaseModel, Field, ConfigDict
from pydantic.fields import FieldInfo
from pydantic.v1.class_validators import ROOT_VALIDATOR_CONFIG_KEY, root_validator
from pydantic.v1.error_wrappers import ErrorWrapper, ValidationError
nazrulworld marked this conversation as resolved.
Show resolved Hide resolved
from pydantic.v1.errors import ConfigError, PydanticValueError
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

from pydantic.v1.fields import ModelField
from pydantic.v1.parse import Protocol
from pydantic.v1.utils import ROOT_KEY, sequence_like
from pydantic_core import InitErrorDetails
nazrulworld marked this conversation as resolved.
Show resolved Hide resolved

from .utils import is_primitive_type, load_file, load_str_bytes, xml_dumps, yaml_dumps

Expand Down Expand Up @@ -49,15 +47,14 @@ def json_dumps(v, *, default, option=0, return_bytes=False):
from pydantic.v1.typing import TupleGenerator
from pydantic.v1.types import StrBytes
from pydantic.v1.typing import AnyCallable
from pydantic.v1.main import Model

__author__ = "Md Nazrul Islam<email2nazrul@gmail.com>"

logger = logging.getLogger(__name__)
FHIR_COMMENTS_FIELD_NAME = "fhir_comments"


class WrongResourceType(ValueError):
    """Error for input data whose ``resourceType`` does not match the
    model class it is being parsed into.

    pydantic v2 removed ``PydanticValueError``; subclassing ``ValueError``
    keeps this usable as a real exception (raisable/catchable) while
    ``code`` and ``msg_template`` remain available as class attributes for
    building ``PydanticCustomError`` / ``InitErrorDetails`` entries.
    """

    # machine-readable error code used as the custom error ``type``
    code = "wrong.resource_type"
    # message template; ``{error}`` is filled with the human-readable detail
    msg_template = "Wrong ResourceType: {error}"

Expand All @@ -66,47 +63,56 @@ class FHIRAbstractModel(BaseModel, abc.ABC):
"""Abstract base model class for all FHIR elements."""

resource_type: str = ... # type: ignore
model_config = ConfigDict(
extra="forbid",
populate_by_name=True,
validate_assignment=True,
)
Comment on lines +68 to +72
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this replaces the class Config: ... approach in pydantic v2

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@nazrulworld @bwalsh the previously supported json_loads and json_dumps config parameters were removed in pydantic v2


fhir_comments: typing.Union[str, typing.List[str]] = Field(
None, alias="fhir_comments", element_property=False
)

def __init__(self: "FHIRAbstractModel", **data: typing.Any) -> None:
    """Validate the resource-type discriminator, then delegate to
    ``BaseModel.__init__``.

    Accepts either ``resource_type`` or the FHIR wire-format key
    ``resourceType``. If the provided value disagrees with this class's
    declared default, a pydantic ``ValidationError`` is raised pointing
    at ``resource_type``.
    """
    # local import keeps the module-level import block untouched
    from pydantic_core import PydanticCustomError

    resource_type = data.pop("resource_type", None)
    errors: typing.List[InitErrorDetails] = []
    if (
        "resourceType" in data
        and "resourceType" not in self.model_fields
    ):
        resource_type = data.pop("resourceType", None)

    expected_resource_type = self.model_fields["resource_type"].default
    if resource_type is not None and resource_type != expected_resource_type:
        error_message = (
            f"``{self.__class__.__module__}."
            f"{self.__class__.__name__}`` "
            f"expects resource type ``{expected_resource_type}``, "
            f"but got ``{resource_type}``. "
            "Make sure resource type name is correct and right "
            "ModelClass has been chosen."
        )
        # pydantic v2 rejects arbitrary string error types in
        # InitErrorDetails; a custom code/template must be wrapped in a
        # PydanticCustomError carrying its own context.
        custom_error = PydanticCustomError(
            WrongResourceType.code,
            WrongResourceType.msg_template,
            {"error": error_message},
        )
        errors.append(
            InitErrorDetails(
                type=custom_error,
                loc=("resource_type",),
                input=resource_type,
            )
        )
    if errors:
        # ValidationError has no public constructor in pydantic v2; it
        # must be built via from_exception_data().
        raise ValidationError.from_exception_data(
            title=self.__class__.__name__,
            line_errors=errors,
        )

    BaseModel.__init__(self, **data)

@classmethod
def add_root_validator(
nazrulworld marked this conversation as resolved.
Show resolved Hide resolved
cls: typing.Type["Model"],
cls: typing.Type["BaseModel"],
validator: typing.Union["AnyCallable", classmethod],
*,
pre: bool = False,
Expand All @@ -125,11 +131,11 @@ def add_root_validator(

# first level validation
if any([func_name in cls_.__dict__ for cls_ in cls.mro()]):
raise ConfigError(
raise ValidationError(
f"{cls} already has same name '{func_name}' method or attribute!"
)
if func_name in cls.__fields__:
raise ConfigError(f"{cls} already has same name '{func_name}' field!")
if func_name in cls.model_fields:
raise ValidationError(f"{cls} already has same name '{func_name}' field!")

# evaluate through root_validator
validator = root_validator(
Expand All @@ -141,13 +147,13 @@ def add_root_validator(
arg_list = list(sig.parameters.keys())

if len(arg_list) != 2:
raise ConfigError(
raise ValidationError(
f"Invalid signature for root validator {func_name}: {sig}"
", should be: (cls, values)."
)

if arg_list[0] != "cls":
raise ConfigError(
raise ValidationError(
f"Invalid signature for root validator {func_name}: {sig}, "
f'"{arg_list[0]}" not permitted as first argument, '
"should be: (cls, values)."
Expand All @@ -173,10 +179,10 @@ def add_root_validator(

@classmethod
def element_properties(
    cls: typing.Type["BaseModel"],
) -> typing.Generator[FieldInfo, None, None]:
    """Yield the ``FieldInfo`` of every field flagged ``element_property``.

    In pydantic v2, extra keyword arguments passed to ``Field()`` (such as
    ``element_property``) are collected into ``FieldInfo.json_schema_extra``
    — the v1 ``field_info.extra`` attribute no longer exists. Iterate the
    field values (not the key names) of ``model_fields``.
    """
    for model_field in cls.model_fields.values():
        # json_schema_extra may be None, a dict, or a callable; only the
        # dict form can carry the element_property flag.
        extra = model_field.json_schema_extra
        if isinstance(extra, dict) and extra.get("element_property", False):
            yield model_field

Expand All @@ -189,7 +195,7 @@ def elements_sequence(cls):

@classmethod
@lru_cache(maxsize=1024, typed=True)
def has_resource_base(cls: typing.Type["Model"]) -> bool:
def has_resource_base(cls: typing.Type["BaseModel"]) -> bool:
""" """
# xxx: calculate metrics, other than cache it!
for cl in inspect.getmro(cls)[:-4]:
Expand All @@ -199,9 +205,9 @@ def has_resource_base(cls: typing.Type["Model"]) -> bool:

@classmethod
@lru_cache(maxsize=None, typed=True)
def get_resource_type(cls: typing.Type["BaseModel"]) -> str:
    """Return this model class's declared FHIR resource type name.

    Reads the default value of the ``resource_type`` field; the result is
    cached per class (classes are long-lived, so the unbounded cache is
    intentional).
    """
    return cls.model_fields["resource_type"].default

@classmethod
@lru_cache(maxsize=None, typed=True)
Expand All @@ -211,7 +217,9 @@ def get_alias_mapping(
"""Mappings between field's name and alias"""
aliases = cls.elements_sequence()
return {
f.alias: fname for fname, f in cls.__fields__.items() if f.alias in aliases
field_info.alias: field_name
for field_name, field_info in cls.model_fields.items()
if field_info.alias in aliases
}

@classmethod
Expand All @@ -221,15 +229,15 @@ def get_json_encoder(cls) -> typing.Callable[[typing.Any], typing.Any]:

@classmethod
def parse_file(
cls: typing.Type["Model"],
cls: typing.Type["BaseModel"],
path: typing.Union[str, pathlib.Path],
*,
content_type: typing.Optional[str] = None,
encoding: str = "utf8",
proto: typing.Optional[Protocol] = None,
allow_pickle: bool = False,
**extra,
) -> "Model":
) -> BaseModel:
extra.update({"cls": cls})
obj = load_file(
path,
Expand All @@ -240,19 +248,19 @@ def parse_file(
json_loads=cls.__config__.json_loads,
**extra,
)
return cls.parse_obj(obj)
return cls.model_validate(obj)

@classmethod
def parse_raw(
cls: typing.Type["Model"],
cls: typing.Type["BaseModel"],
b: "StrBytes",
*,
content_type: typing.Optional[str] = None,
encoding: str = "utf8",
proto: typing.Optional[Protocol] = None,
allow_pickle: bool = False,
**extra,
) -> "Model":
) -> BaseModel:
extra.update({"cls": cls})
try:
obj = load_str_bytes(
Expand All @@ -265,8 +273,14 @@ def parse_raw(
**extra,
)
except (ValueError, TypeError, UnicodeDecodeError) as e: # noqa: B014
raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls)
nazrulworld marked this conversation as resolved.
Show resolved Hide resolved
return cls.parse_obj(obj)
init_error_details = InitErrorDetails(
type="failed_parse_raw",
loc=("__root__",),
input=b,
ctx={"message": e}
)
raise ValidationError([init_error_details], cls)
return cls.model_validate(obj)

def yaml( # type: ignore
self,
Expand Down Expand Up @@ -436,7 +450,7 @@ def _fhir_iter(
for prop_name in self.elements_sequence():
field_key = alias_maps[prop_name]

field = self.__fields__[field_key]
field = self.model_fields[field_key]
is_primitive = is_primitive_type(field)
v = self.__dict__.get(field_key, None)
dict_key = by_alias and field.alias or field_key
Expand Down Expand Up @@ -518,11 +532,3 @@ def _fhir_get_value(
):
return None
return value

class Config:
json_loads = json_loads
json_dumps = json_dumps
allow_population_by_field_name = True
extra = Extra.forbid
validate_assignment = True
error_msg_templates = {"value_error.extra": "extra fields not permitted"}
2 changes: 1 addition & 1 deletion fhir/resources/core/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from typing import TYPE_CHECKING, Any, Callable, Union, cast, no_type_check, Optional

from pydantic.v1.parse import Protocol
# NOTE: pydantic v2 exposes no top-level ``load_file`` — importing it from
# ``pydantic`` raises ImportError. Keep the v1 helper, consistent with the
# sibling v1 imports in this module.
from pydantic.v1.parse import load_file as default_load_file
from pydantic.v1.parse import load_str_bytes as default_load_str_bytes
from pydantic.v1.types import StrBytes

Expand Down