Merge pull request #357 from lsst/tickets/DM-40002
DM-40002: Support pydantic v2
timj committed Jul 19, 2023
2 parents 1237108 + cc509f0 commit 32b72e6
Showing 5 changed files with 21 additions and 30 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build.yaml
@@ -82,7 +82,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
python-version: "3.11"

- name: Install dependencies
run: |
2 changes: 1 addition & 1 deletion .github/workflows/build_docs.yaml
@@ -18,7 +18,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
python-version: "3.11"
cache: "pip"
cache-dependency-path: "setup.cfg"

13 changes: 5 additions & 8 deletions python/lsst/pipe/base/_task_metadata.py
@@ -27,11 +27,8 @@
from collections.abc import Collection, Iterator, Mapping, Sequence
from typing import Any, Protocol

- try:
-     from pydantic.v1 import BaseModel, Field, StrictBool, StrictFloat, StrictInt, StrictStr
- except ModuleNotFoundError:
-     from pydantic import BaseModel, Field, StrictBool, StrictFloat, StrictInt, StrictStr # type: ignore
-
+ from lsst.daf.butler._compat import _BaseModelCompat
+ from pydantic import Field, StrictBool, StrictFloat, StrictInt, StrictStr

_DEPRECATION_REASON = "Will be removed after v25."
_DEPRECATION_VERSION = "v24"
@@ -59,7 +56,7 @@ def _isListLike(v: Any) -> bool:
return isinstance(v, Sequence) and not isinstance(v, str)


- class TaskMetadata(BaseModel):
+ class TaskMetadata(_BaseModelCompat):
"""Dict-like object for storing task metadata.
Metadata can be stored at two levels: single task or task plus subtasks.
@@ -417,7 +414,7 @@ def get(self, key: str, default: Any = None) -> Any:
key : `str`
The key to retrieve. Can be dot-separated hierarchical.
default
- The value to return if the key doesnot exist.
+ The value to return if the key does not exist.
Returns
-------
@@ -574,4 +571,4 @@ def _validate_value(self, value: Any) -> tuple[str, Any]:


# Needed because a TaskMetadata can contain a TaskMetadata.
- TaskMetadata.update_forward_refs()
+ TaskMetadata.model_rebuild()
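Note on the import swap above: `_BaseModelCompat` is a private helper in `lsst.daf.butler` that presents a single pydantic API to code that may run against either pydantic v1 or v2. Its real implementation is not shown in this diff; the sketch below only illustrates the general shape of such a shim (the class name, version check, and `construct`/`update_forward_refs` aliases are assumptions for illustration, not the daf_butler code).

```python
# Illustrative sketch only -- not the lsst.daf.butler implementation.
# Idea: expose the pydantic v2 method names regardless of which major
# version is installed, so call sites can use one spelling everywhere.
from pydantic import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION

PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")


class BaseModelCompatSketch(BaseModel):
    """Base model that answers to the pydantic v2 method names."""

    if not PYDANTIC_V2:
        @classmethod
        def model_construct(cls, _fields_set=None, **values):
            # v1 spelling of the validation-free constructor.
            return cls.construct(_fields_set=_fields_set, **values)

        @classmethod
        def model_rebuild(cls, **localns):
            # v1 spelling of forward-reference resolution.
            return cls.update_forward_refs(**localns)
```

With a base class of this shape, the `TaskMetadata.model_rebuild()` call at the end of the module resolves the self-referential forward reference on both pydantic majors, even though v1 only knows the name `update_forward_refs()`.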
23 changes: 10 additions & 13 deletions python/lsst/pipe/base/graph/quantumNode.py
@@ -35,13 +35,9 @@
Quantum,
SerializedQuantum,
)
+ from lsst.daf.butler._compat import _BaseModelCompat
from lsst.utils.introspection import find_outside_stacklevel

- try:
-     from pydantic.v1 import BaseModel
- except ModuleNotFoundError:
-     from pydantic import BaseModel # type: ignore
-
from ..pipeline import TaskDef

BuildId = NewType("BuildId", str)
@@ -145,7 +141,7 @@ def from_simple(
) -> QuantumNode:
if recontitutedDimensions is not None:
warnings.warn(
"The recontitutedDimensions argument is now ignored and may be removed after v 27",
"The recontitutedDimensions argument is now ignored and may be removed after v26",
category=FutureWarning,
stacklevel=find_outside_stacklevel("lsst.pipe.base"),
)
@@ -159,7 +155,7 @@ def from_simple(
_fields_set = {"quantum", "taskLabel", "nodeId"}


- class SerializedQuantumNode(BaseModel):
+ class SerializedQuantumNode(_BaseModelCompat):
"""Model representing a `QuantumNode` in serializable form."""

quantum: SerializedQuantum
@@ -168,10 +164,11 @@ class SerializedQuantumNode(_BaseModelCompat):

@classmethod
def direct(cls, *, quantum: dict[str, Any], taskLabel: str, nodeId: str) -> SerializedQuantumNode:
- node = SerializedQuantumNode.__new__(cls)
- setter = object.__setattr__
- setter(node, "quantum", SerializedQuantum.direct(**quantum))
- setter(node, "taskLabel", taskLabel)
- setter(node, "nodeId", uuid.UUID(nodeId))
- setter(node, "__fields_set__", _fields_set)
+ node = cls.model_construct(
+     __fields_set=_fields_set,
+     quantum=SerializedQuantum.direct(**quantum),
+     taskLabel=taskLabel,
+     nodeId=uuid.UUID(nodeId),
+ )

return node
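The `direct()` rewrite above replaces the hand-rolled `__new__` + `object.__setattr__` construction with pydantic's validation-free constructor, routed here through the compat layer as `model_construct`. Below is a minimal standalone sketch of the same idea, assuming pydantic v2 is installed and using a hypothetical two-field model (the real `SerializedQuantumNode` also carries a `quantum` payload):

```python
# Minimal illustration of model_construct (pydantic v2 API): build an
# instance without running any validators, as direct() does above.
import uuid

from pydantic import BaseModel


class NodeSketch(BaseModel):  # hypothetical stand-in for SerializedQuantumNode
    taskLabel: str
    nodeId: uuid.UUID


raw = {"taskLabel": "isr", "nodeId": "8f4e6a1c-1b07-4a9a-9c1e-1d2f3a4b5c6d"}

node = NodeSketch.model_construct(
    _fields_set={"taskLabel", "nodeId"},
    # No validation happens, so the caller must coerce types itself.
    taskLabel=raw["taskLabel"],
    nodeId=uuid.UUID(raw["nodeId"]),
)
print(node)
```

The gain over the old pattern is that attribute assignment and the `__fields_set__` bookkeeping are handled by pydantic itself rather than by `object.__setattr__` calls that bypass the model machinery.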
11 changes: 4 additions & 7 deletions python/lsst/pipe/base/tests/mocks/_storage_class.py
@@ -34,10 +34,6 @@
from collections.abc import Callable, Iterable, Mapping
from typing import Any, cast

- try:
-     import pydantic.v1 as pydantic
- except ModuleNotFoundError:
-     import pydantic # type: ignore
from lsst.daf.butler import (
DatasetComponent,
Formatter,
@@ -50,6 +46,7 @@
StorageClassDelegate,
StorageClassFactory,
)
+ from lsst.daf.butler._compat import _BaseModelCompat
from lsst.daf.butler.formatters.json import JsonFormatter
from lsst.utils.introspection import get_full_type_name

@@ -82,7 +79,7 @@ def is_mock_name(name: str) -> bool:
# access to complex real storage classes (and their pytypes) to test against.


- class MockDataset(pydantic.BaseModel):
+ class MockDataset(_BaseModelCompat):
"""The in-memory dataset type used by `MockStorageClass`."""

ref: SerializedDatasetRef
@@ -140,7 +137,7 @@ def make_derived(self, **kwargs: Any) -> MockDataset:
`~lsst.daf.butler.SerializedDatasetType` to override in the result.
"""
dataset_type_updates = {
- k: kwargs.pop(k) for k in list(kwargs) if k in SerializedDatasetType.__fields__
+ k: kwargs.pop(k) for k in list(kwargs) if k in SerializedDatasetType.model_fields # type: ignore
}
derived_dataset_type = self.dataset_type.copy(update=dataset_type_updates)
derived_ref = self.ref.copy(update=dict(datasetType=derived_dataset_type))
@@ -156,7 +153,7 @@ def make_derived(self, **kwargs: Any) -> MockDataset:
return self.copy(update=kwargs)


- class MockDatasetQuantum(pydantic.BaseModel):
+ class MockDatasetQuantum(_BaseModelCompat):
"""Description of the quantum that produced a mock dataset."""

task_label: str
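The `make_derived` hunk above also switches the field-name lookup from pydantic v1's `__fields__` mapping to v2's `model_fields` mapping. A small self-contained illustration of that rename, with a hypothetical model standing in for `SerializedDatasetType` (it runs under either pydantic major):

```python
# Filter keyword arguments down to the names a model actually declares.
# Only the attribute name differs between pydantic v1 and v2.
import pydantic
from pydantic import BaseModel


class DatasetTypeSketch(BaseModel):  # hypothetical stand-in for SerializedDatasetType
    name: str
    storageClass: str = "dict"


if pydantic.VERSION.startswith("2"):
    field_names = set(DatasetTypeSketch.model_fields)  # v2 registry
else:
    field_names = set(DatasetTypeSketch.__fields__)  # v1 registry

kwargs = {"name": "mock_calexp", "not_a_field": 1}
updates = {k: kwargs.pop(k) for k in list(kwargs) if k in field_names}
assert updates == {"name": "mock_calexp"} and kwargs == {"not_a_field": 1}
```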
