Merge pull request #396 from lsst/tickets/DM-42302
DM-42302: Drop support for Pydantic v1.
TallJimbo committed Jan 4, 2024
2 parents 9d3cecc + 473a15e commit 3bc6aad
Showing 9 changed files with 86 additions and 21 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build_docs.yaml
@@ -31,7 +31,7 @@ jobs:
          pip install wheel
      - name: Install documenteer
-       run: pip install 'documenteer[pipelines]>=0.8'
+       run: pip install 'documenteer[pipelines]==0.8.2'

      - name: Install dependencies
        run: |
1 change: 1 addition & 0 deletions doc/changes/DM-42302.misc.md
@@ -0,0 +1 @@
+Drop support for Pydantic 1.x.
1 change: 1 addition & 0 deletions doc/conf.py
@@ -9,3 +9,4 @@

intersphinx_mapping["networkx"] = ("https://networkx.org/documentation/stable/", None)  # noqa: F405
intersphinx_mapping["lsst"] = ("https://pipelines.lsst.io/v/weekly/", None)  # noqa: F405
+intersphinx_mapping["pydantic"] = ("https://docs.pydantic.dev/latest/", None)  # noqa: F405
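With this mapping in place, Sphinx can resolve cross-references into Pydantic's own documentation, which matters now that public classes inherit directly from pydantic.BaseModel. A hedged sketch of the kind of docstring reference this enables (the function below is hypothetical, not from this commit):

    def example() -> None:
        """Do nothing.

        Notes
        -----
        With the intersphinx entry above, a reference such as
        `pydantic.BaseModel` in this docstring resolves to
        https://docs.pydantic.dev/latest/ at documentation build time.
        """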
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -27,7 +27,7 @@ dependencies = [
    "lsst-daf-butler",
    "lsst-pex-config",
    "astropy",
-   "pydantic <3.0",
+   "pydantic >=2,<3.0",
    "networkx",
    "pyyaml >= 5.1",
    "numpy >= 1.17",
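The raised floor on pydantic is the substantive change here: environments that resolve to Pydantic 1.x can no longer install the package. A hedged sketch (not part of this commit) of an equivalent import-time guard, in case downstream code wants to fail fast with a clearer message:

    # Illustrative only; mirrors the packaging constraint "pydantic >=2,<3.0".
    import pydantic

    _major = int(pydantic.VERSION.split(".")[0])
    if _major != 2:
        raise ImportError(f"pydantic 2.x is required; found {pydantic.VERSION}")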
27 changes: 24 additions & 3 deletions python/lsst/pipe/base/_task_metadata.py
@@ -29,13 +29,13 @@

import itertools
import numbers
+import sys
import warnings
from collections.abc import Collection, Iterator, Mapping, Sequence
from typing import Any, Protocol

-from lsst.daf.butler._compat import _BaseModelCompat
from lsst.utils.introspection import find_outside_stacklevel
-from pydantic import Field, StrictBool, StrictFloat, StrictInt, StrictStr
+from pydantic import BaseModel, Field, StrictBool, StrictFloat, StrictInt, StrictStr

# The types allowed in a Task metadata field are restricted
# to allow predictable serialization.
@@ -60,7 +60,7 @@ def _isListLike(v: Any) -> bool:
    return isinstance(v, Sequence) and not isinstance(v, str)


-class TaskMetadata(_BaseModelCompat):
+class TaskMetadata(BaseModel):
    """Dict-like object for storing task metadata.

    Metadata can be stored at two levels: single task or task plus subtasks.
@@ -572,6 +572,27 @@ def _validate_value(self, value: Any) -> tuple[str, Any]:

        raise ValueError(f"TaskMetadata does not support values of type {value!r}.")

+    # Work around the fact that Sphinx chokes on Pydantic docstring formatting,
+    # when we inherit those docstrings in our public classes.
+    if "sphinx" in sys.modules:
+
+        def copy(self, *args: Any, **kwargs: Any) -> Any:
+            """See `pydantic.BaseModel.copy`."""
+            return super().copy(*args, **kwargs)
+
+        def model_dump(self, *args: Any, **kwargs: Any) -> Any:
+            """See `pydantic.BaseModel.model_dump`."""
+            return super().model_dump(*args, **kwargs)
+
+        def model_copy(self, *args: Any, **kwargs: Any) -> Any:
+            """See `pydantic.BaseModel.model_copy`."""
+            return super().model_copy(*args, **kwargs)
+
+        @classmethod
+        def model_json_schema(cls, *args: Any, **kwargs: Any) -> Any:
+            """See `pydantic.BaseModel.model_json_schema`."""
+            return super().model_json_schema(*args, **kwargs)


# Needed because a TaskMetadata can contain a TaskMetadata.
TaskMetadata.model_rebuild()
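The model_rebuild() call is needed because TaskMetadata refers to itself in its own field annotations. A minimal sketch of the same pattern (toy model, not from this repo), assuming Pydantic v2:

    import pydantic


    class Tree(pydantic.BaseModel):
        name: str
        children: dict[str, "Tree"] = {}


    Tree.model_rebuild()  # resolves the "Tree" forward reference

    t = Tree(name="root", children={"leaf": Tree(name="leaf")})
    assert t.children["leaf"].name == "leaf"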
4 changes: 2 additions & 2 deletions python/lsst/pipe/base/graph/quantumNode.py
@@ -33,6 +33,7 @@
from dataclasses import dataclass
from typing import Any, NewType

+import pydantic
from lsst.daf.butler import (
    DatasetRef,
    DimensionRecord,
@@ -41,7 +42,6 @@
    Quantum,
    SerializedQuantum,
)
-from lsst.daf.butler._compat import _BaseModelCompat
from lsst.utils.introspection import find_outside_stacklevel

from ..pipeline import TaskDef
@@ -188,7 +188,7 @@ def _replace_quantum(self, quantum: Quantum) -> None:
_fields_set = {"quantum", "taskLabel", "nodeId"}


-class SerializedQuantumNode(_BaseModelCompat):
+class SerializedQuantumNode(pydantic.BaseModel):
    """Model representing a `QuantumNode` in serializable form."""

    quantum: SerializedQuantum
13 changes: 6 additions & 7 deletions python/lsst/pipe/base/pipeline_graph/io.py
@@ -42,7 +42,6 @@
import networkx
import pydantic
from lsst.daf.butler import DatasetType, DimensionConfig, DimensionGroup, DimensionUniverse
-from lsst.daf.butler._compat import _BaseModelCompat

from .. import automatic_connection_constants as acc
from ._dataset_types import DatasetTypeNode
@@ -85,7 +84,7 @@ def expect_not_none(value: _U | None, msg: str) -> _U:
    return value


-class SerializedEdge(_BaseModelCompat):
+class SerializedEdge(pydantic.BaseModel):
    """Struct used to represent a serialized `Edge` in a `PipelineGraph`.

    All `ReadEdge` and `WriteEdge` state not included here is instead
@@ -204,7 +203,7 @@ def deserialize_write_edge(
    )


-class SerializedTaskInitNode(_BaseModelCompat):
+class SerializedTaskInitNode(pydantic.BaseModel):
    """Struct used to represent a serialized `TaskInitNode` in a
    `PipelineGraph`.
@@ -305,7 +304,7 @@ def deserialize(
    )


-class SerializedTaskNode(_BaseModelCompat):
+class SerializedTaskNode(pydantic.BaseModel):
    """Struct used to represent a serialized `TaskNode` in a `PipelineGraph`.

    The task label is serialized by the context in which a
@@ -461,7 +460,7 @@ def deserialize(
    )


-class SerializedDatasetTypeNode(_BaseModelCompat):
+class SerializedDatasetTypeNode(pydantic.BaseModel):
    """Struct used to represent a serialized `DatasetTypeNode` in a
    `PipelineGraph`.
@@ -583,7 +582,7 @@ def deserialize(
    return None


-class SerializedTaskSubset(_BaseModelCompat):
+class SerializedTaskSubset(pydantic.BaseModel):
    """Struct used to represent a serialized `TaskSubset` in a `PipelineGraph`.

    The subset label is serialized by the context in which a
@@ -634,7 +633,7 @@ def deserialize_task_subset(self, label: str, xgraph: networkx.MultiDiGraph) ->
    return TaskSubset(xgraph, label, members, self.description)


-class SerializedPipelineGraph(_BaseModelCompat):
+class SerializedPipelineGraph(pydantic.BaseModel):
    """Struct used to represent a serialized `PipelineGraph`."""

    version: str = ".".join(str(v) for v in _IO_VERSION_INFO)
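Every serialized struct in this module now subclasses pydantic.BaseModel directly rather than the removed _BaseModelCompat shim. A minimal sketch of the Pydantic v2 round-trip these structs rely on (the class and fields below are illustrative, not the real PipelineGraph schema):

    import pydantic


    class SerializedThing(pydantic.BaseModel):
        # Illustrative stand-in for the Serialized* structs above.
        label: str
        members: list[str] = []


    blob = SerializedThing(label="subset", members=["a", "b"]).model_dump_json()
    restored = SerializedThing.model_validate_json(blob)
    assert restored.members == ["a", "b"]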
55 changes: 49 additions & 6 deletions python/lsst/pipe/base/tests/mocks/_storage_class.py
@@ -37,10 +37,12 @@
    "is_mock_name",
)

+import sys
import uuid
from collections.abc import Callable, Iterable, Mapping
from typing import Any, cast

+import pydantic
from lsst.daf.butler import (
    DataIdValue,
    DatasetComponent,
@@ -54,7 +56,6 @@
    StorageClassDelegate,
    StorageClassFactory,
)
-from lsst.daf.butler._compat import _BaseModelCompat
from lsst.daf.butler.formatters.json import JsonFormatter
from lsst.utils.introspection import get_full_type_name

@@ -117,7 +118,7 @@ def is_mock_name(name: str) -> bool:
# access to complex real storage classes (and their pytypes) to test against.


-class MockDataset(_BaseModelCompat):
+class MockDataset(pydantic.BaseModel):
    """The in-memory dataset type used by `MockStorageClass`."""

    dataset_id: uuid.UUID | None
@@ -189,9 +190,9 @@ def make_derived(self, **kwargs: Any) -> MockDataset:
        The newly-mocked dataset.
        """
        dataset_type_updates = {
-            k: kwargs.pop(k) for k in list(kwargs) if k in SerializedDatasetType.model_fields  # type: ignore
+            k: kwargs.pop(k) for k in list(kwargs) if k in SerializedDatasetType.model_fields
        }
-        kwargs.setdefault("dataset_type", self.dataset_type.copy(update=dataset_type_updates))
+        kwargs.setdefault("dataset_type", self.dataset_type.model_copy(update=dataset_type_updates))
        # Fields below are those that should not be propagated to the derived
        # dataset, because they're not about the intrinsic on-disk thing.
        kwargs.setdefault("converted_from", None)
@@ -200,10 +201,31 @@
        # Also use setdefault on the ref in case caller wants to override that
        # directly, but this is expected to be rare enough that it's not worth
        # it to try to optimize out the work above to make derived_ref.
-        return self.copy(update=kwargs)
+        return self.model_copy(update=kwargs)
+
+    # Work around the fact that Sphinx chokes on Pydantic docstring formatting,
+    # when we inherit those docstrings in our public classes.
+    if "sphinx" in sys.modules:
+
+        def copy(self, *args: Any, **kwargs: Any) -> Any:
+            """See `pydantic.BaseModel.copy`."""
+            return super().copy(*args, **kwargs)
+
+        def model_dump(self, *args: Any, **kwargs: Any) -> Any:
+            """See `pydantic.BaseModel.model_dump`."""
+            return super().model_dump(*args, **kwargs)
+
+        def model_copy(self, *args: Any, **kwargs: Any) -> Any:
+            """See `pydantic.BaseModel.model_copy`."""
+            return super().model_copy(*args, **kwargs)
+
+        @classmethod
+        def model_json_schema(cls, *args: Any, **kwargs: Any) -> Any:
+            """See `pydantic.BaseModel.model_json_schema`."""
+            return super().model_json_schema(*args, **kwargs)


-class MockDatasetQuantum(_BaseModelCompat):
+class MockDatasetQuantum(pydantic.BaseModel):
"""Description of the quantum that produced a mock dataset.
This is also used to represent task-init operations for init-output mock
@@ -222,6 +244,27 @@ class MockDatasetQuantum(_BaseModelCompat):
Keys are task-internal connection names, not dataset type names.
"""

+    # Work around the fact that Sphinx chokes on Pydantic docstring formatting,
+    # when we inherit those docstrings in our public classes.
+    if "sphinx" in sys.modules:
+
+        def copy(self, *args: Any, **kwargs: Any) -> Any:
+            """See `pydantic.BaseModel.copy`."""
+            return super().copy(*args, **kwargs)
+
+        def model_dump(self, *args: Any, **kwargs: Any) -> Any:
+            """See `pydantic.BaseModel.model_dump`."""
+            return super().model_dump(*args, **kwargs)
+
+        def model_copy(self, *args: Any, **kwargs: Any) -> Any:
+            """See `pydantic.BaseModel.model_copy`."""
+            return super().model_copy(*args, **kwargs)
+
+        @classmethod
+        def model_json_schema(cls, *args: Any, **kwargs: Any) -> Any:
+            """See `pydantic.BaseModel.model_json_schema`."""
+            return super().model_json_schema(*args, **kwargs)


MockDataset.model_rebuild()

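The renames in make_derived follow the standard Pydantic v1-to-v2 migration: copy(update=...) becomes model_copy(update=...), and the class-level field table is model_fields (so the old # type: ignore guard is no longer needed). A short sketch of the correspondences this file now relies on, shown on a toy model:

    import pydantic


    class Point(pydantic.BaseModel):
        x: int
        y: int


    p = Point(x=1, y=2)
    p2 = p.model_copy(update={"y": 3})   # Pydantic v1: p.copy(update={"y": 3})
    data = p.model_dump()                # Pydantic v1: p.dict()
    schema = Point.model_json_schema()   # Pydantic v1: Point.schema()
    names = set(Point.model_fields)      # Pydantic v1: Point.__fields__
    assert p2.y == 3 and data == {"x": 1, "y": 2} and "x" in names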
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,5 +1,5 @@
pyyaml >= 5.1
-pydantic < 3.0
+pydantic >=2,<3.0
numpy >= 1.17
networkx
frozendict
