DM-39665: Remove faked DataCoordinate from all tests #852

Merged 16 commits on Jun 16, 2023
Changes from 13 commits
3 changes: 2 additions & 1 deletion python/lsst/daf/butler/_butler.py
@@ -2512,12 +2512,13 @@ def validateConfiguration(
# a DatasetRef for each defined instrument
datasetRefs = []

dimensions = self.dimensions.extract(("instrument",))
for datasetType in datasetTypes:
if "instrument" in datasetType.dimensions:
for instrument in instruments:
datasetRef = DatasetRef(
datasetType,
{"instrument": instrument}, # type: ignore
DataCoordinate.standardize(instrument=instrument, graph=dimensions),
conform=False,
run="validate",
)
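This hunk replaces the raw dict data ID (which needed a `# type: ignore`) with a `DataCoordinate` standardized against a minimal dimension graph. A standalone sketch of the same pattern, assuming the default dimension universe; the dataset type name and instrument value ("example_metadata", "DummyCam") are placeholders, not from the PR:

```python
# Sketch only: mirrors the DatasetRef construction above with placeholder names.
from lsst.daf.butler import (
    DataCoordinate,
    DatasetRef,
    DatasetType,
    DimensionUniverse,
    StorageClass,
)

universe = DimensionUniverse()
dimensions = universe.extract(("instrument",))
datasetType = DatasetType("example_metadata", dimensions, StorageClass("DummySC", dict, None))

# standardize() expands the keyword values into a real DataCoordinate that is
# validated against the dimension graph, so no "# type: ignore" is needed on
# the DatasetRef call.
dataId = DataCoordinate.standardize(instrument="DummyCam", graph=dimensions)
ref = DatasetRef(datasetType, dataId, run="validate", conform=False)
print(ref.dataId)
```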
2 changes: 1 addition & 1 deletion python/lsst/daf/butler/registry/queries/_query_backend.py
@@ -690,7 +690,7 @@ def extract_dimension_relationships(self, relation: Relation) -> set[frozenset[s
could easily correct this by joining that dimension in directly. But
it's also missing the ``{instrument, exposure, physical_filter}``
relationship we'd get from the ``exposure`` dimension's own relation
(``exposure`` implies ``phyiscal_filter``) and the similar
(``exposure`` implies ``physical_filter``) and the similar
``{instrument, physical_filter, band}`` relationship from the
``physical_filter`` dimension relation; we need the relationship logic
to recognize that those dimensions need to be joined in as well in
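The docstring above reasons about relationships between sets of dimension names. As a rough illustration (not the registry implementation) of the `set[frozenset[str]]` shape this method returns, using only the implication rules quoted in the docstring; everything else is a simplified stand-in:

```python
# Rough illustration only: each dimension element's own relation contributes
# one relationship, expressed as a frozenset of dimension names. The
# implications (exposure -> physical_filter, physical_filter -> band) come
# from the docstring above.
DIMENSION_RELATIONSHIPS: dict[str, frozenset[str]] = {
    "exposure": frozenset({"instrument", "exposure", "physical_filter"}),
    "physical_filter": frozenset({"instrument", "physical_filter", "band"}),
}


def relationships_from(joined: set[str]) -> set[frozenset[str]]:
    """Return the relationships supplied by the dimension relations that have
    been joined into a query so far."""
    return {rel for name, rel in DIMENSION_RELATIONSHIPS.items() if name in joined}


# A query that joined only the exposure relation is still missing the
# {instrument, physical_filter, band} relationship, so the physical_filter
# relation has to be joined in as well.
needed = frozenset({"instrument", "physical_filter", "band"})
print(needed in relationships_from({"exposure"}))  # False
print(needed in relationships_from({"exposure", "physical_filter"}))  # True
```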
37 changes: 15 additions & 22 deletions python/lsst/daf/butler/tests/_datasetsHelper.py
@@ -30,23 +30,14 @@
)

import os
import uuid
from collections.abc import Iterable
from collections.abc import Iterable, Mapping
from typing import TYPE_CHECKING, Any

from lsst.daf.butler import DatasetRef, DatasetType, StorageClass
from lsst.daf.butler import DataCoordinate, DatasetRef, DatasetType, StorageClass
from lsst.daf.butler.formatters.yaml import YamlFormatter

if TYPE_CHECKING:
from lsst.daf.butler import (
Config,
DataCoordinate,
DatasetId,
Datastore,
Dimension,
DimensionGraph,
Registry,
)
from lsst.daf.butler import Config, DatasetId, Datastore, Dimension, DimensionGraph, Registry


class DatasetTestHelper:
@@ -57,23 +48,29 @@ def makeDatasetRef(
datasetTypeName: str,
dimensions: DimensionGraph | Iterable[str | Dimension],
storageClass: StorageClass | str,
dataId: DataCoordinate,
dataId: DataCoordinate | Mapping[str, Any],
*,
id: DatasetId | None = None,
run: str | None = None,
conform: bool = True,
) -> DatasetRef:
"""Make a DatasetType and wrap it in a DatasetRef for a test"""
return self._makeDatasetRef(
datasetTypeName, dimensions, storageClass, dataId, id=id, run=run, conform=conform
datasetTypeName,
dimensions,
storageClass,
dataId,
id=id,
run=run,
conform=conform,
)

def _makeDatasetRef(
self,
datasetTypeName: str,
dimensions: DimensionGraph | Iterable[str | Dimension],
storageClass: StorageClass | str,
dataId: DataCoordinate,
dataId: DataCoordinate | Mapping,
*,
id: DatasetId | None = None,
run: str | None = None,
@@ -91,26 +88,22 @@ def _makeDatasetRef(

if run is None:
run = "dummy"
if not isinstance(dataId, DataCoordinate):
dataId = DataCoordinate.standardize(dataId, graph=datasetType.dimensions)
return DatasetRef(datasetType, dataId, id=id, run=run, conform=conform)


class DatastoreTestHelper:
"""Helper methods for Datastore tests"""

root: str
id: DatasetId
root: str | None
config: Config
datastoreType: type[Datastore]
configFile: str

def setUpDatastoreTests(self, registryClass: type[Registry], configClass: type[Config]) -> None:
"""Shared setUp code for all Datastore tests"""
self.registry = registryClass()

# Need to keep ID for each datasetRef since we have no butler
# for these tests
self.id = uuid.uuid4()

self.config = configClass(self.configFile)

# Some subclasses override the working root directory
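With the change above, `makeDatasetRef` accepts either a `DataCoordinate` or a plain mapping and standardizes the latter against the dataset type's dimensions. A hedged usage sketch; the test-case class, dataset type name, and data ID values are illustrative assumptions, and the helper is assumed to be importable from `lsst.daf.butler.tests`:

```python
# Usage sketch under stated assumptions; run with unittest or pytest.
import unittest

from lsst.daf.butler import DimensionUniverse, StorageClass
from lsst.daf.butler.tests import DatasetTestHelper


class ExampleRefTestCase(DatasetTestHelper, unittest.TestCase):
    def test_make_ref_from_mapping(self) -> None:
        universe = DimensionUniverse()
        dimensions = universe.extract(("instrument", "visit"))
        sc = StorageClass("DummySC", dict, None)
        # A plain dict is enough now; the helper standardizes it into a
        # DataCoordinate, so it must carry every required dimension.
        ref = self.makeDatasetRef("pvi", dimensions, sc, {"instrument": "A", "visit": 1})
        self.assertEqual(ref.dataId["instrument"], "A")
        self.assertEqual(ref.run, "dummy")  # default run set by the helper


if __name__ == "__main__":
    unittest.main()
```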
20 changes: 15 additions & 5 deletions tests/test_butler.py
@@ -46,7 +46,7 @@
except ImportError:
boto3 = None

def mock_s3(cls):
def mock_s3(cls): # type: ignore[no-untyped-def]

"""A no-op decorator in case moto mock_s3 can not be imported."""
return cls
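
For context (an assumption about the surrounding import block, which this hunk only partially shows), the no-op fallback above typically lives inside a try/except around the moto import, so the module still imports when moto is absent:

```python
# Sketch of the assumed import-fallback pattern; only the no-op branch is
# visible in the hunk above.
try:
    from moto import mock_s3
except ImportError:

    def mock_s3(cls):  # type: ignore[no-untyped-def]
        """A no-op decorator in case moto mock_s3 can not be imported."""
        return cls
```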

@@ -98,6 +98,8 @@
from lsst.utils.introspection import get_full_type_name

if TYPE_CHECKING:
import types

from lsst.daf.butler import Datastore, DimensionGraph, Registry, StorageClass

TESTDIR = os.path.abspath(os.path.dirname(__file__))
@@ -115,7 +117,7 @@
os.environ.pop(k, None)


def makeExampleMetrics():
def makeExampleMetrics() -> MetricsExample:
return MetricsExample(
{"AM1": 5.2, "AM2": 30.6},
{"a": [1, 2, 3], "b": {"blue": 5, "red": "green"}},
@@ -135,7 +137,7 @@
"""Simple tests for ButlerConfig that are not tested in any other test
cases."""

def testSearchPath(self):
def testSearchPath(self) -> None:
configFile = os.path.join(TESTDIR, "config", "basic", "butler.yaml")
with self.assertLogs("lsst.daf.butler", level="DEBUG") as cm:
config1 = ButlerConfig(configFile)
@@ -175,7 +177,14 @@
cls.storageClassFactory = StorageClassFactory()
cls.storageClassFactory.addFromConfig(cls.configFile)

def assertGetComponents(self, butler, datasetRef, components, reference, collections=None) -> None:
def assertGetComponents(
self,
butler: Butler,
datasetRef: DatasetRef,
components: tuple[str, ...],
reference: Any,
collections: Any = None,
) -> None:
datasetType = datasetRef.datasetType
dataId = datasetRef.dataId
deferred = butler.getDeferred(datasetRef)
@@ -381,6 +390,7 @@
self.assertNotEqual(metric, sliced)
self.assertEqual(metric.summary, sliced.summary)
self.assertEqual(metric.output, sliced.output)
assert metric.data is not None # for mypy
self.assertEqual(metric.data[:stop], sliced.data)
# getDeferred with parameters
sliced = butler.getDeferred(ref, parameters={"slice": slice(stop)}).get()
@@ -2314,7 +2324,7 @@
configFile = os.path.join(TESTDIR, "config/basic/butler-chained.yaml")


def setup_module(module) -> None:
def setup_module(module: types.ModuleType) -> None:
clean_environment()


19 changes: 16 additions & 3 deletions tests/test_constraints.py
@@ -36,12 +36,25 @@ def setUp(self):
dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
sc = StorageClass("DummySC", dict, None)
self.calexpA = self.makeDatasetRef(
"calexp", dimensions, sc, {"instrument": "A", "physical_filter": "u"}, conform=False
"calexp",
dimensions,
sc,
{"instrument": "A", "physical_filter": "u", "visit": 3},
)

dimensions = self.universe.extract(("visit", "detector", "instrument"))
self.pviA = self.makeDatasetRef("pvi", dimensions, sc, {"instrument": "A", "visit": 1}, conform=False)
self.pviB = self.makeDatasetRef("pvi", dimensions, sc, {"instrument": "B", "visit": 2}, conform=False)
self.pviA = self.makeDatasetRef(
"pvi",
dimensions,
sc,
{"instrument": "A", "visit": 1, "detector": 0},
)
self.pviB = self.makeDatasetRef(
"pvi",
dimensions,
sc,
{"instrument": "B", "visit": 2, "detector": 0},
)

def testSimpleAccept(self):
config = ConstraintsConfig({"accept": ["calexp", "ExposureF"]})
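
The data IDs in this hunk gained their previously missing keys (`visit`, `detector`) because the updated helper now standardizes plain mappings instead of skipping conformance with `conform=False`. A minimal sketch of that behaviour, assuming the default dimension universe; the complete data ID mirrors `calexpA` above, and the exception type for the incomplete call (`KeyError`) is my reading of `DataCoordinate.standardize`, not something stated in this PR:

```python
# Sketch only: shows why the test data IDs above now list every required
# dimension.
from lsst.daf.butler import DataCoordinate, DimensionUniverse

universe = DimensionUniverse()
dimensions = universe.extract(("visit", "physical_filter", "instrument"))

# Complete data ID: standardizes cleanly into a DataCoordinate.
complete = DataCoordinate.standardize(
    {"instrument": "A", "physical_filter": "u", "visit": 3}, graph=dimensions
)
print(complete)

# Incomplete data ID: the required "visit" value is missing, so this raises
# (assumed to be a KeyError in the daf_butler versions targeted here).
try:
    DataCoordinate.standardize({"instrument": "A", "physical_filter": "u"}, graph=dimensions)
except KeyError as err:
    print(f"rejected incomplete data ID: {err}")
```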