Merge pull request #631 from lsst/tickets/DM-33271
DM-33271: Fix mypy warnings with 0.931
timj committed Jan 15, 2022
2 parents 8f7140c + f774ee9 commit 90f5a21
Showing 18 changed files with 86 additions and 42 deletions.
3 changes: 1 addition & 2 deletions .github/workflows/mypy.yaml
@@ -19,8 +19,7 @@ jobs:
python-version: 3.8

- name: Install
# pin mypy to fix a compatibility issue between mypy 0.92 and pydantic
run: pip install "mypy==0.910" pydantic
run: pip install mypy pydantic

- name: Install third party stubs
run: pip install types-requests types-PyYAML types-click types-pkg_resources types-Deprecated
2 changes: 2 additions & 0 deletions .pre-commit-config.yaml
@@ -3,6 +3,8 @@ repos:
rev: v2.3.0
hooks:
- id: check-yaml
args:
- "--unsafe"
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/psf/black
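The new "--unsafe" argument is presumably needed because the repository's YAML test data (for example tests/data/registry/datasets-uuid.yaml, changed later in this same commit) uses custom tags such as !uuid, which the default safe loader rejects; with --unsafe, check-yaml only parses the files for syntax instead of constructing Python objects. A minimal sketch of the difference, assuming PyYAML:

import yaml

doc = "dataset_id: !uuid 51352db4-a47a-447c-b12d-a50b206b17cd"

# The safe loader refuses custom tags such as !uuid.
try:
    yaml.safe_load(doc)
except yaml.YAMLError as exc:
    print("safe_load rejected the custom tag:", exc)

# Parsing only (roughly what check-yaml does with --unsafe) validates the
# syntax without constructing objects, so the custom tag is accepted.
for _event in yaml.parse(doc):
    pass
print("syntax is valid")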
3 changes: 3 additions & 0 deletions mypy.ini
@@ -76,6 +76,9 @@ disallow_untyped_defs = False
[mypy-lsst.daf.butler.registry.tests.*]
ignore_errors = True

[mypy-lsst.daf.butler.core._butlerUri]
ignore_errors = True

# version.py is added by scons and may not exist when we run mypy.

[mypy-lsst.daf.butler.version]
12 changes: 6 additions & 6 deletions python/lsst/daf/butler/_butler.py
@@ -417,8 +417,8 @@ def makeRepo(
raise ValueError("makeRepo must be passed a regular Config without defaults applied.")

# Ensure that the root of the repository exists or can be made
uri = ResourcePath(root, forceDirectory=True)
uri.mkdir()
root_uri = ResourcePath(root, forceDirectory=True)
root_uri.mkdir()

config = Config(config)

@@ -457,21 +457,21 @@ def makeRepo(
# branch, _everything_ in the config is expanded, so there's no
# need to special case this.
Config.updateParameters(RegistryConfig, config, full, toMerge=("managers",), overwrite=False)
configURI: Union[str, ResourcePathExpression]
configURI: ResourcePathExpression
if outfile is not None:
# When writing to a separate location we must include
# the root of the butler repo in the config else it won't know
# where to look.
config["root"] = uri.geturl()
config["root"] = root_uri.geturl()
configURI = outfile
else:
configURI = uri
configURI = root_uri
config.dumpToUri(configURI, overwrite=overwrite)

# Create Registry and populate tables
registryConfig = RegistryConfig(config.get("registry"))
dimensionConfig = DimensionConfig(dimensionConfig)
Registry.createFromConfig(registryConfig, dimensionConfig=dimensionConfig, butlerRoot=root)
Registry.createFromConfig(registryConfig, dimensionConfig=dimensionConfig, butlerRoot=root_uri)

log.verbose("Wrote new Butler configuration file to %s", configURI)

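A plausible reading of the uri -> root_uri rename and the narrowed configURI annotation is that each local name now stays bound to a single, consistently annotated type, which newer mypy releases check more strictly. A small self-contained sketch of the kind of error mypy reports when one name mixes types (hypothetical names, not the butler API):

from typing import Optional


class FakeResourcePath:
    """Stand-in for lsst.resources.ResourcePath."""

    def __init__(self, path: str) -> None:
        self.path = path

    def geturl(self) -> str:
        return self.path


def make_repo(root: str, outfile: Optional[str] = None) -> None:
    uri = FakeResourcePath(root)  # inferred type: FakeResourcePath
    if outfile is not None:
        uri = outfile             # mypy: Incompatible types in assignment
                                  # (expression has type "str", variable has
                                  # type "FakeResourcePath")
    print(uri)


def make_repo_fixed(root: str, outfile: Optional[str] = None) -> None:
    root_uri = FakeResourcePath(root)
    config_uri = outfile if outfile is not None else root_uri.geturl()
    print(config_uri)             # each name keeps one consistent type (str)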
10 changes: 7 additions & 3 deletions python/lsst/daf/butler/_butlerRepoIndex.py
@@ -78,11 +78,15 @@ def _read_repository_index(cls, index_uri: ResourcePathExpression) -> Config:
-----
Does check the cache before reading the file.
"""
# Force the given value to a ResourcePath so that it can be used
# as an index into the cache consistently.
uri = ResourcePath(index_uri)

if index_uri in cls._cache:
return cls._cache[index_uri]
return cls._cache[uri]

repo_index = Config(index_uri)
cls._cache[index_uri] = repo_index
repo_index = Config(uri)
cls._cache[uri] = repo_index

return repo_index

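The comment added above gives the reason for the conversion: the cache is presumably keyed by ResourcePath, so indexing it with the raw ResourcePathExpression would be a type mismatch for mypy and could also miss cache hits for equivalent expressions of the same location. A generic sketch of the invalid-index-type error and the normalize-first fix (stand-in types, not the real classes):

from typing import Dict, Union

Expression = Union[str, int]     # stand-in for ResourcePathExpression

_cache: Dict[str, dict] = {}     # keyed by the normalized form only


def read_index(index_uri: Expression) -> dict:
    # Indexing _cache directly with index_uri would give something like:
    #   Invalid index type "Union[str, int]" for "Dict[str, Dict[Any, Any]]";
    #   expected type "str"
    uri = str(index_uri)         # normalize first, like ResourcePath(index_uri)
    if uri in _cache:
        return _cache[uri]
    config = {"source": uri}     # stand-in for Config(uri)
    _cache[uri] = config
    return config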
8 changes: 5 additions & 3 deletions python/lsst/daf/butler/core/_butlerUri.py
@@ -1,4 +1,6 @@
# Temporarily alias ResourcePath as ButlerURI.
__all__ = ["ButlerURI"]

from typing import Any, Type

from deprecated.sphinx import deprecated
@@ -54,7 +56,7 @@ def __new__(cls, uri: ResourcePathExpression, **kwargs: Any) -> ResourcePath:
_add_base(cls)
return new

new = ResourcePath(uri, **kwargs)
_add_base(type(new))
new_uri = ResourcePath(uri, **kwargs)
_add_base(type(new_uri))

return new
return new_uri
6 changes: 3 additions & 3 deletions python/lsst/daf/butler/core/dimensions/_coordinate.py
@@ -370,7 +370,7 @@ def __iter__(self) -> Iterator[Dimension]:
def __len__(self) -> int:
return len(self.keys())

def keys(self) -> NamedValueAbstractSet[Dimension]:
def keys(self) -> NamedValueAbstractSet[Dimension]: # type: ignore
return self.graph.required

@property
@@ -765,7 +765,7 @@ def __iter__(self) -> Iterator[Dimension]:
def __len__(self) -> int:
return len(self.keys())

def keys(self) -> NamedValueAbstractSet[Dimension]:
def keys(self) -> NamedValueAbstractSet[Dimension]: # type: ignore
return self._target.graph.dimensions

@property
@@ -809,7 +809,7 @@ def __iter__(self) -> Iterator[DimensionElement]:
def __len__(self) -> int:
return len(self.keys())

def keys(self) -> NamedValueAbstractSet[DimensionElement]:
def keys(self) -> NamedValueAbstractSet[DimensionElement]: # type: ignore
return self._target.graph.elements

@property
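The # type: ignore added to these keys() overrides (and to the matching ones in named.py and summaries.py below) is presumably needed because typeshed declares Mapping.keys() as returning KeysView, and the richer NamedValueAbstractSet return type is not a subtype of KeysView, so mypy flags the override as incompatible even though it is intentional here. A self-contained sketch of that error with stand-in classes:

from typing import AbstractSet, Iterator, Mapping


class NamedSet(AbstractSet[str]):
    """Stand-in for NamedValueAbstractSet."""

    def __init__(self, names: frozenset) -> None:
        self._names = names

    def __contains__(self, item: object) -> bool:
        return item in self._names

    def __iter__(self) -> Iterator[str]:
        return iter(self._names)

    def __len__(self) -> int:
        return len(self._names)


class Demo(Mapping[str, int]):
    def __getitem__(self, key: str) -> int:
        return 0

    def __iter__(self) -> Iterator[str]:
        return iter(())

    def __len__(self) -> int:
        return 0

    # Without the ignore, mypy reports: Return type "NamedSet" of "keys"
    # incompatible with return type "KeysView[str]" in supertype "Mapping"
    def keys(self) -> NamedSet:  # type: ignore
        return NamedSet(frozenset())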
7 changes: 5 additions & 2 deletions python/lsst/daf/butler/core/dimensions/_skypix.py
@@ -230,12 +230,15 @@ def visit(self, builder: DimensionConstructionBuilder) -> None:
if self._maxLevel is not None:
maxLevel = self._maxLevel
else:
maxLevel = getattr(PixelizationClass, "MAX_LEVEL", None)
if maxLevel is None:
# MyPy does not know the return type of getattr.
max_level = getattr(PixelizationClass, "MAX_LEVEL", None)
if max_level is None:
raise TypeError(
f"Skypix pixelization class {self._pixelizationClassName} does"
" not have MAX_LEVEL but no max level has been set explicitly."
)
assert isinstance(max_level, int)
maxLevel = max_level
system = SkyPixSystem(
self.name,
maxLevel=maxLevel,
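This works because getattr with a default is typed as returning Any, so mypy cannot infer an int on its own (as the new comment notes); the explicit None check plus the isinstance assert narrow the value before it is handed to SkyPixSystem. A minimal standalone sketch with a hypothetical pixelization class:

class FakePixelization:
    """Hypothetical stand-in for a sphgeom pixelization class."""

    MAX_LEVEL = 17


# getattr(..., default) comes back as Any, so mypy cannot infer int here.
max_level = getattr(FakePixelization, "MAX_LEVEL", None)
if max_level is None:
    raise TypeError("pixelization class has no MAX_LEVEL and no explicit max level was set")
assert isinstance(max_level, int)  # narrows the value to int for mypy
maxLevel: int = max_level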
10 changes: 6 additions & 4 deletions python/lsst/daf/butler/core/fileDataset.py
@@ -26,7 +26,7 @@
from dataclasses import dataclass
from typing import Any, List, Optional, Union

from lsst.resources import ResourcePathExpression
from lsst.resources import ResourcePath, ResourcePathExpression

from .datasets import DatasetRef
from .formatter import FormatterParameter
@@ -42,8 +42,8 @@ class FileDataset:
"""Registry information about the dataset. (`list` of `DatasetRef`).
"""

path: ResourcePathExpression
"""Path to the dataset (`lsst.resources.ResourcePathExpression`).
path: Union[str, ResourcePath]
"""Path to the dataset (`lsst.resources.ResourcePath` or `str`).
If the dataset was exported with ``transfer=None`` (i.e. in-place),
this is relative to the datastore root (only datastores that have a
@@ -63,7 +63,9 @@ def __init__(
*,
formatter: Optional[FormatterParameter] = None,
):
self.path = path
# Do not want to store all possible options supported by ResourcePath
# so force a conversion for the non-str parameters.
self.path = path if isinstance(path, str) else ResourcePath(path, forceAbsolute=False)
if isinstance(refs, DatasetRef):
refs = [refs]
self.refs = refs
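The constructor comment above explains the intent: accept the wide expression type on input, but store only the two forms (str or ResourcePath) that the path attribute is now annotated with, converting everything else up front. A rough standalone sketch of that normalization (stand-in ResourcePath, not the real class):

import pathlib
from typing import Union


class FakeResourcePath:
    """Stand-in for lsst.resources.ResourcePath."""

    def __init__(self, value: object, forceAbsolute: bool = True) -> None:
        self.value = str(value)


# Simplified stand-in for ResourcePathExpression.
Expression = Union[str, pathlib.Path, FakeResourcePath]


class FileDatasetSketch:
    path: Union[str, FakeResourcePath]

    def __init__(self, path: Expression) -> None:
        # Keep plain strings as-is; coerce everything else so the attribute
        # only ever holds the two declared types.
        self.path = path if isinstance(path, str) else FakeResourcePath(path, forceAbsolute=False)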
7 changes: 4 additions & 3 deletions python/lsst/daf/butler/core/named.py
@@ -107,7 +107,7 @@ def byName(self) -> Dict[str, V_co]:
return dict(zip(self.names, self.values()))

@abstractmethod
def keys(self) -> NamedValueAbstractSet[K_co]:
def keys(self) -> NamedValueAbstractSet[K_co]: # type: ignore
# TODO: docs
raise NotImplementedError()

@@ -232,7 +232,7 @@ def __delitem__(self, key: Union[str, K]) -> None:
del self._dict[key]
del self._names[key.name]

def keys(self) -> NamedValueAbstractSet[K]:
def keys(self) -> NamedValueAbstractSet[K]: # type: ignore
return NameMappingSetView(self._names)

def values(self) -> ValuesView[V]:
@@ -519,7 +519,8 @@ def clear(self) -> None:

def remove(self, element: Union[str, K]) -> Any:
# Docstring inherited.
del self._mapping[getattr(element, "name", element)]
k = getattr(element, "name") if not isinstance(element, str) else element
del self._mapping[k]

def discard(self, element: Union[str, K]) -> Any:
# Docstring inherited.
8 changes: 5 additions & 3 deletions python/lsst/daf/butler/datastores/fileDatastore.py
@@ -786,14 +786,16 @@ def _pathInStore(self, path: ResourcePathExpression) -> Optional[str]:
return pathUri.relative_to(self.root)

def _standardizeIngestPath(
self, path: ResourcePathExpression, *, transfer: Optional[str] = None
self, path: Union[str, ResourcePath], *, transfer: Optional[str] = None
) -> Union[str, ResourcePath]:
"""Standardize the path of a to-be-ingested file.
Parameters
----------
path : `lsst.resources.ResourcePathExpression`
Path of a file to be ingested.
path : `str` or `lsst.resources.ResourcePath`
Path of a file to be ingested. This parameter is not expected
to be all the types that can be used to construct a
`~lsst.resources.ResourcePath`.
transfer : `str`, optional
How (and whether) the dataset should be added to the datastore.
See `ingest` for details of transfer modes.
2 changes: 1 addition & 1 deletion python/lsst/daf/butler/registries/remote.py
@@ -89,7 +89,7 @@ def createFromConfig(
cls,
config: Optional[Union[RegistryConfig, str]] = None,
dimensionConfig: Optional[Union[DimensionConfig, str]] = None,
butlerRoot: Optional[str] = None,
butlerRoot: Optional[ResourcePathExpression] = None,
) -> Registry:
"""Create registry database and return `Registry` instance.
4 changes: 2 additions & 2 deletions python/lsst/daf/butler/registries/sql.py
@@ -104,7 +104,7 @@ def createFromConfig(
cls,
config: Optional[Union[RegistryConfig, str]] = None,
dimensionConfig: Optional[Union[DimensionConfig, str]] = None,
butlerRoot: Optional[str] = None,
butlerRoot: Optional[ResourcePathExpression] = None,
) -> Registry:
"""Create registry database and return `SqlRegistry` instance.
@@ -119,7 +119,7 @@ def createFromConfig(
dimensionConfig : `DimensionConfig` or `str`, optional
Dimensions configuration, if missing then default configuration
will be loaded from dimensions.yaml.
butlerRoot : `str`, optional
butlerRoot : convertible to `lsst.resources.ResourcePath`, optional
Path to the repository root this `SqlRegistry` will manage.
Returns
4 changes: 2 additions & 2 deletions python/lsst/daf/butler/registry/_registry.py
@@ -143,7 +143,7 @@ def createFromConfig(
cls,
config: Optional[Union[RegistryConfig, str]] = None,
dimensionConfig: Optional[Union[DimensionConfig, str]] = None,
butlerRoot: Optional[str] = None,
butlerRoot: Optional[ResourcePathExpression] = None,
) -> Registry:
"""Create registry database and return `Registry` instance.
@@ -158,7 +158,7 @@ def createFromConfig(
dimensionConfig : `DimensionConfig` or `str`, optional
Dimensions configuration, if missing then default configuration
will be loaded from dimensions.yaml.
butlerRoot : `str`, optional
butlerRoot : convertible to `lsst.resources.ResourcePath`, optional
Path to the repository root this `Registry` will manage.
Returns
2 changes: 1 addition & 1 deletion python/lsst/daf/butler/registry/dimensions/table.py
@@ -257,7 +257,7 @@ def sync(self, record: DimensionRecord, update: bool = False) -> Union[bool, Dic
# Inserted a new row, so we just need to insert new overlap
# rows.
self._skyPixOverlap.insert([record])
elif "region" in inserted_or_updated: # type: ignore
elif "region" in inserted_or_updated:
# Updated the region, so we need to delete old overlap rows
# and insert new ones.
# (mypy should be able to tell that inserted_or_updated
2 changes: 1 addition & 1 deletion python/lsst/daf/butler/registry/summaries.py
@@ -126,7 +126,7 @@ def names(self) -> AbstractSet[str]:
# Docstring inherited.
return self._mapping.names

def keys(self) -> NamedValueAbstractSet[GovernorDimension]:
def keys(self) -> NamedValueAbstractSet[GovernorDimension]: # type: ignore
return self._mapping.keys()

def values(self) -> ValuesView[AbstractSet[str]]:
19 changes: 16 additions & 3 deletions tests/data/registry/datasets-uuid.yaml
@@ -12,9 +12,9 @@
# flat for detectors 1-2, physical_filter=Cam1-R1
# flat for detectors 3-4, physical_filter=Cam1-R2
#
# At present, the path and formatter entries here are missing. If we ever
# want to use this data for a test that involves a datastore, that would need
# to change.
# At present the formatter entry is missing and the path is a placeholder.
# If we ever want to use this data for a test that involves a datastore,
# that would need to change.
description: Butler Data Repository Export
version: 0
data:
@@ -31,16 +31,19 @@ data:
data_id:
instrument: Cam1
detector: 1
path: placeholder
-
dataset_id: !uuid 51352db4-a47a-447c-b12d-a50b206b17cd
data_id:
instrument: Cam1
detector: 2
path: placeholder
-
dataset_id: !uuid e06b6f3c-5b3e-46d0-bdde-4ef972301116
data_id:
instrument: Cam1
detector: 3
path: placeholder
-
type: dataset
dataset_type: flat
@@ -53,20 +56,23 @@ data:
detector: 2
physical_filter: Cam1-G
band: g
path: placeholder
-
dataset_id: !uuid 84239e7f-c41f-46d5-97b9-a27976b98ceb
data_id:
instrument: Cam1
detector: 3
physical_filter: Cam1-G
band: g
path: placeholder
-
dataset_id: !uuid fd51bce1-2848-49d6-a378-f8a122f5139a
data_id:
instrument: Cam1
detector: 4
physical_filter: Cam1-G
band: g
path: placeholder
-
type: run
name: imported_r
@@ -80,16 +86,19 @@ data:
data_id:
instrument: Cam1
detector: 2
path: placeholder
-
dataset_id: !uuid dc0ef017-dc94-4118-b431-d65b1ef89a5f
data_id:
instrument: Cam1
detector: 3
path: placeholder
-
dataset_id: !uuid e255067d-dcc5-4f39-9824-0baa5817d3e5
data_id:
instrument: Cam1
detector: 4
path: placeholder
-
type: dataset
dataset_type: flat
@@ -102,24 +111,28 @@ data:
detector: 1
physical_filter: Cam1-R1
band: r
path: placeholder
-
dataset_id: !uuid c1296796-56c5-4acf-9b49-40d920c6f840
data_id:
instrument: Cam1
detector: 2
physical_filter: Cam1-R1
band: r
path: placeholder
-
dataset_id: !uuid 393972aa-57ab-41c2-b3bc-03541ca6bc6e
data_id:
instrument: Cam1
detector: 3
physical_filter: Cam1-R2
band: r
path: placeholder
-
dataset_id: !uuid 37185f04-1cea-4ddd-a065-a5e0be9e19de
data_id:
instrument: Cam1
detector: 4
physical_filter: Cam1-R2
band: r
path: placeholder
