Skip to content

Commit

Permalink
Use modern syntax for annotations
Browse files Browse the repository at this point in the history
Tuple -> tuple
Optional[str] -> str | None
etc.
  • Loading branch information
timj committed Nov 14, 2022
1 parent cf39060 commit 7d03ce1
Show file tree
Hide file tree
Showing 5 changed files with 19 additions and 24 deletions.
20 changes: 10 additions & 10 deletions python/lsst/daf/butler/script/ingest_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@

import logging
from collections import defaultdict
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
from typing import TYPE_CHECKING, Any

from astropy.table import Table
from lsst.resources import ResourcePath
Expand All @@ -45,10 +45,10 @@ def ingest_files(
dataset_type: str,
run: str,
table_file: str,
data_id: Tuple[str, ...] = (),
formatter: Optional[str] = None,
data_id: tuple[str, ...] = (),
formatter: str | None = None,
id_generation_mode: str = "UNIQUE",
prefix: Optional[str] = None,
prefix: str | None = None,
transfer: str = "auto",
) -> None:
"""Ingest files from a table.
Expand Down Expand Up @@ -117,11 +117,11 @@ def ingest_files(

def extract_datasets_from_table(
table: Table,
common_data_id: Dict,
common_data_id: dict,
datasetType: DatasetType,
formatter: Optional[str] = None,
prefix: Optional[str] = None,
) -> List[FileDataset]:
formatter: str | None = None,
prefix: str | None = None,
) -> list[FileDataset]:
"""Extract datasets from the supplied table.
Parameters
Expand Down Expand Up @@ -193,10 +193,10 @@ def extract_datasets_from_table(
return datasets


def parse_data_id_tuple(data_ids: Tuple[str, ...], universe: DimensionUniverse) -> Dict[str, Any]:
def parse_data_id_tuple(data_ids: tuple[str, ...], universe: DimensionUniverse) -> dict[str, Any]:
# Convert any additional k=v strings in the dataId tuple to dict
# form.
data_id: Dict[str, Any] = {}
data_id: dict[str, Any] = {}
for id_str in data_ids:
dimension_str, value = id_str.split("=")

Expand Down
7 changes: 3 additions & 4 deletions python/lsst/daf/butler/script/pruneCollection.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@

from collections.abc import Callable
from dataclasses import dataclass
from typing import Optional, Union

from astropy.table import Table

Expand All @@ -41,9 +40,9 @@ class PruneCollectionResult:
def __init__(self, confirm: bool) -> None:
# if `confirm == True`, will contain the astropy table describing data
# that will be removed.
self.removeTable: Union[None, Table] = None
self.removeTable: None | Table = None
# the callback function to do the work
self.onConfirmation: Union[None, Callable[[], None]] = None
self.onConfirmation: None | Callable[[], None] = None
# true if the user should be shown what will be removed before pruning
# the collection.
self.confirm: bool = confirm
Expand Down Expand Up @@ -82,7 +81,7 @@ class CollectionInfo:
"""Lightweight container to hold the type of collection and the number
of datasets in the collection if applicable."""

count: Optional[int]
count: int | None
type: str

result = PruneCollectionResult(confirm)
Expand Down
4 changes: 1 addition & 3 deletions python/lsst/daf/butler/script/register_dataset_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,6 @@

__all__ = ("register_dataset_type",)

from typing import Tuple

from .._butler import Butler
from ..core import DatasetType

Expand All @@ -32,7 +30,7 @@ def register_dataset_type(
repo: str,
dataset_type: str,
storage_class: str,
dimensions: Tuple[str, ...],
dimensions: tuple[str, ...],
is_calibration: bool = False,
) -> bool:
"""Register a new dataset type.
Expand Down
7 changes: 3 additions & 4 deletions python/lsst/daf/butler/script/removeRuns.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
from collections.abc import Callable, Mapping, Sequence
from dataclasses import dataclass
from functools import partial
from typing import Dict, List, Tuple

from .._butler import Butler
from ..registry import CollectionType, MissingCollectionError
Expand All @@ -38,7 +37,7 @@ class RemoveRun:
# the name of the run:
name: str
# parent CHAINED collections the RUN belongs to:
parents: List[str]
parents: list[str]


@dataclass
Expand All @@ -61,7 +60,7 @@ class RemoveRunsResult:
def _getCollectionInfo(
repo: str,
collection: str,
) -> Tuple[List[RemoveRun], Mapping[str, int]]:
) -> tuple[list[RemoveRun], Mapping[str, int]]:
"""Get the names and types of collections that match the collection
string.
Expand Down Expand Up @@ -92,7 +91,7 @@ def _getCollectionInfo(
except MissingCollectionError:
collectionNames = list()
runs = []
datasets: Dict[str, int] = defaultdict(int)
datasets: dict[str, int] = defaultdict(int)
for collectionName in collectionNames:
assert butler.registry.getCollectionType(collectionName).name == "RUN"
parents = butler.registry.getCollectionParentChains(collectionName)
Expand Down
5 changes: 2 additions & 3 deletions python/lsst/daf/butler/script/transferDatasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
__all__ = ("transferDatasets",)

import logging
from typing import Tuple

from .._butler import Butler
from ..registry.queries import DatasetQueryResults
Expand All @@ -34,8 +33,8 @@
def transferDatasets(
source: str,
dest: str,
dataset_type: Tuple[str, ...],
collections: Tuple[str, ...],
dataset_type: tuple[str, ...],
collections: tuple[str, ...],
where: str,
find_first: bool,
transfer: str,
Expand Down

0 comments on commit 7d03ce1

Please sign in to comment.