Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

DM-39605: Use butler.dimensions rather than butler.registry.dimensions #244

Merged
merged 8 commits into from
Jun 10, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/docstyle.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,4 @@ jobs:
call-workflow:
uses: lsst/rubin_workflows/.github/workflows/docstyle.yaml@main
with:
args: "python/lsst/ctrl/mpexec/"
args: "python/"
63 changes: 62 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,8 @@ convention = "numpy"
# Docstring at the very first line is not required
# D200, D205 and D400 all complain if the first sentence of the docstring does
# not fit on one line.
add-ignore = ["D107", "D105", "D102", "D100", "D200", "D205", "D400"]
# Do not require docstrings in __init__.py files (D104)
add-ignore = ["D107", "D105", "D102", "D100", "D200", "D205", "D400", "D104"]

[tool.coverage.report]
show_missing = true
Expand All @@ -135,3 +136,63 @@ exclude_lines = [
"if __name__ == .__main__.:",
"if TYPE_CHECKING:",
]

# The rule used with Ruff configuration is to disable every lint that has
# legitimate exceptions that are not dodgy code, rather than cluttering code
# with noqa markers. This is therefore a relatively relaxed configuration that
# errs on the side of disabling legitimate lints.
#
# Reference for settings: https://beta.ruff.rs/docs/settings/
# Reference for rules: https://beta.ruff.rs/docs/rules/
[tool.ruff]
exclude = [
"docs/**",
]
line-length = 110
ignore = [
"ANN101", # self should not have a type annotation
"ANN102", # cls should not have a type annotation
"ANN401", # sometimes Any is the right type
"ARG001", # unused function arguments are often legitimate
"ARG002", # unused method arguments are often legitimate
"ARG005", # unused lambda arguments are often legitimate
"BLE001", # we want to catch and report Exception in background tasks
"C414", # nested sorted is how you sort by multiple keys with reverse
"COM812", # omitting trailing commas allows black autoreformatting
"D102", # sometimes we use docstring inheritance
"D104", # don't see the point of documenting every package
"D105", # our style doesn't require docstrings for magic methods
"D106", # Pydantic uses a nested Config class that doesn't warrant docs
"EM101", # justification (duplicate string in traceback) is silly
"EM102", # justification (duplicate string in traceback) is silly
"FBT003", # positional booleans are normal for Pydantic field defaults
"G004", # forbidding logging f-strings is appealing, but not our style
"RET505", # disagree that omitting else always makes code more readable
"PLR0913", # factory pattern uses constructors with many arguments
"PLR2004", # too aggressive about magic values
"S105", # good idea but too many false positives on non-passwords
"S106", # good idea but too many false positives on non-passwords
"SIM102", # sometimes the formatting of nested if statements is clearer
"SIM117", # sometimes nested with contexts are clearer
"TCH001", # we decided to not maintain separate TYPE_CHECKING blocks
"TCH002", # we decided to not maintain separate TYPE_CHECKING blocks
"TCH003", # we decided to not maintain separate TYPE_CHECKING blocks
"TID252", # if we're going to use relative imports, use them always
"TRY003", # good general advice but lint is way too aggressive
"N802",
"N803",
"N806",
"N812",
"N815",
"N816",
"D107",
"D105",
"D102",
"D100",
"D200",
"D205",
"D400",
"D104",
]
select = ["ALL"]
target-version = "py311"
1 change: 1 addition & 0 deletions python/lsst/ctrl/mpexec/cli/cmd/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,7 @@ def build(ctx: click.Context, **kwargs: Any) -> None:

@contextmanager
def coverage_context(kwargs: dict[str, Any]) -> Iterator[None]:
"""Enable coverage recording."""
packages = kwargs.pop("cov_packages", ())
report = kwargs.pop("cov_report", True)
if not kwargs.pop("coverage", False):
Expand Down
15 changes: 10 additions & 5 deletions python/lsst/ctrl/mpexec/cli/opt/optionGroups.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,8 @@

class pipeline_build_options(OptionGroup): # noqa: N801
"""Decorator to add options to the command function for building a
pipeline."""
pipeline.
"""

def __init__(self) -> None:
self.decorators = [
Expand Down Expand Up @@ -85,7 +86,8 @@ def __init__(self) -> None:

class qgraph_options(OptionGroup): # noqa: N801
"""Decorator to add options to a command function for creating a quantum
graph."""
graph.
"""

def __init__(self) -> None:
self.decorators = [
Expand Down Expand Up @@ -120,7 +122,8 @@ def __init__(self) -> None:

class butler_options(OptionGroup): # noqa: N801
"""Decorator to add options to a command function for configuring a
butler."""
butler.
"""

def __init__(self) -> None:
self.decorators = [
Expand All @@ -138,7 +141,8 @@ def __init__(self) -> None:

class execution_options(OptionGroup): # noqa: N801
"""Decorator to add options to a command function for executing a
pipeline."""
pipeline.
"""

def __init__(self) -> None:
self.decorators = [
Expand All @@ -158,7 +162,8 @@ def __init__(self) -> None:

class meta_info_options(OptionGroup): # noqa: N801
"""Decorator to add options to a command function for managing pipeline
meta information."""
meta information.
"""

def __init__(self) -> None:
self.decorators = [
Expand Down
4 changes: 4 additions & 0 deletions python/lsst/ctrl/mpexec/cli/pipetask.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@


class PipetaskCLI(LoaderCLI):
"""Pipetask-specific implementation of command-line loader."""

localCmdPkg = "lsst.ctrl.mpexec.cli.cmd"


Expand All @@ -41,10 +43,12 @@ class PipetaskCLI(LoaderCLI):
@log_tty_option()
@log_label_option()
def cli(log_level, long_log, log_file, log_tty, log_label) -> None: # type: ignore
"""Implement pipetask command line."""
# log_level is handled by get_command and list_commands, and is called in
# one of those functions before this is called.
pass


def main() -> None:
"""Run pipetask command-line."""
return cli()
5 changes: 3 additions & 2 deletions python/lsst/ctrl/mpexec/cli/script/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,9 @@
def build( # type: ignore
order_pipeline, pipeline, pipeline_actions, pipeline_dot, save_pipeline, show, **kwargs
):
"""Implements the command line interface `pipetask build` subcommand,
should only be called by command line tools and unit test code that tests
"""Implement the command line interface `pipetask build` subcommand.

Should only be called by command line tools and unit test code that tests
this function.

Build and optionally save pipeline definition.
Expand Down
6 changes: 6 additions & 0 deletions python/lsst/ctrl/mpexec/cli/script/cleanup.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@


class NoSuchCollectionFailure:
"""Failure when there is no such collection."""

def __init__(self, collection: str):
self.collection = collection

Expand All @@ -38,6 +40,8 @@ def __str__(self) -> str:


class NotChainedCollectionFailure:
"""Failure when this is not a chained collection."""

def __init__(self, collection: str, type: str):
self.collection = collection
self.type = type
Expand All @@ -47,6 +51,8 @@ def __str__(self) -> str:


class CleanupResult(ConfirmableResult):
"""Information containing the result of the cleanup request."""

def __init__(self, butler_config: str):
self.butler_config = butler_config
self.runs_to_remove: list[str] = []
Expand Down
26 changes: 17 additions & 9 deletions python/lsst/ctrl/mpexec/cli/script/confirmable.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,15 @@


from abc import ABC, abstractmethod
from typing import Callable
from collections.abc import Callable

import click


class ConfirmableResult(ABC):

"""Interface for a results class that can be used by the `confirm`
function."""
function.
"""

@abstractmethod
def describe(self, will: bool) -> str:
Expand All @@ -47,33 +47,41 @@ def describe(self, will: bool) -> str:

@abstractmethod
def on_confirmation(self) -> None:
"""Performs the action that was returned from `describe`. This is
Called just after the user has confirmed (if needed)."""
"""Perform the action that was returned from `describe`.

This is called just after the user has confirmed (if needed).
"""
pass

@property
@abstractmethod
def failed(self) -> bool:
"""Query if there was a failure preparing the ConfirmableResult,
before `on_confirmation` is called."""
before `on_confirmation` is called.
"""
pass

@property
@abstractmethod
def describe_failure(self) -> str:
"""Get a message describing the failure. This is used as the message
when raising a `ClickException` to stop with exit code 1."""
"""Get a message describing the failure.

This is used as the message when raising a `~click.ClickException` to
stop with exit code 1.
"""
pass

@property
@abstractmethod
def can_continue(self) -> bool:
"""Query if the ConfirmableResult can continue. Returns `False` if
there is no work to be done."""
there is no work to be done.
"""
pass


def confirm(script_func: Callable[[], ConfirmableResult], confirm: bool) -> ConfirmableResult:
"""Prompt user to continue."""
result = script_func()
if result.failed:
raise click.ClickException(result.describe_failure)
Expand Down
6 changes: 4 additions & 2 deletions python/lsst/ctrl/mpexec/cli/script/pre_exec_init_qbb.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,10 @@ def pre_exec_init_qbb(
qgraph_id: str | None,
config_search_path: list[str] | None,
) -> None:
"""Implements the command line interface `pipetask pre-exec-init-qbb`
subcommand, should only be called by command line tools and unit test code
"""Implement the command line interface ``pipetask pre-exec-init-qbb``
subcommand.

Should only be called by command line tools and unit test code
that tests this function.

Parameters
Expand Down
42 changes: 26 additions & 16 deletions python/lsst/ctrl/mpexec/cli/script/purge.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@


import itertools
from typing import Any, Optional, Union
from typing import Any

from lsst.daf.butler import Butler, CollectionType
from lsst.daf.butler.registry import MissingCollectionError
Expand All @@ -35,6 +35,8 @@


class ChildHasMultipleParentsFailure:
"""Failure when the child has multiple parents."""

def __init__(self, child: str, parents: list[str]):
self.child = child
self.parents = parents
Expand All @@ -45,6 +47,8 @@ def __str__(self) -> str:


class TopCollectionHasParentsFailure:
"""Failure when the top collection has parents."""

def __init__(self, collection: str, parents: list[str]):
self.collection = collection
self.parents = parents
Expand All @@ -57,7 +61,9 @@ def __str__(self) -> str:
)


class TopCollectionIsNotChianedFailure:
class TopCollectionIsNotChainedFailure:
"""Failure when the top collection is not a chain."""

def __init__(self, collection: str, collection_type: CollectionType):
self.collection = collection
self.collection_type = collection_type
Expand All @@ -70,14 +76,18 @@ def __str__(self) -> str:


class TopCollectionNotFoundFailure:
"""Failure when the top collection is not found."""

def __init__(self, collection: str):
self.collection = collection

def __str__(self) -> str:
return f'The passed-in colleciton "{self.collection}" was not found.'
return f'The passed-in collection "{self.collection}" was not found.'


class PurgeResult(ConfirmableResult):
"""The results of the purge command."""

def __init__(self, butler_config: str):
self.runs_to_remove: list[str] = []
self.chains_to_remove: list[str] = []
Expand Down Expand Up @@ -123,23 +133,23 @@ def can_continue(self) -> bool:

def fail(
self,
failure: Union[
ChildHasMultipleParentsFailure,
TopCollectionHasParentsFailure,
TopCollectionIsNotChianedFailure,
TopCollectionNotFoundFailure,
],
failure: (
ChildHasMultipleParentsFailure
| TopCollectionHasParentsFailure
| TopCollectionIsNotChainedFailure
| TopCollectionNotFoundFailure
),
) -> None:
self.failure = failure


def check_parents(butler: Butler, child: str, expected_parents: list[str]) -> Optional[list[str]]:
"""Check that the parents of a child collection match
the provided expected parents.
def check_parents(butler: Butler, child: str, expected_parents: list[str]) -> list[str] | None:
"""Check that the parents of a child collection match the provided
expected parents.

Parameters
----------
butler : `Butler`
butler : `~lsst.daf.butler.Butler`
The butler to the current repo.
child : `str`
The child collection to check.
Expand Down Expand Up @@ -180,7 +190,7 @@ def prepare_to_remove(
other child collections will be ignored.
parent_collection : `str`
The parent CHAINED collection currently being removed.
butler : `Butler`
butler : `~lsst.daf.butler.Butler`
The butler to the repo.
recursive : `bool`
If True then children of the top collection that are also CHAINED
Expand Down Expand Up @@ -232,7 +242,7 @@ def purge(

Returns
-------
purge_result : PurgeResult
purge_result : `PurgeResult`
The description of what datasets to remove and/or failures encountered
while preparing to remove datasets to remove, and a completion function
to remove the datasets after confirmation, if needed.
Expand All @@ -247,7 +257,7 @@ def purge(
return result

if collection_type != CollectionType.CHAINED:
result.fail(TopCollectionIsNotChianedFailure(collection, collection_type))
result.fail(TopCollectionIsNotChainedFailure(collection, collection_type))
elif parents := check_parents(butler, collection, []):
result.fail(TopCollectionHasParentsFailure(collection, parents))
else:
Expand Down