
Commit

chore: update black
vberlier committed Feb 16, 2024
1 parent 288198b commit 2ac5ebe
Showing 25 changed files with 164 additions and 149 deletions.
52 changes: 26 additions & 26 deletions mecha/api.py
@@ -257,9 +257,12 @@ def prepare_token_stream(
multiline: Optional[bool] = None,
) -> Iterator[TokenStream]:
"""Prepare the token stream for parsing."""
with stream.reset(*stream.data), stream.provide(
spec=self.spec,
multiline=self.spec.multiline if multiline is None else multiline,
with (
stream.reset(*stream.data),
stream.provide(
spec=self.spec,
multiline=self.spec.multiline if multiline is None else multiline,
),
):
with stream.reset_syntax(comment=r"#.*$", literal=AstLiteral.regex.pattern):
with stream.indent(skip=["comment"]), stream.ignore("indent", "dedent"):
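
The hunk above shows the reformatted prepare_token_stream: instead of hanging the extra context manager off the stream.provide( call parentheses, the updated Black style groups multiple context managers inside parentheses when the with line no longer fits. A minimal, hypothetical sketch of the same before/after (not code from this repository; parenthesized context managers require Python 3.10 or newer):

import tempfile

# Older output attached the second context manager to the call parentheses:
#
# with tempfile.TemporaryDirectory() as first_dir, tempfile.TemporaryDirectory(
#     prefix="example-"
# ) as second_dir:
#     ...

# Newer output groups the context managers in parentheses instead:
with (
    tempfile.TemporaryDirectory() as first_dir,
    tempfile.TemporaryDirectory(prefix="example-") as second_dir,
):
    print(first_dir, second_dir)
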
@@ -276,8 +279,7 @@ def parse(
multiline: Optional[bool] = None,
provide: Optional[JsonDict] = None,
preprocessor: Optional[Preprocessor] = None,
) -> AstRoot:
...
) -> AstRoot: ...

@overload
def parse(
@@ -290,8 +292,7 @@ def parse(
multiline: Optional[bool] = None,
provide: Optional[JsonDict] = None,
preprocessor: Optional[Preprocessor] = None,
) -> AstNodeType:
...
) -> AstNodeType: ...

@overload
def parse(
@@ -304,8 +305,7 @@ def parse(
multiline: Optional[bool] = None,
provide: Optional[JsonDict] = None,
preprocessor: Optional[Preprocessor] = None,
) -> Any:
...
) -> Any: ...

def parse(
self,
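
The overload signatures above also pick up the updated handling of stub bodies: a function whose body is only `...` is collapsed onto the signature line. A small, self-contained sketch with hypothetical names (not the project's API):

from typing import Union, overload

# Older output kept the ellipsis body on its own line:
#
# @overload
# def parse_number(text: str) -> int:
#     ...

# Newer output keeps stub-only bodies on the signature line:
@overload
def parse_number(text: str) -> int: ...
@overload
def parse_number(text: bytes) -> int: ...


def parse_number(text: Union[str, bytes]) -> int:
    """Tiny implementation used only to make the sketch runnable."""
    return int(text)
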
@@ -393,8 +393,7 @@ def compile(
readonly: Optional[bool] = None,
initial_step: int = 0,
report: Optional[DiagnosticCollection] = None,
) -> PackType:
...
) -> PackType: ...

@overload
def compile(
@@ -407,8 +406,7 @@ def compile(
readonly: Optional[bool] = None,
initial_step: int = 0,
report: Optional[DiagnosticCollection] = None,
) -> None:
...
) -> None: ...

@overload
def compile(
@@ -423,8 +421,7 @@ def compile(
readonly: Optional[bool] = None,
initial_step: int = 0,
report: Optional[DiagnosticCollection] = None,
) -> TextFileType:
...
) -> TextFileType: ...

@overload
def compile(
@@ -439,8 +436,7 @@ def compile(
readonly: Optional[bool] = None,
initial_step: int = 0,
report: Optional[DiagnosticCollection] = None,
) -> Function:
...
) -> Function: ...

def compile(
self,
@@ -616,15 +612,19 @@ def log_reported_diagnostics(self):
def format_perf(self) -> List[List[str]]:
"""Format perf report."""
step_headers = [
"Lint"
if step is self.lint
else "Transform"
if step is self.transform
else "Optimize"
if step is self.optimize
else "Check"
if step is self.check
else repr(step)
(
"Lint"
if step is self.lint
else (
"Transform"
if step is self.transform
else (
"Optimize"
if step is self.optimize
else "Check" if step is self.check else repr(step)
)
)
)
for step in self.steps
]
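
The rewritten step_headers comprehension reflects the updated treatment of long conditional expressions: when a chain of if/else expressions has to be split, each nested conditional is wrapped in its own parentheses rather than relying on hanging indentation alone. A rough, hypothetical sketch of the same idea:

def describe(count: int) -> str:
    # Older output stacked the nested conditionals with hanging indentation:
    #
    # return (
    #     "none"
    #     if count == 0
    #     else "one"
    #     if count == 1
    #     else "many"
    # )
    #
    # Newer output parenthesizes the nested conditional:
    return (
        "none"
        if count == 0
        else ("one" if count == 1 else "many")
    )


print(describe(0), describe(1), describe(7))
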

34 changes: 16 additions & 18 deletions mecha/ast.py
@@ -183,14 +183,16 @@ def dump(
+ (
"\n" + ("\n".join((f"{prefix} {type(child)}" if shallow else child.dump(prefix + " ", shallow, exclude)) for child in attribute) if attribute else prefix + " <empty>") # type: ignore
if isinstance(attribute := getattr(self, f.name), AbstractChildren)
else "\n"
+ (
f"{prefix} {type(attribute)}"
if shallow
else attribute.dump(prefix + " ", shallow, exclude)
else (
"\n"
+ (
f"{prefix} {type(attribute)}"
if shallow
else attribute.dump(prefix + " ", shallow, exclude)
)
if isinstance(attribute, AbstractNode)
else f" {attribute!r}"
)
if isinstance(attribute, AbstractNode)
else f" {attribute!r}"
)
for f in fields(self)
if not exclude or f.name not in exclude
@@ -700,23 +702,19 @@ def evaluate(self) -> Any:

@overload
@classmethod
def from_value(cls, value: Union[bool, int, float, str]) -> "AstNbtValue":
...
def from_value(cls, value: Union[bool, int, float, str]) -> "AstNbtValue": ...

@overload
@classmethod
def from_value(cls, value: Mapping[Any, Any]) -> "AstNbtCompound":
...
def from_value(cls, value: Mapping[Any, Any]) -> "AstNbtCompound": ...

@overload
@classmethod
def from_value(cls, value: Sequence[Any]) -> "AstNbtList":
...
def from_value(cls, value: Sequence[Any]) -> "AstNbtList": ...

@overload
@classmethod
def from_value(cls, value: Any) -> "AstNbt":
...
def from_value(cls, value: Any) -> "AstNbt": ...

@classmethod
def from_value(cls, value: Any) -> "AstNbt":
@@ -1039,9 +1037,9 @@ class AstSelectorAdvancementMatch(AstNode):
"""Ast selector advancement match node."""

key: AstResourceLocation = required_field()
value: Union[
AstBool, AstChildren[AstSelectorAdvancementPredicateMatch]
] = required_field()
value: Union[AstBool, AstChildren[AstSelectorAdvancementPredicateMatch]] = (
required_field()
)
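
The value field of AstSelectorAdvancementMatch shows another part of the updated style: when an annotated assignment is too long, the annotation stays on one line and the right-hand side is wrapped in parentheses, instead of splitting the subscripted annotation. A standalone sketch with hypothetical names:

from typing import Union


def compute_default_optimization_threshold() -> float:
    """Placeholder default used only for this illustration."""
    return 0.5


# Older output split the annotation to make the line fit:
#
# configured_optimization_threshold: Union[
#     int, float
# ] = compute_default_optimization_threshold()

# Newer output keeps the annotation intact and parenthesizes the value:
configured_optimization_threshold: Union[int, float] = (
    compute_default_optimization_threshold()
)
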


@dataclass(frozen=True, slots=True)
1 change: 0 additions & 1 deletion mecha/contrib/annotate_diagnostics.py
@@ -1,6 +1,5 @@
"""Plugin that inserts comments containing mecha diagnostics."""


__all__ = [
"annotate_diagnostics",
]
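
The many @@ -1,6 +1,5 @@ hunks across mecha/contrib all make the same whitespace change: the updated style keeps a single blank line between the module docstring and the first statement, so the second blank line that used to follow each docstring is dropped. A minimal sketch with a hypothetical module:

"""Hypothetical plugin module used only to illustrate the spacing change."""

__all__ = [
    "example_plugin",
]

# The previous formatting kept two blank lines between the docstring and
# __all__; the updated formatting reduces this to one.


def example_plugin() -> None:
    """Placeholder body for the illustration."""
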
4 changes: 1 addition & 3 deletions mecha/contrib/bake_macros.py
@@ -1,6 +1,5 @@
"""Plugin for baking static macro invocations."""


__all__ = [
"bake_macros",
"BakeMacrosOptions",
@@ -85,8 +84,7 @@ def __call__(
source: TextFileBase[Any],
*,
preprocessor: Preprocessor,
) -> AstRoot:
...
) -> AstRoot: ...


@dataclass
1 change: 0 additions & 1 deletion mecha/contrib/clear_diagnostics.py
@@ -1,6 +1,5 @@
"""Plugin that clears diagnostics."""


from beet import Context

from mecha import Mecha
1 change: 0 additions & 1 deletion mecha/contrib/debug_ast.py
@@ -1,6 +1,5 @@
"""Plugin for emitting ast."""


__all__ = [
"DebugAstOptions",
"DebugAstEmitter",
7 changes: 2 additions & 5 deletions mecha/contrib/embed.py
@@ -1,6 +1,5 @@
"""Plugin for parsing and resolving embeds in json and nbt strings."""


__all__ = [
"AstJsonValueEmbed",
"AstNbtValueEmbed",
@@ -64,15 +63,13 @@ def __call__(
*,
using: str,
preprocessor: Preprocessor,
) -> AstNode:
...
) -> AstNode: ...


class EmbedSerializeCallback(Protocol):
"""Callback required for serializing embed."""

def __call__(self, node: AstNode) -> str:
...
def __call__(self, node: AstNode) -> str: ...


class EmbedHandler:
1 change: 0 additions & 1 deletion mecha/contrib/implicit_execute.py
@@ -1,6 +1,5 @@
"""Plugin that handles implicit execute commands."""


__all__ = [
"ImplicitExecuteParser",
]
1 change: 0 additions & 1 deletion mecha/contrib/inline_function_tag.py
@@ -1,6 +1,5 @@
"""Plugin for declaring function tags inline."""


__all__ = [
"InlineFunctionTagHandler",
]
1 change: 0 additions & 1 deletion mecha/contrib/json_files.py
@@ -1,6 +1,5 @@
"""Plugin for compiling json files with mecha."""


__all__ = [
"JsonFileCompilation",
"AstJsonRoot",
1 change: 0 additions & 1 deletion mecha/contrib/messages.py
@@ -1,6 +1,5 @@
"""Plugin for handling message references in commands."""


__all__ = [
"AstMessageReference",
"AstMessageReferencePath",
1 change: 0 additions & 1 deletion mecha/contrib/nested_location.py
@@ -1,6 +1,5 @@
"""Plugin for handling lexically nested resource locations."""


__all__ = [
"AstNestedLocation",
"UnresolvedNestedLocation",
17 changes: 10 additions & 7 deletions mecha/contrib/nested_resources.py
@@ -1,6 +1,5 @@
"""Plugin for handling nested resources."""


__all__ = [
"NestedResources",
"NestedResourcesTransformer",
@@ -80,9 +79,11 @@ def __init__(
}

self.text_resources = {
f"{file_type.snake_name}_file"
if file_type.snake_name in should_disambiguate
else file_type.snake_name: file_type
(
f"{file_type.snake_name}_file"
if file_type.snake_name in should_disambiguate
else file_type.snake_name
): file_type
for pack in packs
for file_type in pack.get_file_types(extend=TextFileBase)
}
@@ -292,9 +293,11 @@ def nested_resources(self, node: AstRoot):
continue

file_instance = file_type(
content.evaluate()
if isinstance(content, AstJson)
else content.value,
(
content.evaluate()
if isinstance(content, AstJson)
else content.value
),
original=self.database.current.original,
)

19 changes: 12 additions & 7 deletions mecha/contrib/nested_yaml.py
@@ -1,6 +1,5 @@
"""Plugin for handling nested yaml."""


__all__ = [
"BaseYamlObjectCollector",
"BaseYamlArrayCollector",
@@ -243,16 +242,22 @@ def __call__(self, stream: TokenStream) -> Any:
with stream.reset("nested_yaml"):
return self.original_parser(stream)

with stream.intercept("newline"), stream.syntax(
colon=r":",
dash=r"\-",
key=r"[a-zA-Z0-9._+-]+",
with (
stream.intercept("newline"),
stream.syntax(
colon=r":",
dash=r"\-",
key=r"[a-zA-Z0-9._+-]+",
),
):
if consume_line_continuation(stream):
return self.parse_yaml(stream)

with stream.ignore("newline"), stream.syntax(
string=r'"(?:\\.|[^\\\n])*?"' "|" r"'(?:\\.|[^\\\n])*?'",
with (
stream.ignore("newline"),
stream.syntax(
string=r'"(?:\\.|[^\\\n])*?"' "|" r"'(?:\\.|[^\\\n])*?'",
),
):
if token := stream.get("string"):
return self.string_collector(
1 change: 0 additions & 1 deletion mecha/contrib/raw.py
@@ -1,6 +1,5 @@
"""Plugin for inserting raw commands."""


__all__ = [
"RawCommandSerializer",
]
1 change: 0 additions & 1 deletion mecha/contrib/relative_location.py
@@ -1,6 +1,5 @@
"""Plugin that resolves relative resource locations."""


__all__ = [
"RelativeResourceLocationParser",
"resolve_using_database",
5 changes: 3 additions & 2 deletions mecha/contrib/source_map.py
@@ -1,6 +1,5 @@
"""Plugin that emits source mapping information."""


__all__ = [
"AstSourceMap",
"SourceMapOptions",
@@ -31,7 +30,9 @@


class SourceMapOptions(BaseModel):
header: str = "# [source_map] {{ compilation_unit.filename or compilation_unit.resource_location }}"
header: str = (
"# [source_map] {{ compilation_unit.filename or compilation_unit.resource_location }}"
)

class Config:
extra = "forbid"
