10 changes: 10 additions & 0 deletions CHANGELOG.md
@@ -7,6 +7,15 @@ this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm

## [unreleased]

### Added

* Support for module syntax: `module` keyword and `use rule` syntax ([#99][99])
* Support for `containerized` keyword

### Changed

* Updated snakemake dependency to ^6.0.0 ([#99][99])

## [0.3.1]

### Fixed
@@ -170,4 +179,5 @@ is 40 character long, the line is 48 characters long. However, we were only pass
[93]: https://github.com/snakemake/snakefmt/issues/93
[96]: https://github.com/snakemake/snakefmt/issues/96
[97]: https://github.com/snakemake/snakefmt/pull/97
[99]: https://github.com/snakemake/snakefmt/issues/99
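
To make the new CHANGELOG entries concrete, here is a minimal sketch of the Snakemake 6 syntax this release adds formatting support for (`module`, `use rule`, and `containerized`); the module name, paths, and container URI are invented for illustration:

```
containerized: "docker://snakemake/snakemake:v6.0.0"

module other_workflow:
    snakefile: "other_workflow/Snakefile"
    config: config

use rule * from other_workflow as other_*
```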

153 changes: 95 additions & 58 deletions poetry.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -24,7 +24,7 @@ importlib_metadata = "^1.7.0"
pytest = "^5.2"
pytest-cov = "^2.8.1"
flake8 = "^3.7.9"
snakemake = "^5.32.2"
snakemake = "^6.0.0"
isort = "^5.1.0"

[tool.poetry.urls]
20 changes: 18 additions & 2 deletions snakefmt/exceptions.py
@@ -14,8 +14,24 @@ class EmptyContextError(Exception):
    pass


class NamedKeywordError(Exception):
    pass
def NotAnIdentifierError(line_nb: str, identifier: str, keyword_line: str):
    raise SyntaxError(
        f"{line_nb}'{identifier}' in '{keyword_line}' is not a valid identifier"
    )


def ColonError(line_nb: str, identifier: str, keyword_line: str):
    raise SyntaxError(
        f"{line_nb}Colon (not '{identifier}') expected after " f"'{keyword_line}'"
    )


def NewlineError(line_nb: str, keyword_line: str):
    raise SyntaxError((f"{line_nb}Newline expected after keyword " f"'{keyword_line}'"))


def SyntaxFormError(line_nb: str, keyword_line: str, syntax_form: str):
    raise SyntaxError(f"{line_nb}'{keyword_line}' not of form '{syntax_form}'")


class InvalidParameterSyntax(Exception):
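
The new helpers above are plain functions that raise a SyntaxError immediately, rather than exception subclasses like the removed NamedKeywordError. A hedged usage sketch, with an invented line-number prefix, keyword line, and expected-syntax string:

```python
from snakefmt.exceptions import SyntaxFormError

# Raises SyntaxError with a message of the form:
#   "L12: 'use rule foo' not of form 'use rule ... from ...'"
SyntaxFormError("L12: ", "use rule foo", "use rule ... from ...")
```
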
24 changes: 11 additions & 13 deletions snakefmt/formatter.py
@@ -14,16 +14,14 @@
from snakefmt.parser.syntax import (
    COMMENT_SPACING,
    TAB,
    InlineSingleParam,
    Parameter,
    ParameterSyntax,
    RuleInlineSingleParam,
    SingleParam,
    Syntax,
)
from snakefmt.types import TokenIterator

rule_like_formatted = {"rule", "checkpoint"}

triple_quote_matcher = re.compile(
r"^\s*(\w?\"{3}.*?\"{3})|^\s*(\w?'{3}.*?'{3})", re.DOTALL | re.MULTILINE
)
@@ -96,7 +94,7 @@ def flush_buffer(
formatted = "".join(formatted_lines[:-1]) # Remove the 'pass' line
else:
formatted = self.run_black_format_str(self.buffer, self.target_indent)
code_indent = self.context.code_indent
code_indent = self.syntax.code_indent
if code_indent is not None:
formatted = textwrap.indent(formatted, f"{TAB * code_indent}")

@@ -112,13 +110,14 @@ def flush_buffer(
self.buffer = ""

    def process_keyword_context(self, in_global_context: bool):
        cur_indent = self.context.cur_indent
        cur_indent = self.syntax.cur_indent
        self.add_newlines(cur_indent, in_global_context=in_global_context)
        formatted = (
            f"{TAB * cur_indent}{self.context.keyword_name}:{self.context.comment}\n"
        )
        formatted = f"{TAB * cur_indent}{self.syntax.keyword_line}"
        if self.syntax.enter_context:
            formatted += ":"
        formatted += f"{self.syntax.comment}\n"
        self.result += formatted
        self.last_recognised_keyword = self.context.keyword_name
        self.last_recognised_keyword = self.syntax.keyword_name

    def process_keyword_param(
        self, param_context: ParameterSyntax, in_global_context: bool
@@ -223,10 +222,9 @@ def format_params(self, parameters: ParameterSyntax, in_rule: bool) -> str:

        p_class = parameters.__class__
        single_param = issubclass(p_class, SingleParam)
        inline_fmting = single_param
        # Cancel single param formatting if in rule-like context and param not inline
        if in_rule and p_class is not RuleInlineSingleParam:
            inline_fmting = False
        inline_fmting = False
        if p_class is InlineSingleParam:
            inline_fmting = True

result = f"{used_indent}{parameters.keyword_name}:"
if inline_fmting:
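
The format_params change above swaps the rule-only RuleInlineSingleParam check for a vocabulary-wide InlineSingleParam check: keywords mapped to InlineSingleParam in the grammar keep their value on the keyword line, while ParamList keywords get one value per line. A rough sketch of the resulting output, assuming current snakefmt conventions (the rule itself is invented):

```
rule align:
    input:
        "reads/a.fastq",
        "reads/b.fastq",
    threads: 4
    shell:
        "bwa mem -t {threads} {input} > out.sam"
```
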
130 changes: 77 additions & 53 deletions snakefmt/parser/grammar.py
@@ -1,87 +1,111 @@
from typing import NamedTuple, Optional, Type, Union

from snakefmt.parser.syntax import (
    InlineSingleParam,
    KeywordSyntax,
    NoKeywordParamList,
    NoKeyParamList,
    ParamList,
    RuleInlineSingleParam,
    SingleParam,
    Syntax,
    Vocabulary,
)


class Grammar(NamedTuple):
class Context(NamedTuple):
"""
Ties together a vocabulary and a context (=syntax reader)
When a keyword from `vocab` is recognised, a new grammar is induced
Ties together a vocabulary and a syntax.
When a keyword from `vocab` is recognised, a new context is induced
"""

vocab: Optional[Union[Type[Vocabulary], Vocabulary]]
context: Union[Type[Syntax], Syntax]
syntax: Union[Type[Syntax], Syntax]


class PythonCode(Vocabulary):
    pass


# In common between 'use rule' and 'rule'
rule_properties = dict(
    name=Context(None, SingleParam),
    input=Context(None, ParamList),
    output=Context(None, ParamList),
    params=Context(None, ParamList),
    threads=Context(None, InlineSingleParam),
    resources=Context(None, ParamList),
    priority=Context(None, InlineSingleParam),
    version=Context(None, SingleParam),
    log=Context(None, ParamList),
    message=Context(None, SingleParam),
    benchmark=Context(None, SingleParam),
    conda=Context(None, SingleParam),
    singularity=Context(None, SingleParam),
    container=Context(None, SingleParam),
    containerized=Context(None, SingleParam),
    envmodules=Context(None, NoKeyParamList),
    wildcard_constraints=Context(None, ParamList),
    shadow=Context(None, SingleParam),
    group=Context(None, SingleParam),
    cache=Context(None, InlineSingleParam),
)


class SnakeUseRule(Vocabulary):
    spec = rule_properties


class SnakeRule(Vocabulary):
    spec = dict(
        name=Grammar(None, SingleParam),
        input=Grammar(None, ParamList),
        output=Grammar(None, ParamList),
        params=Grammar(None, ParamList),
        threads=Grammar(None, RuleInlineSingleParam),
        resources=Grammar(None, ParamList),
        priority=Grammar(None, RuleInlineSingleParam),
        version=Grammar(None, SingleParam),
        log=Grammar(None, ParamList),
        message=Grammar(None, SingleParam),
        benchmark=Grammar(None, SingleParam),
        conda=Grammar(None, SingleParam),
        singularity=Grammar(None, SingleParam),
        container=Grammar(None, SingleParam),
        envmodules=Grammar(None, NoKeywordParamList),
        wildcard_constraints=Grammar(None, ParamList),
        shadow=Grammar(None, SingleParam),
        group=Grammar(None, SingleParam),
        run=Grammar(PythonCode, KeywordSyntax),
        shell=Grammar(None, SingleParam),
        script=Grammar(None, SingleParam),
        notebook=Grammar(None, SingleParam),
        wrapper=Grammar(None, SingleParam),
        cwl=Grammar(None, SingleParam),
        cache=Grammar(None, RuleInlineSingleParam),
        run=Context(PythonCode, KeywordSyntax),
        shell=Context(None, SingleParam),
        script=Context(None, SingleParam),
        notebook=Context(None, SingleParam),
        wrapper=Context(None, SingleParam),
        cwl=Context(None, SingleParam),
        **rule_properties
    )


class SnakeModule(Vocabulary):
    spec = dict(
        snakefile=Context(None, SingleParam),
        config=Context(None, SingleParam),
        skip_validation=Context(None, SingleParam),
        meta_wrapper=Context(None, SingleParam),
        replace_prefix=Context(None, SingleParam),
    )


class SnakeSubworkflow(Vocabulary):
    spec = dict(
        snakefile=Grammar(None, SingleParam),
        workdir=Grammar(None, SingleParam),
        configfile=Grammar(None, SingleParam),
        snakefile=Context(None, SingleParam),
        workdir=Context(None, SingleParam),
        configfile=Context(None, SingleParam),
    )


class SnakeGlobal(Vocabulary):
    spec = dict(
        envvars=Grammar(None, NoKeywordParamList),
        include=Grammar(None, SingleParam),
        workdir=Grammar(None, SingleParam),
        configfile=Grammar(None, SingleParam),
        pepfile=Grammar(None, SingleParam),
        pepschema=Grammar(None, SingleParam),
        report=Grammar(None, SingleParam),
        ruleorder=Grammar(None, SingleParam),
        rule=Grammar(SnakeRule, KeywordSyntax),
        checkpoint=Grammar(SnakeRule, KeywordSyntax),
        subworkflow=Grammar(SnakeSubworkflow, KeywordSyntax),
        localrules=Grammar(None, NoKeywordParamList),
        onstart=Grammar(PythonCode, KeywordSyntax),
        onsuccess=Grammar(PythonCode, KeywordSyntax),
        onerror=Grammar(PythonCode, KeywordSyntax),
        wildcard_constraints=Grammar(None, ParamList),
        scattergather=Grammar(None, ParamList),
        singularity=Grammar(None, SingleParam),
        container=Grammar(None, SingleParam),
        envvars=Context(None, NoKeyParamList),
        include=Context(None, InlineSingleParam),
        workdir=Context(None, InlineSingleParam),
        configfile=Context(None, InlineSingleParam),
        pepfile=Context(None, InlineSingleParam),
        pepschema=Context(None, InlineSingleParam),
        report=Context(None, InlineSingleParam),
        ruleorder=Context(None, InlineSingleParam),
        rule=Context(SnakeRule, KeywordSyntax),
        checkpoint=Context(SnakeRule, KeywordSyntax),
        subworkflow=Context(SnakeSubworkflow, KeywordSyntax),
        localrules=Context(None, NoKeyParamList),
        onstart=Context(PythonCode, KeywordSyntax),
        onsuccess=Context(PythonCode, KeywordSyntax),
        onerror=Context(PythonCode, KeywordSyntax),
        wildcard_constraints=Context(None, ParamList),
        singularity=Context(None, InlineSingleParam),
        container=Context(None, InlineSingleParam),
        containerized=Context(None, InlineSingleParam),
        scattergather=Context(None, ParamList),
        module=Context(SnakeModule, KeywordSyntax),
        use=Context(SnakeUseRule, KeywordSyntax),
    )
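
A hedged sketch of how the new spec tables tie keywords to vocabularies and syntax readers; the lookups mirror the dicts above, but the surrounding parser machinery is omitted:

```python
from snakefmt.parser.grammar import SnakeGlobal, SnakeModule, SnakeUseRule
from snakefmt.parser.syntax import InlineSingleParam, KeywordSyntax

# A top-level 'module' keyword switches to the SnakeModule vocabulary,
# read with KeywordSyntax; 'use' switches to SnakeUseRule.
assert SnakeGlobal.spec["module"].vocab is SnakeModule
assert SnakeGlobal.spec["module"].syntax is KeywordSyntax
assert SnakeGlobal.spec["use"].vocab is SnakeUseRule

# Global 'include' is now an InlineSingleParam, i.e. formatted on one line.
assert SnakeGlobal.spec["include"].syntax is InlineSingleParam
```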