From 9e1b0defc7ae8f45b42caf690971ba8b5124a994 Mon Sep 17 00:00:00 2001 From: Lucas Wiman Date: Wed, 13 Apr 2022 23:53:18 -0700 Subject: [PATCH 1/8] Make expressions_from_rules public, and refactor a bit. --- parsimonious/grammar.py | 42 ++++++++++++++++-------------- parsimonious/tests/test_grammar.py | 2 +- 2 files changed, 24 insertions(+), 20 deletions(-) diff --git a/parsimonious/grammar.py b/parsimonious/grammar.py index 367f27e..3fb8669 100644 --- a/parsimonious/grammar.py +++ b/parsimonious/grammar.py @@ -7,6 +7,7 @@ """ from collections import OrderedDict from textwrap import dedent +from typing import Type from parsimonious.exceptions import BadGrammar, UndefinedLabel from parsimonious.expressions import (Literal, Regex, Sequence, OneOf, @@ -44,6 +45,9 @@ class Grammar(OrderedDict): increase cache hit ratio. [Is this implemented yet?] """ + rule_visitor: Type["RuleVisitor"] + rule_grammar: "Grammar" + def __init__(self, rules='', **more_rules): """Construct a grammar. @@ -63,7 +67,7 @@ def __init__(self, rules='', **more_rules): k: (expression(v, k, self) if is_callable(v) else v) for k, v in more_rules.items()} - exprs, first = self._expressions_from_rules(rules, decorated_custom_rules) + exprs, first = self.expressions_from_rules(rules, decorated_custom_rules) super().__init__(exprs.items()) self.default_rule = first # may be None @@ -86,7 +90,8 @@ def _copy(self): new.default_rule = self.default_rule return new - def _expressions_from_rules(self, rules, custom_rules): + @classmethod + def expressions_from_rules(cls, rules, custom_rules): """Return a 2-tuple: a dict of rule names pointing to their expressions, and then the first rule. 
@@ -99,8 +104,8 @@ def _expressions_from_rules(self, rules, custom_rules): Expressions """ - tree = rule_grammar.parse(rules) - return RuleVisitor(custom_rules).visit(tree) + tree = cls.rule_grammar.parse(rules) + return cls.visitor_cls(custom_rules).visit(tree) def parse(self, text, pos=0): """Parse some text with the :term:`default rule`. @@ -141,18 +146,6 @@ def __repr__(self): return "Grammar({!r})".format(str(self)) -class TokenGrammar(Grammar): - """A Grammar which takes a list of pre-lexed tokens instead of text - - This is useful if you want to do the lexing yourself, as a separate pass: - for example, to implement indentation-based languages. - - """ - def _expressions_from_rules(self, rules, custom_rules): - tree = rule_grammar.parse(rules) - return TokenRuleVisitor(custom_rules).visit(tree) - - class BootstrappingGrammar(Grammar): """The grammar used to recognize the textual rules that describe other grammars @@ -162,7 +155,7 @@ class BootstrappingGrammar(Grammar): grammar description syntax. """ - def _expressions_from_rules(self, rule_syntax, custom_rules): + def expressions_from_rules(self, rule_syntax, custom_rules): """Return the rules for parsing the grammar definition syntax. Return a 2-tuple: a dict of rule names pointing to their expressions, @@ -496,13 +489,24 @@ def visit_regex(self, node, regex): 'than characters.') +class TokenGrammar(Grammar): + """A Grammar which takes a list of pre-lexed tokens instead of text + + This is useful if you want to do the lexing yourself, as a separate pass: + for example, to implement indentation-based languages. + + """ + visitor_cls = TokenRuleVisitor + + # Bootstrap to level 1... -rule_grammar = BootstrappingGrammar(rule_syntax) +Grammar.visitor_cls = RuleVisitor +rule_grammar = Grammar.rule_grammar = BootstrappingGrammar(rule_syntax) # ...and then to level 2. This establishes that the node tree of our rule # syntax is built by the same machinery that will build trees of our users' # grammars. 
And the correctness of that tree is tested, indirectly, in # test_grammar. -rule_grammar = Grammar(rule_syntax) +rule_grammar = Grammar.rule_grammar = Grammar(rule_syntax) # TODO: Teach Expression trees how to spit out Python representations of diff --git a/parsimonious/tests/test_grammar.py b/parsimonious/tests/test_grammar.py index b4ac7f7..b7accce 100644 --- a/parsimonious/tests/test_grammar.py +++ b/parsimonious/tests/test_grammar.py @@ -168,7 +168,7 @@ def test_expressions_from_rules(self): That the correct ``Expression`` tree is built is already tested in ``RuleGrammarTests``. This tests only that the ``Grammar`` base class's - ``_expressions_from_rules`` works. + ``expressions_from_rules`` works. """ greeting_grammar = Grammar('greeting = "hi" / "howdy"') From af8a0db681b45a1cd8df0e5d9aebc4d4be8bed1c Mon Sep 17 00:00:00 2001 From: Lucas Wiman Date: Sun, 17 Apr 2022 16:24:08 -0700 Subject: [PATCH 2/8] Test for grammar extensions. --- parsimonious/tests/test_grammar.py | 35 +++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/parsimonious/tests/test_grammar.py b/parsimonious/tests/test_grammar.py index b7accce..a270b7f 100644 --- a/parsimonious/tests/test_grammar.py +++ b/parsimonious/tests/test_grammar.py @@ -621,7 +621,6 @@ def test_binary_grammar(): body = ~b"[^\xFF]*" terminator = b"\xFF" """) - length = 22 assert g.parse(b"\xff22~" + (b"a" * 22) + b"\xff") is not None @@ -649,3 +648,37 @@ def test_inconsistent_string_types_in_grammar(): foo = "foo" bar = "bar" """) + + +def test_grammar_extend_method(): + g = Grammar(r""" + a = (b / c)+ + b = "b" + c = "c" + """) + g2 = g.extend(r""" + b = ^b / "B" + c = ^c / "C" + """) + assert g.parse("bc") + assert g2.parse("bBcC") + with pytest.raises(ParseError): + g.parse("bBcC") + + +def test_grammar_extend_dsl(): + g = Grammar(r""" + a = (b / c)+ + b = "b" + c = "c" + """) + g2 = Grammar(fr""" + {g.rule_definition[0]} + ====================== + b = ^b / "B" + c = ^c / "C" + 
""") + assert g.parse("bc") + assert g2.parse("bBcC") + with pytest.raises(ParseError): + g.parse("bBcC") From 1facf6f258c3105970d3b3f70ad92a176b5da67c Mon Sep 17 00:00:00 2001 From: Lucas Wiman Date: Sun, 17 Apr 2022 16:26:29 -0700 Subject: [PATCH 3/8] Add method to extend grammars. --- parsimonious/expressions.py | 7 +++++ parsimonious/grammar.py | 52 ++++++++++++++++++++++++++++++++++--- 2 files changed, 55 insertions(+), 4 deletions(-) diff --git a/parsimonious/expressions.py b/parsimonious/expressions.py index 9200365..20593c8 100644 --- a/parsimonious/expressions.py +++ b/parsimonious/expressions.py @@ -122,6 +122,9 @@ def resolve_refs(self, rule_map): # Nothing to do on the base expression. return self + def resolve_inherited_references(self, rule_map): + return self + def parse(self, text, pos=0): """Return a parse tree of ``text``. @@ -321,6 +324,10 @@ def resolve_refs(self, rule_map): self.members = tuple(m.resolve_refs(rule_map) for m in self.members) return self + def resolve_inherited_references(self, rule_map): + self.members = tuple(m.resolve_inherited_references(rule_map) for m in self.members) + return self + def __hash__(self): # Note we leave members out of the hash computation, since compounds can get added to # sets, then have their members mutated. See RuleVisitor._resolve_refs. diff --git a/parsimonious/grammar.py b/parsimonious/grammar.py index 3fb8669..9be3c6a 100644 --- a/parsimonious/grammar.py +++ b/parsimonious/grammar.py @@ -62,6 +62,8 @@ def __init__(self, rules='', **more_rules): ``rules`` in case of naming conflicts. 
""" + # Retain a copy of the arguments to allow grammar extensions + self.rule_definition = rules, more_rules decorated_custom_rules = { k: (expression(v, k, self) if is_callable(v) else v) @@ -71,6 +73,18 @@ def __init__(self, rules='', **more_rules): super().__init__(exprs.items()) self.default_rule = first # may be None + def extend(self, rules: str, **more_rules) -> "Grammar": + """Return a new grammar with the given rules added. + """ + new_rules = f""" + {self.rule_definition[0]} + ========================= + {rules} + """ + new_more_rules = self.rule_definition[1].copy() + new_more_rules.update(more_rules) + return Grammar(new_rules, **new_more_rules) + def default(self, rule_name): """Return a new Grammar whose :term:`default rule` is ``rule_name``.""" new = self._copy() @@ -215,6 +229,7 @@ def expressions_from_rules(self, rule_syntax, custom_rules): # leafmost kinds of nodes. Literals like "/" count as leaves. rules = _ rule* + rule = label equals expression equals = "=" _ literal = spaceless_literal _ @@ -231,11 +246,12 @@ def expressions_from_rules(self, rule_syntax, custom_rules): lookahead_term = "&" term _ term = not_term / lookahead_term / quantified / atom quantified = atom quantifier - atom = reference / literal / regex / parenthesized + atom = inherited_reference / reference / literal / regex / parenthesized regex = "~" spaceless_literal ~"[ilmsuxa]*"i _ parenthesized = "(" _ expression ")" _ quantifier = ~r"[*+?]|\{\d*,\d+\}|\{\d+,\d*\}|\{\d+\}" _ reference = label !equals + inherited_reference = "^" reference # A subsequent equal sign is the only thing that distinguishes a label # (which begins a new rule) from a reference (which is just a pointer to a @@ -244,8 +260,13 @@ def expressions_from_rules(self, rule_syntax, custom_rules): # _ = ~r"\s*(?:#[^\r\n]*)?\s*" _ = meaninglessness* - meaninglessness = ~r"\s+" / comment + meaninglessness = ~r"\s+" / comment / divider comment = ~r"#[^\r\n]*" + + # At least two dashes or equals signs. 
Used for separating grammars which inherit by + # concatenation. Currently has no semantic content, though may later be used to make + # the syntax of the inherited/overridden rules explicit. + divider = ~r"={2,}|-{2,}" ''') @@ -285,7 +306,10 @@ def resolve_refs(self, rule_map): # Just for debugging: def _as_rhs(self): - return '' % self + return f'<{self.__class__.__name__} to %s>' % self + + def resolve_inherited_references(self, rule_map): + return self class RuleVisitor(NodeVisitor): @@ -452,7 +476,9 @@ def visit_rules(self, node, rules_list): # override earlier ones. This lets us define rules multiple times and # have the last declaration win, so you can extend grammars by # concatenation. - rule_map = OrderedDict((expr.name, expr) for expr in rules) + rule_map = OrderedDict() + for rule in rules: + rule_map[rule.name] = rule.resolve_inherited_references(rule_map) # And custom rules override string-based rules. This is the least # surprising choice when you compare the dict constructor: @@ -472,6 +498,24 @@ def visit_rules(self, node, rules_list): return rule_map, (rule_map[rules[0].name] if isinstance(rules, list) and rules else None) + def visit_descendant_rules(self, node, visited_children): + divider, _, rules = visited_children + return rules + + def visit_inherited_reference(self, node, visited_children): + caret, name = visited_children + return LazyInheritedReference(name) + + +class LazyInheritedReference(LazyReference): + def resolve_refs(self, rule_map): + # This is a bug in RuleVisitor.visit_rules. 
+ raise AssertionError( + f"Inherited references should have been resolved, but has not been resolved {self!r}.") + + def resolve_inherited_references(self, rule_map): + return rule_map[self] + class TokenRuleVisitor(RuleVisitor): """A visitor which builds expression trees meant to work on sequences of From 15f900b3d559d107e41d6eb995e02884e713d81c Mon Sep 17 00:00:00 2001 From: Lucas Wiman Date: Tue, 19 Apr 2022 00:18:55 -0700 Subject: [PATCH 4/8] Add example of extending the grammar. --- parsimonious/tests/examples/__init__.py | 0 .../examples/grammar_syntax_extension.py | 78 +++++++++++++++++++ parsimonious/tests/test_examples.py | 22 ++++++ 3 files changed, 100 insertions(+) create mode 100644 parsimonious/tests/examples/__init__.py create mode 100644 parsimonious/tests/examples/grammar_syntax_extension.py create mode 100644 parsimonious/tests/test_examples.py diff --git a/parsimonious/tests/examples/__init__.py b/parsimonious/tests/examples/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/parsimonious/tests/examples/grammar_syntax_extension.py b/parsimonious/tests/examples/grammar_syntax_extension.py new file mode 100644 index 0000000..2aff713 --- /dev/null +++ b/parsimonious/tests/examples/grammar_syntax_extension.py @@ -0,0 +1,78 @@ +""" +This example extends parsimonious's grammar syntax for a different approach to token grammars: +* CAPITALIZED references refer to ``token.type`` names. They do not need to be explicitly + named elsewhere in the grammar. +* lowercase references refer to other rules. +* A token's attributes can match rules, e.g. requiring that an attribute be a date in a particular + format. This uses a syntax similar to Xpath's ``node[@attr='value']`` syntax. 
+""" + +from typing import Dict + +from parsimonious.grammar import Grammar +from parsimonious.expressions import Expression +from parsimonious.nodes import Node + + +class TokenRef(Expression): + def __init__(self, ref, name=""): + super().__init__(name=name) + self.ref = ref + + def _uncached_match(self, token_list, pos, cache, error): + if self.ref == getattr(token_list[pos], "type", None): + return Node(self, token_list, pos, pos + 1, children=[]) + + +class AttrsPredicateExpression(Expression): + """ + A predicate expression that matches a node with a given set of attributes. + """ + + def __init__(self, token_type, attrs: Dict[str, str]): + self.attrs = attrs + self.token_type = token_type + + def __repr__(self) -> str: + return f"AttrsPredicateExpression({self.token_type}[{self.attrs}])" % self.attrs + + def _uncached_match(self, token_list, pos, cache, error): + + tok_match = self.token_type.match_core(token_list, pos, cache, error) + if tok_match: + tok = token_list[pos] + for k, v in self.attrs.items(): + attr = getattr(tok, k, None) + if not isinstance(attr, str) or not v.parse(attr): + return None + # TODO: should children have each of the attr matches? + return Node(self, token_list, pos, pos+1, children=[tok_match]) + + +class AttrsTokenGrammar(Grammar): + rule_grammar = Grammar.rule_grammar.extend(r""" + # TODO: Support lexer natively? + term = attrs_predicate_expression / ^term + + # Token names are required to be all-caps alphanumeric, with underscores. 
+ reference = token_reference / ^reference + token_reference = ~r"[A-Z_][A-Z0-9_]*" + + attrs_predicate_expression = token_reference "[" _ attr_expressions "]" _ + attr_expressions = ("@" label "=" _ expression _)+ + """) + + class visitor_cls(Grammar.visitor_cls): + def visit_token_reference(self, node, children) -> str: + return TokenRef(node.text) + + def visit_attrs_predicate_expression(self, node, children): + label, _, lbrac, attr_expressions, rbrac, _ = children + return AttrsPredicateExpression(label, attr_expressions) + + def visit_attr_expressions(self, node, children) -> Dict[str, Expression]: + predicates = {} + for at, label, equals, _, expression, _ in children: + assert isinstance(label, str) + predicates[label] = expression + return predicates diff --git a/parsimonious/tests/test_examples.py b/parsimonious/tests/test_examples.py new file mode 100644 index 0000000..0470806 --- /dev/null +++ b/parsimonious/tests/test_examples.py @@ -0,0 +1,22 @@ +from types import SimpleNamespace + +import pytest + +from parsimonious.exceptions import ParseError +from parsimonious.tests.examples.grammar_syntax_extension import AttrsTokenGrammar + + +def noparse(grammar, text): + with pytest.raises(ParseError): + grammar.parse(text) + + +def test_extended_grammar(): + g = AttrsTokenGrammar(r""" + a = B[@foo=("bar" / "baz") @baz=~"baz"+] + """) + + assert g.parse([SimpleNamespace(type="B", foo="bar", baz="bazbaz")]) + assert g.parse([SimpleNamespace(type="B", foo="baz", baz="bazbaz")]) + noparse(g, [SimpleNamespace(type="C", foo="bar", baz="baz")]) + noparse(g, [SimpleNamespace(type="C", foo="bar", baz="baz")]) \ No newline at end of file From 9daef80e211f50760d60e798d5259394f497d69b Mon Sep 17 00:00:00 2001 From: Lucas Wiman Date: Thu, 21 Apr 2022 19:01:15 -0700 Subject: [PATCH 5/8] Make the example importable --- parsimonious/{tests => }/examples/__init__.py | 0 parsimonious/{tests => }/examples/grammar_syntax_extension.py | 0 parsimonious/tests/test_examples.py | 2 
+- 3 files changed, 1 insertion(+), 1 deletion(-) rename parsimonious/{tests => }/examples/__init__.py (100%) rename parsimonious/{tests => }/examples/grammar_syntax_extension.py (100%) diff --git a/parsimonious/tests/examples/__init__.py b/parsimonious/examples/__init__.py similarity index 100% rename from parsimonious/tests/examples/__init__.py rename to parsimonious/examples/__init__.py diff --git a/parsimonious/tests/examples/grammar_syntax_extension.py b/parsimonious/examples/grammar_syntax_extension.py similarity index 100% rename from parsimonious/tests/examples/grammar_syntax_extension.py rename to parsimonious/examples/grammar_syntax_extension.py diff --git a/parsimonious/tests/test_examples.py b/parsimonious/tests/test_examples.py index 0470806..cf0e44d 100644 --- a/parsimonious/tests/test_examples.py +++ b/parsimonious/tests/test_examples.py @@ -3,7 +3,7 @@ import pytest from parsimonious.exceptions import ParseError -from parsimonious.tests.examples.grammar_syntax_extension import AttrsTokenGrammar +from parsimonious.examples.grammar_syntax_extension import AttrsTokenGrammar def noparse(grammar, text): From 3e3bc95961f3cde4f8d7b7f9124c038e6a442b85 Mon Sep 17 00:00:00 2001 From: Lucas Wiman Date: Thu, 21 Apr 2022 19:02:38 -0700 Subject: [PATCH 6/8] Fix comment --- parsimonious/grammar.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parsimonious/grammar.py b/parsimonious/grammar.py index 9be3c6a..62d990c 100644 --- a/parsimonious/grammar.py +++ b/parsimonious/grammar.py @@ -509,7 +509,7 @@ def visit_inherited_reference(self, node, visited_children): class LazyInheritedReference(LazyReference): def resolve_refs(self, rule_map): - # This is a bug in RuleVisitor.visit_rules. + # If triggered, this indicates a bug in RuleVisitor.visit_rules. 
raise AssertionError( f"Inherited references should have been resolved, but has not been resolved {self!r}.") From 1871c3ee1535afd1da1c406b122b7815630b11b6 Mon Sep 17 00:00:00 2001 From: Lucas Wiman Date: Wed, 27 Apr 2022 00:33:45 -0700 Subject: [PATCH 7/8] Fixes to example grammar. --- .../examples/grammar_syntax_extension.py | 25 +++++++++++++++--- parsimonious/grammar.py | 2 +- parsimonious/nodes.py | 3 ++- parsimonious/tests/test_examples.py | 26 ++++++++++++++++--- 4 files changed, 47 insertions(+), 9 deletions(-) diff --git a/parsimonious/examples/grammar_syntax_extension.py b/parsimonious/examples/grammar_syntax_extension.py index 2aff713..004afe9 100644 --- a/parsimonious/examples/grammar_syntax_extension.py +++ b/parsimonious/examples/grammar_syntax_extension.py @@ -9,7 +9,7 @@ from typing import Dict -from parsimonious.grammar import Grammar +from parsimonious.grammar import Grammar, LazyReference from parsimonious.expressions import Expression from parsimonious.nodes import Node @@ -19,6 +19,18 @@ def __init__(self, ref, name=""): super().__init__(name=name) self.ref = ref + def __repr__(self): + if self.name: + return f"TokenRef({self.ref!r}, {self.name!r})" + else: + return f"TokenRef({self.ref!r})" + + def __str__(self): + return self.ref + + def _as_rhs(self): + return self.ref + def _uncached_match(self, token_list, pos, cache, error): if self.ref == getattr(token_list[pos], "type", None): return Node(self, token_list, pos, pos + 1, children=[]) @@ -56,7 +68,7 @@ class AttrsTokenGrammar(Grammar): # Token names are required to be all-caps alphanumeric, with underscores. 
reference = token_reference / ^reference - token_reference = ~r"[A-Z_][A-Z0-9_]*" + token_reference = ~r"[A-Z_][A-Z0-9_]*" _ !equals attrs_predicate_expression = token_reference "[" _ attr_expressions "]" _ attr_expressions = ("@" label "=" _ expression _)+ @@ -64,7 +76,7 @@ class AttrsTokenGrammar(Grammar): class visitor_cls(Grammar.visitor_cls): def visit_token_reference(self, node, children) -> str: - return TokenRef(node.text) + ref, _, _ = children + return TokenRef(ref.text, name=ref.text) + + def visit_reference(self, node, children): + if isinstance(children[0], TokenRef): + return children[0] + else: + return LazyReference(children[0]) + def visit_attrs_predicate_expression(self, node, children): label, _, lbrac, attr_expressions, rbrac, _ = children return AttrsPredicateExpression(label, attr_expressions) diff --git a/parsimonious/grammar.py b/parsimonious/grammar.py index 62d990c..a888e99 100644 --- a/parsimonious/grammar.py +++ b/parsimonious/grammar.py @@ -406,7 +406,7 @@ def visit_reference(self, node, reference): We resolve them all later. """ - label, not_equals = reference + label, *_ = reference return LazyReference(label) diff --git a/parsimonious/nodes.py b/parsimonious/nodes.py index 0607b00..04c1a61 100644 --- a/parsimonious/nodes.py +++ b/parsimonious/nodes.py @@ -215,13 +215,13 @@ def visit(self, node): # Don't catch and re-wrap already-wrapped exceptions. raise except Exception as exc: - # implentors may define exception classes that should not be + # implementors may define exception classes that should not be # wrapped. if isinstance(exc, self.unwrapped_exceptions): raise # Catch any exception, and tack on a parse tree so it's easier to # see where it went wrong. 
exc_class = type(exc) raise VisitationError(exc, exc_class, node) def generic_visit(self, node, visited_children): diff --git a/parsimonious/tests/test_examples.py b/parsimonious/tests/test_examples.py index cf0e44d..16fbf1f 100644 --- a/parsimonious/tests/test_examples.py +++ b/parsimonious/tests/test_examples.py @@ -12,11 +12,29 @@ def noparse(grammar, text): def test_extended_grammar(): + Tok = SimpleNamespace g = AttrsTokenGrammar(r""" a = B[@foo=("bar" / "baz") @baz=~"baz"+] """) - assert g.parse([SimpleNamespace(type="B", foo="bar", baz="bazbaz")]) - assert g.parse([SimpleNamespace(type="B", foo="baz", baz="bazbaz")]) - noparse(g, [SimpleNamespace(type="C", foo="bar", baz="baz")]) - noparse(g, [SimpleNamespace(type="C", foo="bar", baz="baz")]) \ No newline at end of file + assert g.parse([Tok(type="B", foo="bar", baz="bazbaz")]) + assert g.parse([Tok(type="B", foo="baz", baz="bazbaz")]) + noparse(g, [Tok(type="C", foo="bar", baz="baz")]) + noparse(g, [Tok(type="C", foo="bar", baz="baz")]) + + g2 = AttrsTokenGrammar(r""" + segment = TEXT (DATA_SEP TEXT)* SEG_TERM + """) + Tok2 = lambda t: SimpleNamespace(type=t) + tokens = [ + Tok2("TEXT"), + *([Tok2("DATA_SEP"), Tok2("TEXT")] * 10), + Tok2("SEG_TERM"), + ] + assert g2.parse(tokens) + SEGMENT_GRAMMAR = AttrsTokenGrammar(r""" + x12 = segment+ + segment = TEXT (DATA_SEP elem)* SEG_TERM + elem = value (REPEAT_SEP value)* + value = TEXT (COMPONENT_SEP TEXT)* + """) From 38973610cf0b80deaf6c24dfe8103153580db931 Mon Sep 17 00:00:00 2001 From: Lucas Wiman Date: Mon, 2 May 2022 00:37:43 -0700 Subject: [PATCH 8/8] Fix a bug in the example. 
--- parsimonious/examples/grammar_syntax_extension.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/parsimonious/examples/grammar_syntax_extension.py b/parsimonious/examples/grammar_syntax_extension.py index 004afe9..1f1692d 100644 --- a/parsimonious/examples/grammar_syntax_extension.py +++ b/parsimonious/examples/grammar_syntax_extension.py @@ -41,9 +41,10 @@ class AttrsPredicateExpression(Expression): A predicate expression that matches a node with a given set of attributes. """ - def __init__(self, token_type, attrs: Dict[str, str]): + def __init__(self, token_type, attrs: Dict[str, str], name=""): self.attrs = attrs self.token_type = token_type + super().__init__(name=name) def __repr__(self) -> str: return f"AttrsPredicateExpression({self.token_type}[{self.attrs}])" % self.attrs