diff --git a/parsing/ast.py b/parsing/ast.py
index 35e2f9f..39edf37 100644
--- a/parsing/ast.py
+++ b/parsing/ast.py
@@ -6,28 +6,10 @@
 """
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from parsing.grammar import SymbolSpec
-    from parsing.interfaces import Parser
 
 
 class Symbol:
-    def __init__(self, symSpec: SymbolSpec, parser: Parser):
-        self.__symSpec = symSpec
-        self.__parser = parser
-
-    def __repr__(self) -> str:
-        return repr(self.symSpec)
-
-    @property
-    def symSpec(self) -> SymbolSpec:
-        return self.__symSpec
-
-    @property
-    def parser(self) -> Parser:
-        return self.__parser
+    pass
 
 
 class Nonterm(Symbol):
@@ -74,9 +56,6 @@ def reduceB(self, id):
             "%reduce id"
     """
 
-    def __init__(self, parser: Parser) -> None:
-        Symbol.__init__(self, parser.sym_spec(self), parser)
-
    def merge(self, other: Nonterm) -> Nonterm:
        """
        Merging happens when there is an ambiguity in the input that allows
@@ -133,9 +112,6 @@ class rparen(Token):
             "%token"
 
         class id(Token):
             "%token"
     """
-
-    def __init__(self, parser: Parser) -> None:
-        Symbol.__init__(self, parser.sym_spec(self), parser)
-
 
 class Precedence:
     """
diff --git a/parsing/glrparser.py b/parsing/glrparser.py
index 8c0825a..3cda854 100644
--- a/parsing/glrparser.py
+++ b/parsing/glrparser.py
@@ -132,7 +132,7 @@ def token(self, token: Token) -> None:
     def eoi(self) -> None:
         """
         Signal end-of-input to the parser."""
-        token = EndOfInput(self)
+        token = EndOfInput()
         self.token(token)
 
         # Gather the start symbols from the stacks.
diff --git a/parsing/interfaces.py b/parsing/interfaces.py
index 2b6dc0c..851495e 100644
--- a/parsing/interfaces.py
+++ b/parsing/interfaces.py
@@ -62,10 +62,6 @@ class Parser(abc.ABC):
     def __init__(self, spec: Spec) -> None:
         raise NotImplementedError
 
-    @abc.abstractmethod
-    def sym_spec(self, sym: Symbol) -> SymbolSpec:
-        ...
-
     @abc.abstractmethod
     def token(self, token: Token) -> None:
         raise NotImplementedError
diff --git a/parsing/lrparser.py b/parsing/lrparser.py
index 15b8a55..54dcc40 100644
--- a/parsing/lrparser.py
+++ b/parsing/lrparser.py
@@ -9,7 +9,6 @@
     Epsilon,
     ShiftAction,
     ReduceAction,
-    SymbolSpec,
 )
 
 from parsing.interfaces import Parser, Spec
@@ -37,9 +36,6 @@ def __init__(self, spec: Spec) -> None:
         self.reset()
         self.verbose = False
 
-    def sym_spec(self, sym: Symbol) -> SymbolSpec:
-        return self._spec.sym_spec(sym)
-
     @property
     def spec(self) -> Spec:
         return self._spec
@@ -53,7 +49,7 @@ def start(self) -> list[Symbol] | None:
 
     def reset(self) -> None:
         self._start = None
-        self._stack = [(Epsilon(self), 0)]
+        self._stack = [(Epsilon(), 0)]
 
     def token(self, token: Token) -> None:
         """Feed a token to the parser."""
@@ -62,7 +58,7 @@ def token(self, token: Token) -> None:
 
     def eoi(self) -> None:
         """Signal end-of-input to the parser."""
-        token = EndOfInput(self)
+        token = EndOfInput()
         self.token(token)
 
         assert self._stack[-1][0] == token  # <$>.
@@ -136,7 +132,7 @@ def _reduce(self, production: Production) -> None:
     def _production(
         self, production: Production, rhs: list[Symbol]
     ) -> Nonterm:
-        sym = production.lhs.nontermType(self)
+        sym = production.lhs.nontermType()
         nRhs = len(rhs)
         assert nRhs == len(production.rhs)
         r = production.method(sym, *rhs)
diff --git a/parsing/tests/test_basic.py b/parsing/tests/test_basic.py
index acfcff3..0acffc5 100644
--- a/parsing/tests/test_basic.py
+++ b/parsing/tests/test_basic.py
@@ -13,33 +13,33 @@ def __init__(self, spec):
         spec = parsing.Spec(a)
         parser = TestParser(spec)
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenStar(parser))
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenPlus(parser))
-        parser.token(a.TokenId(parser))
+        parser.token(a.TokenId())
+        parser.token(a.TokenStar())
+        parser.token(a.TokenId())
+        parser.token(a.TokenPlus())
+        parser.token(a.TokenId())
         parser.eoi()
 
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[[ID * ID] + ID]")
 
         parser = TestParser(spec)
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenPlus(parser))
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenStar(parser))
-        parser.token(a.TokenId(parser))
+        parser.token(a.TokenId())
+        parser.token(a.TokenPlus())
+        parser.token(a.TokenId())
+        parser.token(a.TokenStar())
+        parser.token(a.TokenId())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[ID + [ID * ID]]")
 
         parser = TestParser(spec)
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenStar(parser))
-        parser.token(a.TokenLparen(parser))
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenPlus(parser))
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenRparen(parser))
+        parser.token(a.TokenId())
+        parser.token(a.TokenStar())
+        parser.token(a.TokenLparen())
+        parser.token(a.TokenId())
+        parser.token(a.TokenPlus())
+        parser.token(a.TokenId())
+        parser.token(a.TokenRparen())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[ID * ([ID + ID])]")
@@ -54,33 +54,33 @@ def __init__(self, spec):
 
         spec = parsing.Spec(b, skinny=False)
         parser = TestParser(spec)
-        parser.token(b.id(parser))
-        parser.token(b.star(parser))
-        parser.token(b.id(parser))
-        parser.token(b.plus(parser))
-        parser.token(b.id(parser))
+        parser.token(b.id())
+        parser.token(b.star())
+        parser.token(b.id())
+        parser.token(b.plus())
+        parser.token(b.id())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[[ID * ID] + ID]")
 
         parser = TestParser(spec)
-        parser.token(b.id(parser))
-        parser.token(b.plus(parser))
-        parser.token(b.id(parser))
-        parser.token(b.star(parser))
-        parser.token(b.id(parser))
+        parser.token(b.id())
+        parser.token(b.plus())
+        parser.token(b.id())
+        parser.token(b.star())
+        parser.token(b.id())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[ID + [ID * ID]]")
 
         parser = TestParser(spec)
-        parser.token(b.id(parser))
-        parser.token(b.star(parser))
-        parser.token(b.lparen(parser))
-        parser.token(b.id(parser))
-        parser.token(b.plus(parser))
-        parser.token(b.id(parser))
-        parser.token(b.rparen(parser))
+        parser.token(b.id())
+        parser.token(b.star())
+        parser.token(b.lparen())
+        parser.token(b.id())
+        parser.token(b.plus())
+        parser.token(b.id())
+        parser.token(b.rparen())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[ID * ([ID + ID])]")
@@ -95,13 +95,13 @@ def __init__(self, spec):
 
         spec = parsing.Spec(d, skinny=False)
         parser = TestParser(spec)
-        parser.token(d.id(parser))
-        parser.token(d.star(parser))
-        parser.token(d.id(parser))
-        parser.token(d.plus(parser))
-        parser.token(d.id(parser))
-        parser.token(d.star(parser))
-        parser.token(d.id(parser))
+        parser.token(d.id())
+        parser.token(d.star())
+        parser.token(d.id())
+        parser.token(d.plus())
+        parser.token(d.id())
+        parser.token(d.star())
+        parser.token(d.id())
         parser.eoi()
 
         self.assertEqual(len(parser.start), 1)
@@ -117,11 +117,11 @@ def __init__(self, spec):
 
         spec = parsing.Spec(h, skinny=False)
         parser = TestGlrParser(spec)
-        parser.token(h.TokenI(parser))
-        parser.token(h.TokenPlus(parser))
-        parser.token(h.TokenI(parser))
-        parser.token(h.TokenStar(parser))
-        parser.token(h.TokenI(parser))
+        parser.token(h.TokenI())
+        parser.token(h.TokenPlus())
+        parser.token(h.TokenI())
+        parser.token(h.TokenStar())
+        parser.token(h.TokenI())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(repr(parser.start[0]), "(i + (i * i))")
@@ -149,11 +149,11 @@ def __init__(self, spec):
 
         spec2 = pickle.loads(specPickle)
         parser = TestGlrParser(spec2)
-        parser.token(b.id(parser))
-        parser.token(b.star(parser))
-        parser.token(b.id(parser))
-        parser.token(b.plus(parser))
-        parser.token(b.id(parser))
+        parser.token(b.id())
+        parser.token(b.star())
+        parser.token(b.id())
+        parser.token(b.plus())
+        parser.token(b.id())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[[ID * ID] + ID]")
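
Note for downstream callers (an editorial summary, not part of the patch): with
this change, symbols no longer carry a back-reference to their parser.  Token
and Nonterm subclasses are instantiated with no arguments, and the parser
resolves per-symbol metadata through its grammar Spec (the removed
Parser.sym_spec forwarding already delegated to Spec.sym_spec).  A minimal
before/after sketch, assuming a hypothetical grammar module g that defines a
"%token" class id and a parser driven as in the tests above:

    # Before: each symbol stored its SymbolSpec and parser at construction time.
    parser.token(g.id(parser))

    # After: symbols are plain objects; construct them with no arguments.
    # The parser consults its Spec for the symbol's SymbolSpec instead.
    parser.token(g.id())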