Stop passing parser instance to Symbol instances
There's no real reason why `Symbol` types need to take a `Parser`
instance and resolve and retain their `SymbolSpec`. This is a breaking
change, but only if your `Symbol` subclasses override `__init__`, which
they normally shouldn't need to do.
elprans committed Aug 31, 2021
1 parent acb3a70 commit c4ce270
Showing 5 changed files with 56 additions and 88 deletions.
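
For downstream users, the practical effect is that `Symbol` (and thus `Token` and `Nonterm`) instances are now constructed without a `Parser` argument. A minimal before/after sketch of the migration (the `Integer` token class and its `val` attribute are hypothetical, not part of this repository):

    import parsing

    # Before this commit, Token subclasses that overrode __init__ had to
    # accept the parser instance and forward it to the base class:
    #
    #     class Integer(parsing.Token):
    #         "%token"
    #         def __init__(self, parser, val):
    #             parsing.Token.__init__(self, parser)
    #             self.val = val
    #
    #     parser.token(Integer(parser, 42))

    # After this commit, Symbol.__init__ takes no arguments, so the
    # override simply drops the parser parameter:
    class Integer(parsing.Token):
        "%token"

        def __init__(self, val):
            self.val = val

    # Tokens are fed to the parser without a back-reference to it:
    # parser.token(Integer(42))
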
26 changes: 1 addition & 25 deletions parsing/ast.py
@@ -6,28 +6,10 @@
 """
 
 from __future__ import annotations
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from parsing.grammar import SymbolSpec
-    from parsing.interfaces import Parser
 
 
 class Symbol:
-    def __init__(self, symSpec: SymbolSpec, parser: Parser):
-        self.__symSpec = symSpec
-        self.__parser = parser
-
-    def __repr__(self) -> str:
-        return repr(self.symSpec)
-
-    @property
-    def symSpec(self) -> SymbolSpec:
-        return self.__symSpec
-
-    @property
-    def parser(self) -> Parser:
-        return self.__parser
+    pass
 
 
 class Nonterm(Symbol):
@@ -74,9 +56,6 @@ def reduceB(self, id):
             "%reduce id"
     """
 
-    def __init__(self, parser: Parser) -> None:
-        Symbol.__init__(self, parser.sym_spec(self), parser)
-
     def merge(self, other: Nonterm) -> Nonterm:
         """
         Merging happens when there is an ambiguity in the input that allows
@@ -133,9 +112,6 @@ class rparen(Token):
 class id(Token):
     "%token" """
 
-    def __init__(self, parser: Parser) -> None:
-        Symbol.__init__(self, parser.sym_spec(self), parser)
-
 
 class Precedence:
     """
2 changes: 1 addition & 1 deletion parsing/glrparser.py
@@ -132,7 +132,7 @@ def token(self, token: Token) -> None:
     def eoi(self) -> None:
         """
         Signal end-of-input to the parser."""
-        token = EndOfInput(self)
+        token = EndOfInput()
         self.token(token)
 
         # Gather the start symbols from the stacks.
4 changes: 0 additions & 4 deletions parsing/interfaces.py
@@ -62,10 +62,6 @@ class Parser(abc.ABC):
     def __init__(self, spec: Spec) -> None:
         raise NotImplementedError
 
-    @abc.abstractmethod
-    def sym_spec(self, sym: Symbol) -> SymbolSpec:
-        ...
-
    @abc.abstractmethod
     def token(self, token: Token) -> None:
         raise NotImplementedError
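
On the interface side, `sym_spec` is no longer part of the `Parser` ABC, so third-party `Parser` implementations can simply delete that method. A partial sketch of a conforming implementation after this change (`MyParser` is a hypothetical name; the other abstract methods, such as `eoi`, would still need real bodies):

    from parsing.interfaces import Parser, Spec

    class MyParser(Parser):
        def __init__(self, spec: Spec) -> None:
            # Retain the grammar spec; no sym_spec() plumbing is needed
            # anymore, since symbols no longer resolve their own specs.
            self._spec = spec

        def token(self, token) -> None:
            ...  # feed one token to the parse loop
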
10 changes: 3 additions & 7 deletions parsing/lrparser.py
@@ -9,7 +9,6 @@
     Epsilon,
     ShiftAction,
     ReduceAction,
-    SymbolSpec,
 )
 from parsing.interfaces import Parser, Spec
 
@@ -37,9 +36,6 @@ def __init__(self, spec: Spec) -> None:
         self.reset()
         self.verbose = False
 
-    def sym_spec(self, sym: Symbol) -> SymbolSpec:
-        return self._spec.sym_spec(sym)
-
     @property
     def spec(self) -> Spec:
         return self._spec
@@ -53,7 +49,7 @@ def start(self) -> list[Symbol] | None:
 
     def reset(self) -> None:
         self._start = None
-        self._stack = [(Epsilon(self), 0)]
+        self._stack = [(Epsilon(), 0)]
 
     def token(self, token: Token) -> None:
         """Feed a token to the parser."""
@@ -62,7 +58,7 @@ def token(self, token: Token) -> None:
 
     def eoi(self) -> None:
         """Signal end-of-input to the parser."""
-        token = EndOfInput(self)
+        token = EndOfInput()
         self.token(token)
 
         assert self._stack[-1][0] == token  # <$>.
@@ -136,7 +132,7 @@ def _reduce(self, production: Production) -> None:
     def _production(
         self, production: Production, rhs: list[Symbol]
     ) -> Nonterm:
-        sym = production.lhs.nontermType(self)
+        sym = production.lhs.nontermType()
         nRhs = len(rhs)
         assert nRhs == len(production.rhs)
         r = production.method(sym, *rhs)
102 changes: 51 additions & 51 deletions parsing/tests/test_basic.py
@@ -13,33 +13,33 @@ def __init__(self, spec):
         spec = parsing.Spec(a)
 
         parser = TestParser(spec)
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenStar(parser))
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenPlus(parser))
-        parser.token(a.TokenId(parser))
+        parser.token(a.TokenId())
+        parser.token(a.TokenStar())
+        parser.token(a.TokenId())
+        parser.token(a.TokenPlus())
+        parser.token(a.TokenId())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[[ID * ID] + ID]")
 
         parser = TestParser(spec)
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenPlus(parser))
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenStar(parser))
-        parser.token(a.TokenId(parser))
+        parser.token(a.TokenId())
+        parser.token(a.TokenPlus())
+        parser.token(a.TokenId())
+        parser.token(a.TokenStar())
+        parser.token(a.TokenId())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[ID + [ID * ID]]")
 
         parser = TestParser(spec)
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenStar(parser))
-        parser.token(a.TokenLparen(parser))
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenPlus(parser))
-        parser.token(a.TokenId(parser))
-        parser.token(a.TokenRparen(parser))
+        parser.token(a.TokenId())
+        parser.token(a.TokenStar())
+        parser.token(a.TokenLparen())
+        parser.token(a.TokenId())
+        parser.token(a.TokenPlus())
+        parser.token(a.TokenId())
+        parser.token(a.TokenRparen())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[ID * ([ID + ID])]")
@@ -54,33 +54,33 @@ def __init__(self, spec):
         spec = parsing.Spec(b, skinny=False)
 
         parser = TestParser(spec)
-        parser.token(b.id(parser))
-        parser.token(b.star(parser))
-        parser.token(b.id(parser))
-        parser.token(b.plus(parser))
-        parser.token(b.id(parser))
+        parser.token(b.id())
+        parser.token(b.star())
+        parser.token(b.id())
+        parser.token(b.plus())
+        parser.token(b.id())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[[ID * ID] + ID]")
 
         parser = TestParser(spec)
-        parser.token(b.id(parser))
-        parser.token(b.plus(parser))
-        parser.token(b.id(parser))
-        parser.token(b.star(parser))
-        parser.token(b.id(parser))
+        parser.token(b.id())
+        parser.token(b.plus())
+        parser.token(b.id())
+        parser.token(b.star())
+        parser.token(b.id())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[ID + [ID * ID]]")
 
         parser = TestParser(spec)
-        parser.token(b.id(parser))
-        parser.token(b.star(parser))
-        parser.token(b.lparen(parser))
-        parser.token(b.id(parser))
-        parser.token(b.plus(parser))
-        parser.token(b.id(parser))
-        parser.token(b.rparen(parser))
+        parser.token(b.id())
+        parser.token(b.star())
+        parser.token(b.lparen())
+        parser.token(b.id())
+        parser.token(b.plus())
+        parser.token(b.id())
+        parser.token(b.rparen())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[ID * ([ID + ID])]")
@@ -95,13 +95,13 @@ def __init__(self, spec):
         spec = parsing.Spec(d, skinny=False)
 
         parser = TestParser(spec)
-        parser.token(d.id(parser))
-        parser.token(d.star(parser))
-        parser.token(d.id(parser))
-        parser.token(d.plus(parser))
-        parser.token(d.id(parser))
-        parser.token(d.star(parser))
-        parser.token(d.id(parser))
+        parser.token(d.id())
+        parser.token(d.star())
+        parser.token(d.id())
+        parser.token(d.plus())
+        parser.token(d.id())
+        parser.token(d.star())
+        parser.token(d.id())
         parser.eoi()
 
         self.assertEqual(len(parser.start), 1)
@@ -117,11 +117,11 @@ def __init__(self, spec):
         spec = parsing.Spec(h, skinny=False)
 
         parser = TestGlrParser(spec)
-        parser.token(h.TokenI(parser))
-        parser.token(h.TokenPlus(parser))
-        parser.token(h.TokenI(parser))
-        parser.token(h.TokenStar(parser))
-        parser.token(h.TokenI(parser))
+        parser.token(h.TokenI())
+        parser.token(h.TokenPlus())
+        parser.token(h.TokenI())
+        parser.token(h.TokenStar())
+        parser.token(h.TokenI())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(repr(parser.start[0]), "(i + (i * i))")
@@ -149,11 +149,11 @@ def __init__(self, spec):
         spec2 = pickle.loads(specPickle)
 
         parser = TestGlrParser(spec2)
-        parser.token(b.id(parser))
-        parser.token(b.star(parser))
-        parser.token(b.id(parser))
-        parser.token(b.plus(parser))
-        parser.token(b.id(parser))
+        parser.token(b.id())
+        parser.token(b.star())
+        parser.token(b.id())
+        parser.token(b.plus())
+        parser.token(b.id())
         parser.eoi()
         self.assertEqual(len(parser.start), 1)
         self.assertEqual(parser.start[0].val, "[[ID * ID] + ID]")