Permalink
Browse files

Remove unnecessary pgen2 Driver class.

It's now a PushTokens() procedure.
  • Loading branch information...
Andy Chu
Andy Chu committed Mar 20, 2018
1 parent e67796b commit 410d82afc7ca39416198c0bf27a222f2ce1434a0
Showing with 56 additions and 63 deletions.
  1. +14 −15 opy/opy_main.py
  2. +42 −48 opy/pgen2/driver.py
View
@@ -18,7 +18,7 @@
#this_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
#sys.path.append(os.path.join(this_dir))
from .pgen2 import driver, pgen, grammar
from .pgen2 import driver, parse, pgen, grammar
from .pgen2 import token
from .pgen2 import tokenize
from . import pytree
@@ -185,8 +185,6 @@ def OpyCommandMain(argv):
symbols = None
tr = None
dr = driver.Driver(gr)
if action == 'pgen2':
grammar_path = argv[1]
pickle_path = argv[2]
@@ -218,14 +216,15 @@ def OpyCommandMain(argv):
py_path = argv[1]
with open(py_path) as f:
tokens = tokenize.generate_tokens(f.readline)
tree = dr.parse_tokens(tokens, convert=py2st, start_symbol=FILE_INPUT)
p = parse.Parser(gr, convert=py2st)
parse_tree = driver.PushTokens(p, tokens, FILE_INPUT)
if isinstance(tree, tuple):
n = CountTupleTree(tree)
if isinstance(parse_tree, tuple):
n = CountTupleTree(parse_tree)
log('COUNT %d', n)
printer = TupleTreePrinter(transformer._names)
printer.Print(tree)
printer.Print(parse_tree)
else:
tree.PrettyPrint(sys.stdout)
log('\tChildren: %d' % len(tree.children), file=sys.stderr)
@@ -236,7 +235,8 @@ def OpyCommandMain(argv):
with open(py_path) as f:
tokens = tokenize.generate_tokens(f.readline)
parse_tree = dr.parse_tokens(tokens, convert=py2st, start_symbol=FILE_INPUT)
p = parse.Parser(gr, convert=py2st)
parse_tree = driver.PushTokens(p, tokens, FILE_INPUT)
as_tree = tr.transform(parse_tree)
co = pycodegen.compile(as_tree, py_path, 'exec')
log("Compiled to %d bytes of bytecode", len(co.co_code))
@@ -251,13 +251,11 @@ def OpyCommandMain(argv):
py_expr = argv[1]
f = cStringIO.StringIO(py_expr)
tokens = tokenize.generate_tokens(f.readline)
parse_tree = dr.parse_tokens(tokens,
convert=py2st,
start_symbol=gr.symbol2number['eval_input'])
p = parse.Parser(gr, convert=py2st)
parse_tree = driver.PushTokens(p, tokens, gr.symbol2number['eval_input'])
as_tree = tr.transform(parse_tree)
co = pycodegen.compile(as_tree, '<eval input>', 'eval')
v = dis_tool.Visitor()
v.show_code(co)
print()
@@ -269,10 +267,11 @@ def OpyCommandMain(argv):
py_expr = raw_input('opy> ')
f = cStringIO.StringIO(py_expr)
tokens = tokenize.generate_tokens(f.readline)
p = parse.Parser(gr, convert=py2st)
# TODO: change this to 'single input'? Why doesn't this work?
parse_tree = dr.parse_tokens(tokens,
convert=py2st,
start_symbol=gr.symbol2number['eval_input'])
parse_tree = driver.PushTokens(p, tokens, gr.symbol2number['eval_input'])
as_tree = tr.transform(parse_tree)
co = pycodegen.compile(as_tree, '<REPL input>', 'single')
View
@@ -25,57 +25,51 @@ def log(msg, *args):
print(msg, file=sys.stderr)
class Driver(object):
def PushTokens(p, tokens, start_symbol, convert=None, debug=False):
"""Parse a series of tokens and return the syntax tree."""
# XXX Move the prefix computation into a wrapper around tokenize.
def __init__(self, grammar):
self.grammar = grammar
p.setup(start=start_symbol)
def parse_tokens(self, tokens, start_symbol=None, convert=None, debug=False):
"""Parse a series of tokens and return the syntax tree."""
# XXX Move the prefix computation into a wrapper around tokenize.
p = parse.Parser(self.grammar, convert=convert)
p.setup(start=start_symbol)
# What is all this for?
lineno = 1
column = 0
type_ = value = start = end = line_text = None
prefix = ""
for quintuple in tokens:
type_, value, start, end, line_text = quintuple
if start != (lineno, column):
assert (lineno, column) <= start, ((lineno, column), start)
s_lineno, s_column = start
if lineno < s_lineno:
prefix += "\n" * (s_lineno - lineno)
lineno = s_lineno
column = 0
if column < s_column:
prefix += line_text[column:s_column]
column = s_column
if type_ in (tokenize.COMMENT, tokenize.NL):
prefix += value
lineno, column = end
if value.endswith("\n"):
lineno += 1
column = 0
continue
if type_ == token.OP:
type_ = grammar.opmap[value]
if debug:
log("%s %r (prefix=%r)", token.tok_name[type_], value, prefix)
if p.addtoken(type_, value, (prefix, start)):
if debug:
log("Stop.")
break
prefix = ""
# What is all this for?
lineno = 1
column = 0
type_ = value = start = end = line_text = None
prefix = ""
for quintuple in tokens:
type_, value, start, end, line_text = quintuple
if start != (lineno, column):
assert (lineno, column) <= start, ((lineno, column), start)
s_lineno, s_column = start
if lineno < s_lineno:
prefix += "\n" * (s_lineno - lineno)
lineno = s_lineno
column = 0
if column < s_column:
prefix += line_text[column:s_column]
column = s_column
if type_ in (tokenize.COMMENT, tokenize.NL):
prefix += value
lineno, column = end
if value.endswith("\n"):
lineno += 1
column = 0
else:
# We never broke out -- EOF is too soon (how can this happen???)
raise parse.ParseError("incomplete input",
type_, value, (prefix, start))
return p.rootnode
continue
if type_ == token.OP:
type_ = grammar.opmap[value]
if debug:
log("%s %r (prefix=%r)", token.tok_name[type_], value, prefix)
if p.addtoken(type_, value, (prefix, start)):
if debug:
log("Stop.")
break
prefix = ""
lineno, column = end
if value.endswith("\n"):
lineno += 1
column = 0
else:
# We never broke out -- EOF is too soon (how can this happen???)
raise parse.ParseError("incomplete input",
type_, value, (prefix, start))
return p.rootnode

0 comments on commit 410d82a

Please sign in to comment.