Skip to content

Commit

Permalink
Checkpoint: a bunch of debug statements for fixing short-if.
Browse files Browse the repository at this point in the history
  • Loading branch information
dansanderson committed Oct 20, 2015
1 parent 0210b27 commit b909ff4
Show file tree
Hide file tree
Showing 5 changed files with 117 additions and 12 deletions.
4 changes: 2 additions & 2 deletions p8tool
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

#!/usr/bin/env python3
"""Command-line launcher for p8tool.

Delegates immediately to pico8.tool.main() with the program arguments
(minus the program name) and uses its return value as the exit status.
"""

import sys

from pico8 import tool

if __name__ == '__main__':
    sys.exit(tool.main(sys.argv[1:]))
1 change: 1 addition & 0 deletions pico8/lua/lexer.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,7 @@ class TokSymbol(Token):
# Pattern/token pairs for comments, whitespace and newlines.
# NOTE(review): the r'\r\n' entry precedes r'\n' — presumably matchers are
# tried in list order, so a CRLF pair lexes as a single TokNewline instead
# of leaving a stray '\r'; confirm against the lexer's match loop (not
# visible in this hunk).
_TOKEN_MATCHERS.extend([
    (re.compile(r'--.*'), TokComment),
    (re.compile(r'[ \t]+'), TokSpace),
    (re.compile(r'\r\n'), TokNewline),  # Windows line ending, counted once
    (re.compile(r'\n'), TokNewline),
])
_TOKEN_MATCHERS.extend([
Expand Down
11 changes: 11 additions & 0 deletions pico8/lua/lua.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,13 @@ def get_token_count(self):
c += 1
return c

def get_line_count(self):
    """Counts the source lines seen by the lexer.

    Returns:
        The number of TokNewline tokens in the lexed token stream.
    """
    return sum(
        1 for tok in self._lexer._tokens
        if isinstance(tok, lexer.TokNewline))

def get_title(self):
title_tok = self._lexer.tokens[0]
if not isinstance(title_tok, lexer.TokComment):
Expand All @@ -52,6 +59,10 @@ def tokens(self):
def root(self):
return self._parser.root

@property
def version(self):
    """The code version for this Lua object.

    NOTE(review): presumably the value supplied to the constructor via
    from_lines(..., version); the assignment isn't visible in this hunk —
    confirm.
    """
    return self._version

@classmethod
def from_lines(cls, lines, version):
"""Produce a Lua data object from lines of Lua source.
Expand Down
24 changes: 21 additions & 3 deletions pico8/lua/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,22 +261,33 @@ def _continue():
self._accept(lexer.TokSymbol(';')) is not None):
# Eat leading and intervening semicolons.
pass
last_pos = self._pos
stat = self._stat()
if stat is None:
if ((stat is None) or
(max_pos is not None and self._pos > max_pos)):
self._pos = last_pos
break
stats.append(stat)
print('DEBUG: ... first stats: {} found, pos={}'.format(len(stats), self._pos))
if _continue():
while self._accept(lexer.TokSymbol(';')) is not None:
# Eat leading and intervening semicolons.
pass
last_pos = self._pos
print('DEBUG: ... before calling laststat, pos={}'.format(last_pos))
laststat = self._laststat()
if laststat is not None:
if ((laststat is None) or
(max_pos is not None and self._pos > max_pos)):
print ('DEBUG: laststat={} pos={} max_pos={}'.format(laststat, self._pos, max_pos))
self._pos = last_pos
else:
stats.append(laststat)
print('DEBUG: ... last stat: {} found, pos={}'.format(len(stats), self._pos))
while (_continue() and
(self._accept(lexer.TokSymbol(';')) is not None)):
# Eat trailing semicolons.
pass
if max_pos is not None and self._pos != max_pos:
if max_pos is not None and self._pos > max_pos:
return None
return Chunk(stats, start=pos, end=self._pos)

Expand Down Expand Up @@ -362,6 +373,7 @@ def _stat(self):
while (then_end_pos < len(self._tokens) and
self._tokens[then_end_pos] != lexer.TokNewline('\n')):
then_end_pos += 1
print('DEBUG: short-if pos={} then_end_pos={} tokens={}'.format(self._pos, then_end_pos, self._tokens[self._pos:then_end_pos]))
block = self._assert(self._chunk(max_pos=then_end_pos),
'valid chunk in short-if')
# (Use exp.value here to unwrap it from the bracketed
Expand Down Expand Up @@ -447,11 +459,15 @@ def _laststat(self):
StatReturn(explist)
"""
pos = self._pos
print('DEBUG: ... laststat initial pos={}'.format(self._pos))
if self._accept(lexer.TokKeyword('break')) is not None:
return StatBreak(start=pos, end=self._pos)
if self._accept(lexer.TokKeyword('return')) is not None:
print('DEBUG: ... laststat pos before explist: {}'.format(self._pos))
explist = self._explist()
print('DEBUG: ... laststat return, explist={}, pos={}'.format(explist, self._pos))
return StatReturn(explist, start=pos, end=self._pos)
print('DEBUG: ... no laststat')
self._pos = pos
return None

Expand Down Expand Up @@ -551,6 +567,7 @@ def _explist(self):
exps = []
exp = self._exp()
if exp is None:
self._pos = pos
return None
exps.append(exp)
while True:
Expand All @@ -559,6 +576,7 @@ def _explist(self):
exp = self._assert(self._exp(), 'exp after comma')
exps.append(exp)
if len(exps) == 0:
self._pos = pos
return None
return ExpList(exps, start=pos, end=self._pos)

Expand Down
89 changes: 82 additions & 7 deletions pico8/tool.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,22 +4,29 @@


import argparse
import csv
import os
import sys
import textwrap

from . import util

from .game import game
from .lua import lexer
from .lua import parser

def _get_argparser():
"""Builds and returns the argument parser."""
# TODO: real help text
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent('''
Commands:
stats [--csv] <filename> [<filename>...]
Display stats about one or more carts.
'''))
parser.add_argument(
'command', type=str, nargs='1',
'command', type=str,
help='the command to execute')
parser.add_argument(
'filename', type=str, nargs='*',
help='the names of files to process')
parser.add_argument(
'--indentwidth', type=int, action='store', default=2,
help='the indent width as a number of spaces')
Expand All @@ -30,19 +37,87 @@ def _get_argparser():
parser.add_argument(
'--minify', action='store_true',
help='minifies the code instead of formatting it')
parser.add_argument(
'--csv', action='store_true',
help='for stats, output a CSV file instead of text')
parser.add_argument(
'-q', '--quiet', action='store_true',
help='suppresses inessential messages')
parser.add_argument(
'filename', type=str, nargs='*',
help='the names of files to process')

return parser


def main(orig_args):
    """Entry point for the p8tool command line tool.

    Currently supports the 'stats' command, which prints statistics about
    one or more .p8 / .p8.png carts either as text or as CSV.

    Args:
        orig_args: The command line arguments, not including the program
            name (i.e. sys.argv[1:]).

    Returns:
        0 on success, 1 on error.
    """
    # (Fixed: dropped a stale duplicate parse_args line left over from the
    # pre-change version, and the never-read `has_errors` local.)
    arg_parser = _get_argparser()
    args = arg_parser.parse_args(args=orig_args)
    util.set_quiet(args.quiet)

    if args.command == 'stats':
        csv_writer = None
        if args.csv:
            # CSV mode: emit the header row up front; one data row per cart
            # follows.
            csv_writer = csv.writer(sys.stdout)
            csv_writer.writerow([
                'Filename',
                'Title',
                'Byline',
                'Code Version',
                'Char Count',
                'Token Count',
                'Line Count'
            ])

        for fname in args.filename:
            if not fname.endswith('.p8.png') and not fname.endswith('.p8'):
                # Unsupported extension: report and continue with the next
                # file rather than aborting the whole run.
                print('{}: filename must end in .p8 or .p8.png'.format(fname))
                continue
            is_p8 = fname.endswith('.p8')

            g = None
            try:
                if is_p8:
                    with open(fname, 'r') as fh:
                        g = game.Game.from_p8_file(fh)
                else:
                    # .p8.png carts are binary images.
                    with open(fname, 'rb') as fh:
                        g = game.Game.from_p8png_file(fh)
            except lexer.LexerError as e:
                print('{}: {}'.format(fname, e))
                return 1
            except parser.ParserError as e:
                print('{}: {}'.format(fname, e))
                return 1

            if args.csv:
                csv_writer.writerow([
                    os.path.basename(fname),
                    g.lua.get_title(),
                    g.lua.get_byline(),
                    g.lua.version,
                    g.lua.get_char_count(),
                    g.lua.get_token_count(),
                    g.lua.get_line_count()
                ])
            else:
                title = g.lua.get_title()
                byline = g.lua.get_byline()

                if title is not None:
                    print('{} ({})'.format(title, os.path.basename(fname)))
                else:
                    print(os.path.basename(fname))
                if byline is not None:
                    print(byline)
                print('version: {} lines: {} chars: {} tokens: {}'.format(
                    g.lua.version, g.lua.get_line_count(),
                    g.lua.get_char_count(), g.lua.get_token_count()))
                print('')
    else:
        # Unknown command: show usage and fail.
        arg_parser.print_help()
        return 1

    return 0

0 comments on commit b909ff4

Please sign in to comment.