remove parens in more cases (esp. useful for new constants feature); better handle negative consts as arguments to ^; fix sometimes incorrectly reported token count after minify; add bbs test results to repo

thisismypassport committed Apr 23, 2024
1 parent c5ce833 commit 642de99
Showing 33 changed files with 3,971 additions and 155 deletions.
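Note on the headline change: Lua's `^` binds tighter than unary minus (and, unlike most binary ops, is right-associative), so parentheses around a negative constant may only be dropped when the constant is not the base of an exponentiation. A standalone check of that precedence fact, using Python's `**`, which behaves the same way here (my illustration, not code from this commit):

    # '**' (like Lua's '^') binds tighter than unary minus:
    assert -2 ** 2 == -4       # parses as -(2 ** 2): dropping parens changes the value
    assert (-2) ** 2 == 4      # the parenthesized form the minifier must preserve
    assert 2 ** -2 == 0.25     # as a right operand, a unary minus needs no parens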
78 changes: 46 additions & 32 deletions pico_minify.py
@@ -1,8 +1,9 @@
 from utils import *
+from pico_defs import fixnum_is_negative
 from pico_tokenize import TokenType
 from pico_tokenize import StopTraverse, k_skip_children
 from pico_parse import Node, NodeType, VarKind
-from pico_parse import k_unary_ops_prec, get_precedence, is_right_assoc
+from pico_parse import k_unary_ops_prec, get_precedence, is_right_assoc, can_replace_with_unary
 from pico_parse import is_vararg_expr, is_short_block_stmt, is_global_or_builtin_local
 from pico_output import format_fixnum, format_string_literal
 from pico_output import output_min_wspace, output_original_wspace
@@ -292,6 +293,40 @@ def fixup_nodes_pre(node):
 
     if node.type in (NodeType.if_, NodeType.while_) and node.short and (analysis.new_shorts[node.type] == False):
         minify_change_shorthand(node, False)
+
+    # remove unneeded groups
+
+    while node.type == NodeType.group:
+        inner, outer = node.child, node.parent
+        inner_prec, outer_prec = get_precedence(inner), get_precedence(outer)
+        needed = True
+        if e(inner_prec) and e(outer_prec) and (inner_prec > outer_prec or (inner_prec == outer_prec and
+                (outer_prec == k_unary_ops_prec or is_right_assoc(outer) == (outer.right == node)))):
+            needed = False
+        elif e(outer_prec) and inner.type in (NodeType.var, NodeType.index, NodeType.member, NodeType.call, NodeType.varargs):
+            needed = False
+        elif e(outer_prec) and inner.type == NodeType.const and (focus.tokens or can_replace_with_unary(node) or
+                not (inner.token.type == TokenType.number and fixnum_is_negative(inner.token.fixnum_value))):
+            needed = False
+        elif outer.type in (NodeType.group, NodeType.table_member, NodeType.table_index, NodeType.op_assign):
+            needed = False
+        elif outer.type == NodeType.call and (node in outer.args[:-1] or
+                (outer.args and node == outer.args[-1] and not is_vararg_expr(inner))):
+            needed = False
+        elif outer.type in (NodeType.assign, NodeType.local) and (node in outer.sources[:-1] or
+                (outer.sources and node == outer.sources[-1] and (not is_vararg_expr(inner) or len(outer.targets) <= len(outer.sources)))):
+            needed = False
+        elif outer.type in (NodeType.return_, NodeType.table) and (node in outer.items[:-1] or
+                (outer.items and node == outer.items[-1] and not is_vararg_expr(inner))):
+            needed = False
+        elif outer.type in (NodeType.if_, NodeType.elseif, NodeType.while_, NodeType.until) and not outer.short:
+            needed = False
+
+        if needed:
+            break
+        else:
+            node.replace_with(node.child.move())
+            # node may now be another group, so loop
 
 def fixup_nodes_post(node):
     if minify_tokens:
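The `needed` chain above is standard expression-printer logic: a group may be dropped when the inner expression binds at least as tightly as its context demands (with associativity breaking precedence ties), or when the context imposes no precedence at all (call arguments, assignment sources, statement conditions, and so on, with vararg expressions as the exception). A self-contained sketch of just the precedence/associativity case, using an invented node shape rather than the repo's Node API:

    # Toy version of the precedence test in fixup_nodes_pre (hypothetical names).
    PREC = {"or": 1, "and": 2, "+": 5, "-": 5, "*": 6, "/": 6, "^": 10}
    RIGHT_ASSOC = {"^"}

    def group_needed(inner_op, outer_op, inner_is_right_child):
        """Return False when '(inner)' may lose its parens under outer_op."""
        inner_prec, outer_prec = PREC[inner_op], PREC[outer_op]
        if inner_prec > outer_prec:
            return False            # e.g. (a*b)+c -> a*b+c
        if inner_prec == outer_prec:
            # a tie is only safe on the side the operator associates with
            return (outer_op in RIGHT_ASSOC) != inner_is_right_child
        return True                 # e.g. (a+b)*c must keep its parens

    assert not group_needed("*", "+", False)   # (a*b)+c: drop
    assert group_needed("+", "*", False)       # (a+b)*c: keep
    assert not group_needed("-", "-", False)   # (a-b)-c: drop (left-assoc tie)
    assert group_needed("-", "-", True)        # a-(b-c): keep
    assert group_needed("^", "^", False)       # (a^b)^c: keep ('^' is right-assoc)
    assert not group_needed("^", "^", True)    # a^(b^c): drop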
@@ -310,10 +345,6 @@ def fixup_nodes_post(node):
             prev = prev.prev_sibling()
         if prev and prev.type == node.type:
             minify_merge_assignments(prev, node, ctxt, safe_reorder)
-
-def remove_parens(token):
-    token.erase("(")
-    token.parent.erase_token(-1, ")")
 
 def fixup_tokens(token):
 
@@ -340,27 +371,9 @@ def fixup_tokens(token):
     if token.value == "(" and token.parent.type == NodeType.call and len(token.parent.args) == 1:
         arg = token.parent.args[0]
         if arg.type == NodeType.table or (arg.type == NodeType.const and arg.token.type == TokenType.string):
-            return remove_parens(token)
-
-    if token.value == "(" and token.parent.type == NodeType.group:
-        inner, outer = token.parent.child, token.parent.parent
-        inner_prec, outer_prec = get_precedence(inner), get_precedence(outer)
-        if e(inner_prec) and e(outer_prec) and (inner_prec > outer_prec or (inner_prec == outer_prec and
-                (outer_prec == k_unary_ops_prec or is_right_assoc(outer) == (outer.right == token.parent)))):
-            return remove_parens(token)
-        if outer.type in (NodeType.group, NodeType.table_member, NodeType.table_index, NodeType.op_assign):
-            return remove_parens(token)
-        if outer.type == NodeType.call and (token.parent in outer.args[:-1] or
-                (outer.args and token.parent == outer.args[-1] and not is_vararg_expr(inner))):
-            return remove_parens(token)
-        if outer.type in (NodeType.assign, NodeType.local) and (token.parent in outer.sources[:-1] or
-                (outer.sources and token.parent == outer.sources[-1] and (not is_vararg_expr(inner) or len(outer.targets) <= len(outer.sources)))):
-            return remove_parens(token)
-        if outer.type in (NodeType.return_, NodeType.table) and (token.parent in outer.items[:-1] or
-                (outer.items and token.parent == outer.items[-1] and not is_vararg_expr(inner))):
-            return remove_parens(token)
-        if outer.type in (NodeType.if_, NodeType.elseif, NodeType.while_, NodeType.until) and not outer.short:
-            return remove_parens(token)
+            token.erase("(")
+            token.parent.erase_token(-1, ")")
+            return
 
     # replace tokens for higher consistency
 
@@ -377,13 +390,14 @@ def fixup_tokens(token):
             token.modify(minify_string_literal(ctxt, token, focus))
 
     if token.type == TokenType.number:
-        outer_prec = get_precedence(token.parent.parent) if token.parent.type == NodeType.const else None
-        allow_unary = outer_prec is None or outer_prec < k_unary_ops_prec
+        allow_unary = can_replace_with_unary(token.parent)
         token.modify(format_fixnum(token.fixnum_value, sign=None if allow_unary else ''))
-        if token.value.startswith("-") or token.value.startswith("~"):
-            # insert synthetic unary token, so that output_tokens's tokenize won't get confused
-            token.parent.insert_token(0, TokenType.punct, token.value[0], near_next=True)
-            token.modify(token.value[1:])
+
+    if token.type == TokenType.number:
+        if token.value.startswith("-") or token.value.startswith("~"): # either due to format_fixnum above, or due to ConstToken.value
+            # insert synthetic unary token, so that output_tokens's tokenize and root.get_tokens() won't get confused
+            token.parent.insert_token(0, TokenType.punct, token.value[0], near_next=True)
+            token.modify(token.value[1:])
 
 root.traverse_nodes(fixup_nodes_pre, fixup_nodes_post, tokens=fixup_tokens)
 
3 changes: 1 addition & 2 deletions pico_output.py
@@ -39,7 +39,7 @@ def str_add_1(str):
         return str[:-1] + chr(ord(str[-1]) + 1)
 
     numvalue = value / (1 << 16)
-    decvalue = "%.10f" % numvalue
+    decvalue = "%.6f" % numvalue
     while "." in decvalue:
         nextvalue = decvalue[:-1]
         nextupvalue = str_add_1(nextvalue)
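Dropping from `%.10f` to `%.6f` is safe for PICO-8's 16.16 fixed-point values: adjacent representable fractions differ by 2^-16 (about 1.5e-5), so six fractional digits still identify every value uniquely, and the shortening loop above only trims digits from there. A quick standalone verification of that claim (mine, not part of the commit):

    # Every 16.16 fixnum fraction survives a round-trip through "%.6f": the
    # 6-digit decimal is within 5e-7 of k/65536, well under the half-step 2**-17.
    for k in range(65536):
        assert round(float("%.6f" % (k / 65536)) * 65536) == k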
@@ -174,7 +174,6 @@ def output_original_wspace(root, exclude_comments=False):
     """convert a root back to a string, using original whitespace (optionally except comments)"""
     output = []
     prev_token = Token.none
-    prev_welded_token = None
     prev_vline = 0
     need_linebreak = False
 
15 changes: 13 additions & 2 deletions pico_parse.py
@@ -146,9 +146,12 @@ def __init__(m, type, children, **kwargs):
         for child in children:
             child.parent = m
 
-    def get_tokens(m):
+    def get_tokens(m): # (not including erased tokens, whereas traverse includes them)
         tokens = []
-        m.traverse_tokens(lambda token: tokens.append(token))
+        def on_token(token):
+            if token.value != None:
+                tokens.append(token)
+        m.traverse_tokens(on_token)
         return tokens
 
     short = False # default property
@@ -965,4 +968,12 @@ def is_right_assoc(node):
     else:
         return False
 
+def can_replace_with_unary(node):
+    parent = node.parent
+    if not parent or (parent.type == NodeType.binary_op and parent.right is node):
+        return True
+    else:
+        prec = get_precedence(parent)
+        return prec is None or prec < k_unary_ops_prec
+
 from pico_process import Error
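`can_replace_with_unary` answers one question: may a constant be emitted with a leading unary `-` or `~` in this slot without wrapping it in parentheses? That holds when there is no enclosing operator at all, when the node is the right operand of a binary op, or when the enclosing operator binds more loosely than unary operators. Some cases the rule implies, written as PICO-8 snippets in Python strings (my reading of the function, not tests from the repo):

    # (source with an inlined negative constant, parens required?)
    cases = [
        ("x = -1",     False),  # assignment source: no enclosing precedence
        ("x = a ^ -2", False),  # right operand of a binary op is always fine
        ("x = -2 ^ a", True),   # as the base of '^': -2^a parses as -(2^a)
        ("x = -(-1)",  True),   # under another unary '-': bare '--1' starts a comment
    ]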
12 changes: 8 additions & 4 deletions pico_tokenize.py
@@ -202,9 +202,11 @@ def __init__(m, type, other, fixnum_value=None, string_value=None, value=None):
         lazy_property.clear(m, "value")
 
     @lazy_property
-    def value(m): # will not be called if minified normally, so output is suboptimal
+    def value(m): # used during going over chars for rename (tsk...) and for output when not minify-tokens
+        # (but not used for output under minify-tokens)
         if e(m.fixnum_value):
-            return format_fixnum(m.fixnum_value, sign='')
+            allow_unary = can_replace_with_unary(m.parent)
+            return format_fixnum(m.fixnum_value, sign=None if allow_unary else "")
         else:
             return format_string_literal(m.string_value, long=False)
 
@@ -528,12 +530,14 @@ def tokenize_long_string(off):
 def count_tokens(tokens):
     count = 0
     for i, token in enumerate(tokens):
+        assert token.value != None
+
         if token.children:
             for comment in token.children:
                 if comment.hint == CommentHint.lint and k_lint_count_stop in comment.hintdata:
                     return count
 
-        if token.value in (",", ".", ":", ";", "::", ")", "]", "}", "end", "local", None):
+        if token.value in (",", ".", ":", ";", "::", ")", "]", "}", "end", "local"):
             continue
 
         if token.value in ("-", "~") and i+1 < len(tokens) and tokens[i+1].type == TokenType.number and \
@@ -652,6 +656,6 @@ def parse_string_literal(str):
 
     return "".join(litparts)
 
-from pico_parse import Node, VarKind
+from pico_parse import Node, VarKind, can_replace_with_unary
 from pico_output import format_fixnum, format_string_literal
 from pico_process import Error
12 changes: 7 additions & 5 deletions run_bbs_tests.py
@@ -229,9 +229,9 @@ def run(focus):
     filename = str(focus) if focus else "normal"
 
     input_json = path_join("test_bbs", "input.json")
-    output_json = path_join("test_bbs", "output", filename + ".json")
-    compare_json = path_join("test_bbs", "compare", filename + ".json")
-    unfocused_json = path_join("test_bbs", "compare", "normal.json") if g_opts.compare_unfocused else None
+    output_json = path_join("test_output", "bbs", filename + ".json")
+    compare_json = path_join("test_compare", "bbs", filename + ".json")
+    unfocused_json = path_join("test_compare", "bbs", "normal.json") if g_opts.compare_unfocused else None
     inputs = try_file_read_json(input_json, {})
     outputs = try_file_read_json(output_json, {})
     compares = try_file_read_json(compare_json, {})
@@ -298,7 +298,9 @@ def run_all():
 
 if __name__ == "__main__":
     init_tests(g_opts)
-    for dir in ("output", "compare"):
-        dir_ensure_exists(path_join("test_bbs", dir))
+    dir_ensure_exists("test_bbs")
+    for dir in ("test_output", "test_compare"):
+        dir_ensure_exists(path_join(dir, "bbs"))
+
     run_all()
     sys.exit(end_tests())
6 changes: 3 additions & 3 deletions run_tests.py
@@ -156,6 +156,7 @@ def run():
         "--no-minify-spaces", "--no-minify-lines", "--no-minify-comments", "--no-minify-rename",
         pico8_output="output.p8.printh")
     run_test("nominify", "input.p8", "output-nomin.p8", check_output=False, pico8_output="output.p8.printh")
+    run_test("reformat", "input.p8", "input-reformat.p8", "--unminify", "--unminify-indent", "4")
 
     run_test("nopreserve", "nopreserve.p8", "nopreserve.p8", "--minify",
         "--no-preserve", "circfill,rectfill", pico8_output_val="yep")
@@ -167,11 +168,11 @@ def run():
         pico8_output="const.p8.printh")
     run_test("const2", "const2.p8", "const2.p8", "--minify",
         "--no-minify-spaces", "--no-minify-lines", "--no-minify-comments", "--no-minify-rename", "--no-minify-tokens",
-        pico8_run=True, stdout_output="const2.txt", norm_stdout=norm_paths)
+        stdout_output="const2.txt", norm_stdout=norm_paths)
     run_test("constcl", "constcl.p8", "constcl.p8", "--minify")
     run_test("constcl-1", "constcl.p8", "constcl-1.p8", "--minify", "--const", "DEBUG", "true", "--const", "SPEED", "2.5", "--str-const", "VERSION", "v1.2")
     run_test("constcl-2", "constcl.p8", "constcl-2.p8", "--minify", "--const", "DEBUG", "true", "--const", "SPEED", "-2.6", "--const", "hero", "~1")
-    run_test("constmin", "const.p8", "constmin.p8", "--minify", pico8_run=True)
+    run_test("constmin", "const.p8", "constmin.p8", "--minify", pico8_output="const.p8.printh")
 
     if run_test("test", "test.p8", "test.p8", "--minify", "--no-minify-consts", pico8_output_val="DONE"):
         run_test("unmintest", "test.p8", "test-un.p8", "--unminify", from_output=True, pico8_output_val="DONE")
@@ -233,7 +234,6 @@ def run():
     run_test("repl-oc", "repl.p8", "repl-oc.p8", "--minify", "--focus-chars", pico8_output_val="finished")
     run_test("repl-ob", "repl.p8", "repl-ob.p8", "--minify", "--focus-compressed", pico8_output_val="finished")
 
-    run_test("reformat", "input.p8", "input-reformat.p8", "--unminify", "--unminify-indent", "4")
     run_test("notnil", "notnil.p8", "notnil.p8", "--minify", pico8_output_val="passed")
     run_test("wildcards", "wildcards.p8", "wildcards.p8", "--minify", "--no-minify-consts")
 
2 changes: 1 addition & 1 deletion shrinko8.py
@@ -9,7 +9,7 @@
 from pico_defs import get_default_version_id
 import argparse
 
-k_version = 'v1.2.0f'
+k_version = 'v1.2.0g'
 
 def SplitBySeps(val):
     return k_hint_split_re.split(val)