Skip to content

Commit

Permalink
Merge 8df4ce4 into 0fa0129
Browse files Browse the repository at this point in the history
  • Loading branch information
metatoaster authored Aug 11, 2018
2 parents 0fa0129 + 8df4ce4 commit 9da3068
Show file tree
Hide file tree
Showing 4 changed files with 84 additions and 11 deletions.
19 changes: 19 additions & 0 deletions CHANGES.rst
Original file line number Diff line number Diff line change
@@ -1,6 +1,25 @@
Changelog
=========

1.1.1 - 2018-08-??
------------------

- Ensure that the accounting of layout rule chunks is done correctly in
the case where layout handlers specified a tuple of layout rules for
combined handling. [
`#19 <https://github.com/calmjs/calmjs.parse/issues/19>`_
]

- The issue caused by this error manifested severely in cases where
  multiple layout rule tokens are produced in a manner that repeats a
  pattern which also has a layout handler rule defined for it.  This
  does not typically happen for normal code with the standard printers
  (layout chunks are numerous and generally do not form a repeated
  pattern that gets consumed).  However, it showed up severely in
  minified output with semicolons dropped, as that basically guarantees
  that any series of closing blocks fitting the pattern would simply be
  dropped.

1.1.0 - 2018-08-07
------------------

Expand Down
25 changes: 22 additions & 3 deletions src/calmjs/parse/tests/test_es5_unparser.py
Original file line number Diff line number Diff line change
Expand Up @@ -2166,11 +2166,19 @@ def parse_to_sourcemap_tokens_minify(text):
)


def minify_drop_semi_helper(tree):
    """Minify ``tree`` with obfuscation, funcname shadowing and dropped
    semicolons, then verify the output round-trips through the parser.

    The minified text is reparsed and printed again with identical
    options; both printings must match, proving the minified output is
    itself valid ES5 that minifies stably.
    """

    options = dict(obfuscate=True, shadow_funcname=True, drop_semi=True)
    printed = minify_print(tree, **options)
    # reparse to ensure the minified result is still parseable, and
    # confirm that printing the reparsed tree reproduces the same text
    reparsed = es5(printed)
    assert printed == minify_print(reparsed, **options)
    return printed


MinifyDropSemiPrintTestCase = build_equality_testcase(
'MinifyDropSemiPrintTestCase',
partial(
minify_print, obfuscate=True, shadow_funcname=True, drop_semi=True
), ((
minify_drop_semi_helper, ((
label,
parse(textwrap.dedent(source).strip()),
answer,
Expand Down Expand Up @@ -2276,5 +2284,16 @@ def parse_to_sourcemap_tokens_minify(text):
})();
""",
'(function $(){(function a(){var a=1})()})()',
), (
'nested_return_function',
"""
v = function() {
return function() {
return function() {
};
};
};
""",
'v=function(){return function(){return function(){}}}',
)])
)
34 changes: 34 additions & 0 deletions src/calmjs/parse/tests/test_unparsers_walker.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,6 +176,40 @@ def noop(*a, **kw):
n2 = Node([n1, n0])
self.assertEqual('?', ''.join(c.text for c in walk(dispatcher, n2)))

def test_repeated_layouts(self):
    """Repeated layout tokens must be accounted for correctly.

    A combined ``(Newline, Space)`` handler must keep collapsing every
    repetition of the pattern without dropping or duplicating chunks.
    """

    class Block(Node):
        pass

    def emit_space(dispatcher, node, before, after, prev):
        yield SimpleChunk(' ')

    def emit_newline(dispatcher, node, before, after, prev):
        yield SimpleChunk('n')

    dispatcher = Dispatcher(
        definitions={
            'Node': (Space,),
            'Block': (JoinAttr(Iter(), value=(Newline,)),)
        },
        token_handler=None,  # no token rules, so never invoked
        layout_handlers={
            Space: emit_space,
            Newline: emit_newline,
            # combined rule: drop the space that follows a newline
            (Newline, Space): emit_newline,
        },
        deferrable_handlers={},
    )

    # each additional child appends exactly one more collapsed 'n'
    cases = [
        ([], ''),
        ([Node([])] * 1, ' '),
        ([Node([])] * 2, ' n'),
        ([Node([])] * 3, ' nn'),
    ]
    for children, expected in cases:
        rendered = ''.join(
            chunk.text for chunk in walk(dispatcher, Block(children)))
        self.assertEqual(expected, rendered)


class DispatcherTestcase(unittest.TestCase):

Expand Down
17 changes: 9 additions & 8 deletions src/calmjs/parse/unparsers/walker.py
Original file line number Diff line number Diff line change
Expand Up @@ -303,30 +303,31 @@ def process_layouts(layout_rule_chunks, last_chunk, chunk):
# the preliminary stack that will be cleared whenever a
# normalized layout rule chunk is generated.
lrcs_stack = []
rule_stack = []

# first pass: generate both the normalized/finalized lrcs.
for lrc in layout_rule_chunks:
rule_stack.append(lrc.rule)
lrcs_stack.append(lrc)

# check every single chunk from left to right...
for idx in range(len(rule_stack)):
handler = dispatcher.layout(tuple(rule_stack[idx:]))
for idx in range(len(lrcs_stack)):
rule = tuple(lrc.rule for lrc in lrcs_stack[idx:])
handler = dispatcher.layout(rule)
if handler is not NotImplemented:
# not manipulating lrsc_stack from within the same
# for loop that it is being iterated upon
break
else:
lrcs_stack.append(lrc)
# which continues back to the top of the outer for loop
continue

# So a handler is found from inside the rules; extend the
# chunks from the stack that didn't get normalized, and
# generate a new layout rule chunk. Junk the stack after.
# generate a new layout rule chunk.
lrcs_stack[:] = lrcs_stack[:idx]
lrcs_stack.append(LayoutChunk(
tuple(rule_stack[idx:]), handler,
rule, handler,
layout_rule_chunks[idx].node,
))
rule_stack[:] = [tuple(rule_stack[idx:])]

# second pass: now the processing can be done.
for lr_chunk in lrcs_stack:
Expand Down

0 comments on commit 9da3068

Please sign in to comment.