Permalink
Please sign in to comment.
Browse files
Isolated the two unit test failures with scripts in opy/testdata/.
- Parameterize VirtualMachine: more_info, subset - Remove unused print_frames() function - Add ability to filter 'dis' by a single function, e.g. to see only the _parse() function in re.pyc.
- Loading branch information...
Showing 6 changed files with 123 additions and 35 deletions.
- +19 −27 opy/byterun/pyvm2.py
- +22 −7 opy/compiler2/dis_tool.py
- +2 −1 opy/opy_main.py
- +25 −0 opy/test.sh
- +44 −0 opy/testdata/generator_exception.py
- +11 −0 opy/testdata/regex_compile.py
| @@ -0,0 +1,44 @@ | ||
| #!/usr/bin/python | ||
| from __future__ import print_function | ||
| """ | ||
| generator_exception.py | ||
| """ | ||
| import sys | ||
def Tokenize(s):
    """Yield a fixed sequence of token strings.

    The input string is accepted for interface compatibility but is not
    consulted; the token stream is hard-coded for this test case.
    """
    tokens = ('1', '2', '3')
    for tok in tokens:
        yield tok
class Parser(object):
    """Recursive TDOP parser.

    Pulls tokens one at a time from an iterable lexer; self.token holds
    the current token, or None once the lexer is exhausted.
    """

    def __init__(self, lexer):
        self.lexer = lexer  # iterator yielding token strings
        self.token = None   # current token; None before Next() / at EOF

    def Next(self):
        """Move to the next token, setting self.token to None at EOF."""
        try:
            # The next() builtin works on Python 2.6+ and Python 3,
            # unlike the Python-2-only .next() iterator method.
            t = next(self.lexer)
        except StopIteration:
            t = None
        self.token = t
def main(argv):
    """Drive the parser over a tiny fixed input, advancing twice."""
    parser = Parser(Tokenize('1+2'))
    parser.Next()
    parser.Next()
    print('Done')
if __name__ == '__main__':
    try:
        main(sys.argv)
    except RuntimeError as e:
        # 'print >>sys.stderr, ...' is a SyntaxError once
        # 'from __future__ import print_function' is in effect;
        # use the function form with the file= keyword instead.
        print('FATAL: %s' % e, file=sys.stderr)
        sys.exit(1)
| @@ -0,0 +1,11 @@ | ||
| #!/usr/bin/python | ||
| from __future__ import print_function | ||
| """ | ||
| regex_compile.py | ||
| Failing test for opyc run / byterun. | ||
| """ | ||
| import re | ||
| print(re.compile(r'.*')) |
0 comments on commit
45e8dcf