|
|
|
|
|
|
|
from osh import parse_lib
|
|
|
from osh import ast_ as ast
|
|
|
from osh.lex import LEXER_DEF, LexMode
|
|
|
from osh.lex import LEXER_DEF
|
|
|
from osh import ast_ as ast
|
|
|
|
|
|
lex_mode_e = ast.lex_mode_e
|
|
|
def testRead(self):
    """Lex the two-line CMD fixture token-by-token in OUTER mode.

    Checks every token including the trailing newline of each line, then
    verifies that EOF is sticky: reading past the end keeps returning
    Id.Eof_Real.
    """
    lexer = _InitLexer(CMD)

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.Lit_Chars, 'ls'), t)

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.WS_Space, ' '), t)

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.Lit_Chars, '/'), t)

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.Op_Newline, '\n'), t)

    # Line two
    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.Lit_Chars, 'ls'), t)

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.WS_Space, ' '), t)

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.Lit_Chars, '/home/'), t)

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.Op_Newline, '\n'), t)

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.Eof_Real, ''), t)

    # Another EOF gives EOF
    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.Eof_Real, ''), t)
|
|
def testRead_VS_ARG_UNQ(self):
    """Smoke test: lex a single-quoted string in VS_ARG_UNQ mode.

    Only prints the first token; the exact assertion is still disabled
    (see the commented-out line) pending a decision on the expected id.
    """
    lexer = _InitLexer("'hi'")
    t = lexer.Read(lex_mode_e.VS_ARG_UNQ)
    #self.assertTokensEqual(ast.token(Id.Eof_Real, ''), t)
    #t = l.Read(lex_mode_e.VS_ARG_UNQ)
    print(t)
|
def testExtGlob(self):
    """Lex extended-glob syntax: '@(foo|bar)' plus individual prefixes.

    The opening '@(' is recognized in OUTER mode; everything after it is
    read in EXTGLOB mode.
    """
    lexer = _InitLexer('@(foo|bar)')

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.ExtGlob_At, '@('), t)

    t = lexer.Read(lex_mode_e.EXTGLOB)
    self.assertTokensEqual(ast.token(Id.Lit_Chars, 'foo'), t)

    t = lexer.Read(lex_mode_e.EXTGLOB)
    self.assertTokensEqual(ast.token(Id.Op_Pipe, '|'), t)

    t = lexer.Read(lex_mode_e.EXTGLOB)
    self.assertTokensEqual(ast.token(Id.Lit_Chars, 'bar'), t)

    t = lexer.Read(lex_mode_e.EXTGLOB)
    self.assertTokensEqual(ast.token(Id.Op_RParen, ')'), t)

    # Individual cases

    lexer = _InitLexer('@(')
    t = lexer.Read(lex_mode_e.EXTGLOB)
    self.assertTokensEqual(ast.token(Id.ExtGlob_At, '@('), t)

    lexer = _InitLexer('*(')
    t = lexer.Read(lex_mode_e.EXTGLOB)
    self.assertTokensEqual(ast.token(Id.ExtGlob_Star, '*('), t)

    lexer = _InitLexer('?(')
    t = lexer.Read(lex_mode_e.EXTGLOB)
    self.assertTokensEqual(ast.token(Id.ExtGlob_QMark, '?('), t)

    # '$' has no special meaning in EXTGLOB mode
    lexer = _InitLexer('$')
    t = lexer.Read(lex_mode_e.EXTGLOB)
    self.assertTokensEqual(ast.token(Id.Lit_Other, '$'), t)
|
def testBashRegexState(self):
    """In BASH_REGEX mode, '(', literal chars, and '|' all lex as Lit_Chars."""
    lexer = _InitLexer('(foo|bar)')

    t = lexer.Read(lex_mode_e.BASH_REGEX)
    self.assertTokensEqual(ast.token(Id.Lit_Chars, '('), t)

    t = lexer.Read(lex_mode_e.BASH_REGEX)
    self.assertTokensEqual(ast.token(Id.Lit_Chars, 'foo'), t)

    t = lexer.Read(lex_mode_e.BASH_REGEX)
    self.assertTokensEqual(ast.token(Id.Lit_Chars, '|'), t)
|
def testDBracketState(self):
    """In DBRACKET mode, '-z' lexes as the unary bool operator BoolUnary_z."""
    lexer = _InitLexer('-z foo')
    t = lexer.Read(lex_mode_e.DBRACKET)
    self.assertTokensEqual(ast.token(Id.BoolUnary_z, '-z'), t)
    # The token id should map back to the BoolUnary kind.
    self.assertEqual(Kind.BoolUnary, LookupKind(t.id))
|
def testLookAhead(self):
    # NOTE(review): the first line of this comment was lost in a diff merge;
    # the surviving text is kept verbatim below.
    # the function; then Peek() the next token. Then Lookahead in that state.
    lexer = _InitLexer('func()')

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.Lit_Chars, 'func'), t)

    #self.assertEqual(Id.Op_LParen, lexer.LookAhead())

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.Op_LParen, '('), t)

    # LookAhead must not consume the token.
    self.assertTokensEqual(
        ast.token(Id.Op_RParen, ')'), lexer.LookAhead(lex_mode_e.OUTER))

    # Same shape but with a space before '(': LookAhead after the space.
    lexer = _InitLexer('func ()')

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.Lit_Chars, 'func'), t)

    t = lexer.Read(lex_mode_e.OUTER)
    self.assertTokensEqual(ast.token(Id.WS_Space, ' '), t)

    self.assertTokensEqual(
        ast.token(Id.Op_LParen, '('), lexer.LookAhead(lex_mode_e.OUTER))
|
class LineLexerTest(unittest.TestCase):
|
|
|
def testReadOuter(self):
    """LineLexer requires lines to end with '\\n'; a bare '' must assert."""
    # Lines always end with '\n'
    l = LineLexer(LEXER_DEF, '')
    try:
        l.Read(lex_mode_e.OUTER)
    except AssertionError as e:
        print(e)
    else:
        raise AssertionError('Expected error')

    l = LineLexer(LEXER_DEF, '\n')
    self.assertTokensEqual(
        ast.token(Id.Op_Newline, '\n'), l.Read(lex_mode_e.OUTER))
|
def testRead_VS_ARG_UNQ(self):
    """A single quote in VS_ARG_UNQ mode lexes as Left_SingleQuote."""
    l = LineLexer(LEXER_DEF, "'hi'")
    t = l.Read(lex_mode_e.VS_ARG_UNQ)
    self.assertEqual(Id.Left_SingleQuote, t.id)
|
def testLookAhead(self):
    """LookAhead on a LineLexer: end-of-line, after Read, and via the cursor."""
    # Lines always end with '\n'; at end of input LookAhead yields Unknown_Tok.
    l = LineLexer(LEXER_DEF, '')
    self.assertTokensEqual(
        ast.token(Id.Unknown_Tok, ''), l.LookAhead(lex_mode_e.OUTER))

    l = LineLexer(LEXER_DEF, 'foo')
    self.assertTokensEqual(
        ast.token(Id.Lit_Chars, 'foo'), l.Read(lex_mode_e.OUTER))
    self.assertTokensEqual(
        ast.token(Id.Unknown_Tok, ''), l.LookAhead(lex_mode_e.OUTER))

    l = LineLexer(LEXER_DEF, 'foo bar')
    self.assertTokensEqual(
        ast.token(Id.Lit_Chars, 'foo'), l.Read(lex_mode_e.OUTER))
    # LookAhead skips the intervening space to find 'bar'.
    self.assertEqual(
        ast.token(Id.Lit_Chars, 'bar'), l.LookAhead(lex_mode_e.OUTER))

    # No lookahead; using the cursor!
    l = LineLexer(LEXER_DEF, 'func(')
    self.assertTokensEqual(
        ast.token(Id.Lit_Chars, 'func'), l.Read(lex_mode_e.OUTER))
    self.assertTokensEqual(
        ast.token(Id.Op_LParen, '('), l.LookAhead(lex_mode_e.OUTER))

    l = LineLexer(LEXER_DEF, 'func (')
    self.assertTokensEqual(
        ast.token(Id.Lit_Chars, 'func'), l.Read(lex_mode_e.OUTER))
    self.assertTokensEqual(
        ast.token(Id.Op_LParen, '('), l.LookAhead(lex_mode_e.OUTER))
|
|
|
|
|
OUTER_RE = CompileAll(LEXER_DEF[LexMode.OUTER])
|
|
|
DOUBLE_QUOTED_RE = CompileAll(LEXER_DEF[LexMode.DQ])
|
|
|
OUTER_RE = CompileAll(LEXER_DEF[lex_mode_e.OUTER])
|
|
|
DOUBLE_QUOTED_RE = CompileAll(LEXER_DEF[lex_mode_e.DQ])
|
|
|
|
|
|
|
|
|
class FunctionTest(unittest.TestCase):
|
|
|
|