@@ -5,65 +5,14 @@
from core import lexer
from core import reader
from osh import lex
from osh import word_parse
from osh import cmd_parse
from osh.meta import Id, IdInstance
# bin/osh should work without compiling fastlex? But we want all the unit
# tests to run with a known version of it.
try:
  import fastlex
except ImportError:
  fastlex = None
class MatchToken_Slow(object):
"""An abstract matcher that doesn't depend on OSH."""
def __init__(self, lexer_def):
self.lexer_def = {}
for state, pat_list in lexer_def.items():
self.lexer_def[state] = lexer.CompileAll(pat_list)
def __call__(self, lex_mode, line, start_pos):
"""Returns (id, end_pos)."""
# Simulate the EOL handling in re2c.
if start_pos >= len(line):
return Id.Eol_Tok, start_pos
re_list = self.lexer_def[lex_mode]
matches = []
for regex, tok_type in re_list:
m = regex.match(line, start_pos) # left-anchored
if m:
matches.append((m.end(0), tok_type, m.group(0)))
if not matches:
raise AssertionError('no match at position %d: %r' % (start_pos, line))
end_pos, tok_type, tok_val = max(matches, key=lambda m: m[0])
return tok_type, end_pos
def MatchToken_Fast(lex_mode, line, start_pos):
"""Returns (id, end_pos)."""
tok_type, end_pos = fastlex.MatchToken(lex_mode.enum_id, line, start_pos)
# IMPORTANT: We're reusing Id instances here. Ids are very common, so this
# saves memory.
return IdInstance(tok_type), end_pos
def _MakeMatcher():
  # NOTE: Could have an environment variable to control this for speed?
  #return MatchToken_Slow(lex.LEXER_DEF)
  if fastlex:
    return MatchToken_Fast
  else:
    return MatchToken_Slow(lex.LEXER_DEF)
from osh import match
from osh import word_parse
def InitLexer(s, arena):
"""For tests only."""
match_func = _MakeMatcher()
match_func = match.MakeMatcher()
line_lexer = lexer.LineLexer(match_func, '', arena)
line_reader = reader.StringLineReader(s, arena)
lx = lexer.Lexer(line_lexer, line_reader)
@@ -89,7 +38,7 @@ def InitLexer(s, arena):
def MakeParser(line_reader, arena):
"""Top level parser."""
line_lexer = lexer.LineLexer(_MakeMatcher(), '', arena)
line_lexer = lexer.LineLexer(match.MakeMatcher(), '', arena)
lx = lexer.Lexer(line_lexer, line_reader)
w_parser = word_parse.WordParser(lx, line_reader)
c_parser = cmd_parse.CommandParser(w_parser, lx, line_reader, arena)
@@ -108,7 +57,7 @@ def MakeParserForCompletion(code_str, arena):
  # NOTE: We don't need to use an arena here? Or we need a "scratch arena" that
  # doesn't interfere with the rest of the program.
  line_reader = reader.StringLineReader(code_str, arena)
  line_lexer = lexer.LineLexer(_MakeMatcher(), '', arena)  # AtEnd() is true
  line_lexer = lexer.LineLexer(match.MakeMatcher(), '', arena)  # AtEnd() is true
  lx = lexer.Lexer(line_lexer, line_reader)
  w_parser = word_parse.WordParser(lx, line_reader)
  c_parser = cmd_parse.CommandParser(w_parser, lx, line_reader, arena)
@@ -117,14 +66,14 @@ def MakeParserForCompletion(code_str, arena):
def MakeWordParserForHereDoc(lines, arena):
  line_reader = reader.VirtualLineReader(lines, arena)
  line_lexer = lexer.LineLexer(_MakeMatcher(), '', arena)
  line_lexer = lexer.LineLexer(match.MakeMatcher(), '', arena)
  lx = lexer.Lexer(line_lexer, line_reader)
  return word_parse.WordParser(lx, line_reader)
def MakeWordParserForPlugin(code_str, arena):
  line_reader = reader.StringLineReader(code_str, arena)
  line_lexer = lexer.LineLexer(_MakeMatcher(), '', arena)
  line_lexer = lexer.LineLexer(match.MakeMatcher(), '', arena)
  lx = lexer.Lexer(line_lexer, line_reader)
  return word_parse.WordParser(lx, line_reader)
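The new module itself is not shown in this diff; only the osh/match.py path (added to the wc -l line below) and the match.MakeMatcher() call sites are confirmed. The following is a minimal sketch of what osh/match.py plausibly contains, assuming the helpers deleted above were moved there mostly verbatim behind a single public MakeMatcher() entry point; the private names and the dropped tok_val are my own condensation, not confirmed by the diff.

# Hypothetical osh/match.py -- a sketch, assuming the code removed from
# parse_lib.py above was relocated here.
from core import lexer
from osh import lex
from osh.meta import Id, IdInstance

try:
  import fastlex
except ImportError:
  fastlex = None


class _MatchToken_Slow(object):
  """Pure-Python fallback: try every pattern, keep the longest match."""

  def __init__(self, lexer_def):
    self.lexer_def = {}
    for state, pat_list in lexer_def.items():
      self.lexer_def[state] = lexer.CompileAll(pat_list)

  def __call__(self, lex_mode, line, start_pos):
    if start_pos >= len(line):  # simulate the EOL handling in re2c
      return Id.Eol_Tok, start_pos
    matches = []
    for regex, tok_type in self.lexer_def[lex_mode]:
      m = regex.match(line, start_pos)  # left-anchored
      if m:
        matches.append((m.end(0), tok_type))
    if not matches:
      raise AssertionError('no match at position %d: %r' % (start_pos, line))
    end_pos, tok_type = max(matches, key=lambda m: m[0])
    return tok_type, end_pos


def _MatchToken_Fast(lex_mode, line, start_pos):
  """Thin wrapper around the re2c-generated fastlex extension."""
  tok_type, end_pos = fastlex.MatchToken(lex_mode.enum_id, line, start_pos)
  return IdInstance(tok_type), end_pos  # reuse Id instances to save memory


def MakeMatcher():
  """Return the fast matcher if fastlex was compiled, else the slow one."""
  if fastlex:
    return _MatchToken_Fast
  else:
    return _MatchToken_Slow(lex.LEXER_DEF)

With the selection logic centralized this way, parse_lib.py no longer needs to import fastlex, lex, or IdInstance at all; every call site simply asks match.MakeMatcher() for a callable with the (lex_mode, line, start_pos) -> (id, end_pos) contract.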
@@ -131,7 +131,7 @@ parser() {
  echo
  echo 'Lexer / Parser'
  wc -l osh/{*_parse.py,lex.py,parse_lib.py} core/word.py | sort -n
  wc -l osh/{*_parse.py,lex.py,match.py,parse_lib.py} core/word.py | sort -n
  echo
  echo 'Compiler / Middle End'