Permalink
Browse files

Extract a function and move the ECHO_E_LEXER definition.

  • Loading branch information...
Andy Chu
Andy Chu committed May 30, 2018
1 parent 9dbc472 commit 7a81eb61ec65f26a4c025cede071bdafe8622d33
Showing with 34 additions and 19 deletions.
  1. +1 −3 core/builtin.py
  2. +33 −16 osh/match.py
View
@@ -160,8 +160,6 @@ def Resolve(argv0):
# Implementation of builtins.
#
ECHO_LEXER = match.SimpleLexer(lex.ECHO_E_DEF)
ECHO_SPEC = _Register('echo')
ECHO_SPEC.ShortFlag('-e') # no backslash escapes
ECHO_SPEC.ShortFlag('-n')
@@ -188,7 +186,7 @@ def Echo(argv):
new_argv = []
for a in argv:
parts = []
for id_, value in ECHO_LEXER.Tokens(a):
for id_, value in match.ECHO_LEXER.Tokens(a):
p = word_compile.EvalCStringToken(id_, value)
# Unusual behavior: '\c' prints what is there and aborts processing!
View
@@ -3,15 +3,21 @@
match.py - match with generated re2c code or Python regexes.
"""
import os
#from core import util
from osh import lex
from osh.meta import Id, IdInstance
# bin/osh should work without compiling fastlex? But we want all the unit
# tests to run with a known version of it.
try:
import fastlex
except ImportError:
if os.environ.get('FASTLEX') == '0': # For manual testing
fastlex = None
else:
try:
import fastlex
except ImportError:
fastlex = None
#if fastlex:
if 0:
@@ -20,6 +26,23 @@
import re
def _LongestMatch(re_list, line, start_pos):
# Simulate the EOL handling in re2c.
if start_pos >= len(line):
return Id.Eol_Tok, start_pos
matches = []
for regex, tok_type in re_list:
m = regex.match(line, start_pos) # left-anchored
if m:
matches.append((m.end(0), tok_type, m.group(0)))
if not matches:
raise AssertionError('no match at position %d: %r' % (start_pos, line))
end_pos, tok_type, tok_val = max(matches, key=lambda m: m[0])
#util.log('%s %s', tok_type, end_pos)
return tok_type, end_pos
def _CompileAll(pat_list):
result = []
for is_regex, pat, token_id in pat_list:
@@ -38,21 +61,10 @@ def __init__(self, lexer_def):
def __call__(self, lex_mode, line, start_pos):
"""Returns (id, end_pos)."""
# Simulate the EOL handling in re2c.
if start_pos >= len(line):
return Id.Eol_Tok, start_pos
re_list = self.lexer_def[lex_mode]
matches = []
for regex, tok_type in re_list:
m = regex.match(line, start_pos) # left-anchored
if m:
matches.append((m.end(0), tok_type, m.group(0)))
if not matches:
raise AssertionError('no match at position %d: %r' % (start_pos, line))
end_pos, tok_type, tok_val = max(matches, key=lambda m: m[0])
return tok_type, end_pos
return _LongestMatch(re_list, line, start_pos)
def _MatchOshToken_Fast(lex_mode, line, start_pos):
"""Returns (id, end_pos)."""
@@ -107,3 +119,8 @@ def Tokens(self, line):
def IsValidVarName(s):
  """Test whether s is a well-formed shell variable name.

  Returns the (truthy) re match object when s matches _VAR_NAME_RE,
  or None when it doesn't — callers rely on truthiness only.
  """
  m = _VAR_NAME_RE.match(s)
  return m
# TODO: Conditionally create it
# Module-level lexer instance built from lex.ECHO_E_DEF; consumed as
# match.ECHO_LEXER by the echo builtin (core/builtin.py calls
# match.ECHO_LEXER.Tokens()).  Presumably tokenizes echo -e style
# backslash escapes — confirm against lex.ECHO_E_DEF.
ECHO_LEXER = SimpleLexer(lex.ECHO_E_DEF)

0 comments on commit 7a81eb6

Please sign in to comment.