Skip to content

Commit

Permalink
[fix] Instantiate simple_var_sub with 2 fields
Browse files Browse the repository at this point in the history
Tweak abbreviation of simple_var_sub
  • Loading branch information
Andy C committed Feb 2, 2023
1 parent c171c28 commit 7bf70a3
Show file tree
Hide file tree
Showing 4 changed files with 27 additions and 10 deletions.
10 changes: 9 additions & 1 deletion frontend/lexer.py
Expand Up @@ -42,12 +42,20 @@ def IsPlusEquals(tok):
return tok.line.content[index] == '+'


def TokenSliceLeft(tok, left_index):
    # type: (Token, int) -> str
    """Return the token's text with `left_index` leading chars dropped.

    Reads straight out of the underlying line buffer so no intermediate
    copy of the full token string is created.
    """
    assert left_index > 0
    end = tok.col + tok.length
    start = tok.col + left_index
    return tok.line.content[start:end]


def TokenSliceRight(tok, right_index):
    # type: (Token, int) -> str
    """Return the token's text with `-right_index` trailing chars dropped.

    Slices the underlying line buffer directly, without creating an
    intermediate string for the whole token.  `right_index` must be
    negative, mirroring Python's negative-index slicing convention.
    """
    assert right_index < 0
    right = tok.col + tok.length + right_index
    # Fix: the scraped diff left two copies of this return (old and new
    # hunk lines); keep only the reformatted one.
    return tok.line.content[tok.col : right]


def DummyToken(id_, val):
Expand Down
8 changes: 6 additions & 2 deletions frontend/syntax_abbrev.py
Expand Up @@ -75,8 +75,12 @@ def _simple_var_sub(obj):
p_node = runtime.NewRecord('$')
p_node.abbrev = True

n1 = runtime.NewLeaf(obj.name, color_e.StringConst)
p_node.unnamed_fields.append(n1)
if obj.left.id != Id.VSub_Name:
n1 = runtime.NewLeaf(Id_str(obj.left.id), color_e.OtherConst)
p_node.unnamed_fields.append(n1)

n2 = runtime.NewLeaf(obj.name, color_e.StringConst)
p_node.unnamed_fields.append(n2)

return p_node

Expand Down
13 changes: 8 additions & 5 deletions oil_lang/expr_to_ast.py
Expand Up @@ -18,8 +18,8 @@
variant, variant_type, variant_type_t,
)
from _devbuild.gen import grammar_nt

from core.pyerror import log, p_die
from frontend import lexer

from typing import TYPE_CHECKING, List, Tuple, Optional, cast
if TYPE_CHECKING:
Expand Down Expand Up @@ -609,7 +609,7 @@ def Expr(self, pnode):
(bare, bare), tok)

# $? is allowed
return simple_var_sub(tok)
return simple_var_sub(tok, lexer.TokenSliceLeft(tok, 1))

else:
nt_name = self.number2symbol[typ]
Expand Down Expand Up @@ -1234,7 +1234,8 @@ def _NonRangeChars(self, p_node):
return cast(single_quoted, p_child.children[1].tok)

if typ == grammar_nt.simple_var_sub:
return simple_var_sub(children[0].tok)
tok = children[0].tok
return simple_var_sub(tok, lexer.TokenSliceLeft(tok, 1))

if typ == grammar_nt.char_literal:
return class_literal_term.CharLiteral(children[0].tok)
Expand Down Expand Up @@ -1264,7 +1265,8 @@ def _ClassLiteralTerm(self, p_node):
if ISNONTERMINAL(typ):
p_child = children[0]
if typ == grammar_nt.simple_var_sub:
return simple_var_sub(p_child.children[0].tok)
tok = p_child.children[0].tok
return simple_var_sub(tok, lexer.TokenSliceLeft(tok, 1))

if typ == grammar_nt.braced_var_sub:
return cast(braced_var_sub, p_child.children[1].tok)
Expand Down Expand Up @@ -1377,7 +1379,8 @@ def _ReAtom(self, p_atom):
return cast(single_quoted, p_child.children[1].tok)

if typ == grammar_nt.simple_var_sub:
return simple_var_sub(children[0].tok)
tok = children[0].tok
return simple_var_sub(tok, lexer.TokenSliceLeft(tok, 1))

if typ == grammar_nt.char_literal:
return children[0].tok
Expand Down
6 changes: 4 additions & 2 deletions osh/word_parse.py
Expand Up @@ -77,6 +77,7 @@
from core import pyutil
from core import ui
from frontend import consts
from frontend import lexer
from frontend import reader
from osh import tdop
from osh import arith_parse
Expand Down Expand Up @@ -857,7 +858,8 @@ def _ReadLikeDQ(self, left_token, is_oil_expr, out_parts):
out_parts.append(part)

elif self.token_kind == Kind.VSub:
part = simple_var_sub(self.cur_token)
tok = self.cur_token
part = simple_var_sub(tok, lexer.TokenSliceLeft(tok, 1))
out_parts.append(part)
# NOTE: parsing "$f(x)" would BREAK CODE. Could add a more for it
# later.
Expand Down Expand Up @@ -1519,7 +1521,7 @@ def _ReadCompoundWord3(self, lex_mode, eof_type, empty_ok):
elif self.token_kind == Kind.VSub:
vsub_token = self.cur_token

part = simple_var_sub(vsub_token) # type: word_part_t
part = simple_var_sub(vsub_token, lexer.TokenSliceLeft(vsub_token, 1)) # type: word_part_t
if self.token_type == Id.VSub_DollarName:
# Look ahead for $strfunc(x)
# $f(x) or --name=$f(x) is allowed
Expand Down

0 comments on commit 7bf70a3

Please sign in to comment.