[frontend refactor] Add location.RightTokenForWord()
To get rid of GetToken()

Also change redir_param.HereDoc() from span_id -> Token
Andy C committed May 17, 2023
1 parent 38a9dbd commit a069bcc
Showing 5 changed files with 44 additions and 37 deletions.
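
The commit message above is about removing an indirection: location helpers used to return an integer span_id that call sites then resolved with arena.GetToken(); after this change they return the Token directly. Below is a condensed before/after sketch of that call-site pattern, mirroring the osh/cmd_parse.py hunk further down. The Token and Arena classes here are toy stand-ins written for this sketch, not Oil's real frontend classes; only the shape of the calls comes from the diff.

    # Toy stand-ins (not Oil's real Token or arena classes), used only to
    # contrast the two calling styles.
    from typing import List


    class Token(object):
        def __init__(self, span_id, col, length):
            # type: (int, int, int) -> None
            self.span_id = span_id
            self.col = col
            self.length = length


    class Arena(object):
        """Maps integer span IDs back to Token objects."""

        def __init__(self):
            # type: () -> None
            self.tokens = []  # type: List[Token]

        def Add(self, col, length):
            # type: (int, int) -> Token
            tok = Token(len(self.tokens), col, length)
            self.tokens.append(tok)
            return tok

        def GetToken(self, span_id):
            # type: (int) -> Token
            return self.tokens[span_id]


    arena = Arena()
    word_tok = arena.Add(col=5, length=3)

    # OLD style (what the commit removes): helpers return an int span_id,
    # and every call site does an extra arena.GetToken() lookup.
    left_spid = word_tok.span_id           # what OfWordLeft() used to return
    left_tok = arena.GetToken(left_spid)

    # NEW style (what the commit introduces): helpers return the Token itself.
    left_tok2 = word_tok                   # what LeftTokenForWord() returns

    assert left_tok is left_tok2
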
42 changes: 27 additions & 15 deletions frontend/location.py
@@ -357,33 +357,45 @@ def LeftTokenForWord(w):
   raise AssertionError('for -Wreturn-type in C++')


-def OfWordRight(w):
-  # type: (word_t) -> int
-  """Needed for here doc delimiters.
-  TODO: Should return a Token
+def RightTokenForWord(w):
+  # type: (word_t) -> Token
+  """
+  Used for alias expansion and history substitution
+  and here doc delimiters?
   """
   UP_w = w
   with tagswitch(w) as case:
     if case(word_e.Compound):
       w = cast(CompoundWord, UP_w)
       if len(w.parts):
         end = w.parts[-1]
-        return _OfWordPartRight(end)
+        return _RightTokenForWordPart(end)
       else:
         # This is possible for empty brace sub alternative {a,b,}
-        return runtime.NO_SPID
+        return None

     elif case(word_e.Token):
       tok = cast(Token, UP_w)
-      return tok.span_id
+      return tok

+    elif case(word_e.BracedTree):
+      w = cast(word.BracedTree, UP_w)
+      # Note: this case may be unused
+      return _RightTokenForWordPart(w.parts[-1])

+    elif case(word_e.String):
+      w = cast(word.String, UP_w)
+      # Note: this case may be unused
+      return RightTokenForWord(w.blame_loc)

     else:
       raise AssertionError(w.tag())

   raise AssertionError('for -Wreturn-type in C++')



 #
 # Wrappers to remove
 #
@@ -401,24 +413,24 @@ def OfWordPartLeft(part):
     return runtime.NO_SPID


-def _OfWordPartRight(part):
-  # type: (word_part_t) -> int
+def OfWordLeft(w):
+  # type: (word_t) -> int
   """
-  Span ID wrapper to remove
+  TODO: Should return a Token
   """
-  tok = _RightTokenForWordPart(part)
+  tok = LeftTokenForWord(w)
   if tok:
     return tok.span_id
   else:
     return runtime.NO_SPID


-def OfWordLeft(w):
+def OfWordRight(w):
   # type: (word_t) -> int
   """
-  TODO: Should return a Token
+  Span ID wrapper to remove
   """
-  tok = LeftTokenForWord(w)
+  tok = RightTokenForWord(w)
   if tok:
     return tok.span_id
   else:
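
The new RightTokenForWord() above branches on the word_t sum type with Oil's tagswitch()/cast() idiom, which mycpp can translate into a C++ switch on the node's tag instead of a chain of isinstance() checks. For readers unfamiliar with the pattern, here is a small self-contained sketch of the idiom; the tagswitch class and the fake word variants below are illustrations written for this sketch, not Oil's real mylib or ASDL code.

    # Simplified stand-in for the tagswitch/cast pattern (illustrative only).


    class tagswitch(object):
        """Compare a node's tag() against candidate tags inside a with-block."""

        def __init__(self, node):
            self._tag = node.tag()

        def __enter__(self):
            return self

        def __exit__(self, *exc):
            return False

        def __call__(self, *tags):
            return self._tag in tags


    class word_e(object):  # fake tag constants for a two-variant sum type
        Compound = 1
        Token = 2


    class CompoundWord(object):
        def tag(self):
            return word_e.Compound


    class Token(object):
        def tag(self):
            return word_e.Token


    def Describe(w):
        with tagswitch(w) as case:
            if case(word_e.Compound):
                return 'compound word'
            elif case(word_e.Token):
                return 'single token'
            else:
                raise AssertionError(w.tag())


    assert Describe(CompoundWord()) == 'compound word'
    assert Describe(Token()) == 'single token'
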
4 changes: 3 additions & 1 deletion frontend/syntax.asdl
@@ -242,7 +242,9 @@ module syntax
   redir_param =
     Word %CompoundWord
   | HereDoc(word here_begin,         # e.g. EOF or 'EOF'
-            int here_end_span_id,    # span is whole line (for defunct osh2oil)
+            Token? here_end_tok,     # Token consisting of the whole line
+                                     # It's always filled in AFTER creation, but
+                                     # temporarily so optional
             List[word_part] stdin_parts  # one for each line
   )

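
On the optional (Token?) field: as the comment in the hunk says, a HereDoc node is created when the redirect operator is parsed, but its delimiter line is only seen later, so the field starts out unset and is assigned afterwards (that assignment is the arena.NewToken() call in the osh/cmd_parse.py hunk below). A minimal sketch of that lifecycle, using stand-in classes rather than the ASDL-generated ones:

    # Illustrative stand-ins, not the ASDL-generated classes.
    from typing import List, Optional


    class Token(object):
        def __init__(self, col, length):
            # type: (int, int) -> None
            self.col = col
            self.length = length


    class HereDoc(object):
        def __init__(self, here_begin):
            # type: (str) -> None
            self.here_begin = here_begin          # e.g. EOF or 'EOF'
            self.stdin_parts = []  # type: List[str]
            # Filled in AFTER creation, hence optional:
            self.here_end_tok = None  # type: Optional[Token]


    h = HereDoc('EOF')
    h.stdin_parts.append('hello\n')

    # ... later, once the line containing the delimiter has been read:
    h.here_end_tok = Token(col=0, length=len('EOF\n'))

    assert h.here_end_tok is not None
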
13 changes: 5 additions & 8 deletions osh/cmd_parse.py
@@ -148,9 +148,9 @@ def _ParseHereDocBody(parse_ctx, r, line_reader, arena):

   end_line, end_pos = last_line

-  # Create a span with the end terminator. Maintains the invariant that
-  # the spans "add up".
-  h.here_end_span_id = arena.NewTokenId(
+  # Create a Token with the end terminator. Maintains the invariant that the
+  # tokens "add up".
+  h.here_end_tok = arena.NewToken(
       Id.Undefined_Tok, end_pos, len(end_line.content), end_line, '')


@@ -829,11 +829,8 @@ def _MaybeExpandAliases(self, words):

     # We are expanding an alias, so copy the rest of the words and re-parse.
     if i < n:
-      left_spid = location.OfWordLeft(words[i])
-      right_spid = location.OfWordRight(words[-1])

-      left_tok = self.arena.GetToken(left_spid)
-      right_tok = self.arena.GetToken(right_spid)
+      left_tok = location.LeftTokenForWord(words[i])
+      right_tok = location.RightTokenForWord(words[-1])

       # OLD CONSTRAINT
       #assert left_tok.line_id == right_tok.line_id
20 changes: 8 additions & 12 deletions osh/history.py
@@ -106,17 +106,17 @@ def Eval(self, line):
           w = words[1]
         except IndexError:
           raise util.HistoryError("No first word in %r" % prev)
-        spid1 = location.OfWordLeft(w)
-        spid2 = location.OfWordRight(w)
+        tok1 = location.LeftTokenForWord(w)
+        tok2 = location.RightTokenForWord(w)

       elif ch == '$':
         try:
           w = words[-1]
         except IndexError:
           raise util.HistoryError("No last word in %r" % prev)

-        spid1 = location.OfWordLeft(w)
-        spid2 = location.OfWordRight(w)
+        tok1 = location.LeftTokenForWord(w)
+        tok2 = location.RightTokenForWord(w)

       elif ch == '*':
         try:
@@ -125,18 +125,14 @@ def Eval(self, line):
         except IndexError:
           raise util.HistoryError("Couldn't find words in %r" % prev)

-        spid1 = location.OfWordLeft(w1)
-        spid2 = location.OfWordRight(w2)
+        tok1 = location.LeftTokenForWord(w1)
+        tok2 = location.RightTokenForWord(w2)

       else:
         raise AssertionError(ch)

-      arena = self.parse_ctx.arena
-      span1 = arena.GetToken(spid1)
-      span2 = arena.GetToken(spid2)

-      begin = span1.col
-      end = span2.col + span2.length
+      begin = tok1.col
+      end = tok2.col + tok2.length

       out = prev[begin:end]

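
To make the col/length arithmetic at the end of Eval() concrete, here is a small worked example. The Token class and the token positions below are made up for illustration; they are not produced by Oil's parser here.

    # Once the left and right Tokens of the designated word(s) are known, the
    # text is recovered by slicing the previous line with col/length.


    class Token(object):
        def __init__(self, col, length):
            # type: (int, int) -> None
            self.col = col
            self.length = length


    prev = 'echo one two three'

    # Pretend the parser produced these Tokens for the words of prev:
    #   words[0]='echo'  words[1]='one'  words[2]='two'  words[3]='three'
    toks = [Token(0, 4), Token(5, 3), Token(9, 3), Token(13, 5)]

    # !^  -> first argument: left and right Token of words[1]
    tok1, tok2 = toks[1], toks[1]
    begin = tok1.col
    end = tok2.col + tok2.length
    assert prev[begin:end] == 'one'

    # !*  -> all arguments: left Token of words[1], right Token of words[-1]
    tok1, tok2 = toks[1], toks[-1]
    assert prev[tok1.col:tok2.col + tok2.length] == 'one two three'
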
2 changes: 1 addition & 1 deletion tools/osh2oil.py
@@ -306,7 +306,7 @@ def DoRedirect(self, node, local_symbols):
       for part in here_doc.stdin_parts:
         self.DoWordPart(part, local_symbols)

-      self.cursor.SkipUntil(here_doc.here_end_span_id + 1)
+      self.cursor.SkipUntil(here_doc.here_end_tok.span_id + 1)
       if delim_quoted:
         self.f.write("'''\n")
       else:
