lexer: patch shlex to support pipe and only return last element
rsteube committed Jul 28, 2023
1 parent d3358bb commit 106b0e9
Showing 2 changed files with 69 additions and 0 deletions.
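In short: the vendored shlex lexer now treats |, &, and ; as command terminators, and Split discards everything scanned so far whenever one is read, so completion only ever sees the words of the last command in a pipeline. Usage sketches follow each file's diff below.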
25 changes: 25 additions & 0 deletions internal/lexer/lexer_test.go
@@ -96,4 +96,29 @@ func TestSplit(t *testing.T) {
		Tokens: []string{"example", "action", "--", ""},
		Prefix: `example 'action' -- `,
	})

	_test(`example 'action' -- | echo `, Tokenset{
		Tokens: []string{"echo", ""},
		Prefix: `example 'action' -- | echo `,
	})

	_test(`example 'action' -- || echo `, Tokenset{
		Tokens: []string{"echo", ""},
		Prefix: `example 'action' -- || echo `,
	})

	_test(`example 'action' -- && echo `, Tokenset{
		Tokens: []string{"echo", ""},
		Prefix: `example 'action' -- && echo `,
	})

	_test(`example 'action' -- ; echo `, Tokenset{
		Tokens: []string{"echo", ""},
		Prefix: `example 'action' -- ; echo `,
	})

	_test(`example 'action' -- & echo `, Tokenset{
		Tokens: []string{"echo", ""},
		Prefix: `example 'action' -- & echo `,
	})
}
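The new cases all assert the same behavior: every token before the most recent terminator is dropped, and the trailing space yields an empty final token for the word being completed. A minimal caller-side sketch, assuming Split returns a *Tokenset as the _test helper suggests (and noting that internal packages are importable only from within the module):

package main

import (
	"fmt"

	"github.com/rsteube/carapace/internal/lexer"
)

func main() {
	// Tokens before the last terminator ("|") are discarded; the trailing
	// space yields an empty final token for the word being completed.
	ts, err := lexer.Split(`example 'action' -- | echo `) // assumed signature: Split(string) (*Tokenset, error)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%#v\n", ts.Tokens) // expected: []string{"echo", ""}
	fmt.Printf("%q\n", ts.Prefix)  // expected: "example 'action' -- | echo "
}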
44 changes: 44 additions & 0 deletions third_party/github.com/google/shlex/shlex.go
@@ -80,6 +80,7 @@ const (
	nonEscapingQuoteRunes = "'"
	escapeRunes           = `\`
	commentRunes          = "#"
	terminateRunes        = "|&;"
)

// Classes of rune token
@@ -90,6 +91,7 @@ const (
	nonEscapingQuoteRuneClass
	escapeRuneClass
	commentRuneClass
	terminateRuneClass
	eofRuneClass
)

@@ -99,6 +101,7 @@ const (
	WordToken
	SpaceToken
	CommentToken
	TerminateToken
)

// Lexer state machine states
@@ -129,6 +132,7 @@ func newDefaultClassifier() tokenClassifier {
	t.addRuneClass(nonEscapingQuoteRunes, nonEscapingQuoteRuneClass)
	t.addRuneClass(escapeRunes, escapeRuneClass)
	t.addRuneClass(commentRunes, commentRuneClass)
	t.addRuneClass(terminateRunes, terminateRuneClass)
	return t
}

@@ -146,6 +150,12 @@ func NewLexer(r io.Reader) *Lexer {
	return (*Lexer)(NewTokenizer(r))
}

// TerminateError signals that a command terminator rune (one of `|&;`) was read.
type TerminateError struct{}

func (e *TerminateError) Error() string {
	return "encountered command terminator"
}

// Next returns the next word, or an error. If there are no more words,
// the error will be io.EOF. A *TerminateError is returned when a command
// terminator is read.
func (l *Lexer) Next() (string, error) {
@@ -159,6 +169,9 @@ func (l *Lexer) Next() (string, error) {
			return token.value, nil
		case CommentToken:
			// skip comments
		case TerminateToken:
			// stop scanning and report the terminator to the caller
			return "", &TerminateError{}
		default:
			return "", fmt.Errorf("Unknown token type: %v", token.tokenType)
		}
@@ -232,6 +245,12 @@ func (t *Tokenizer) scanStream() (*Token, error) {
						tokenType = CommentToken
						state = commentState
					}
				case terminateRuneClass:
					{
						tokenType = TerminateToken
						value = append(value, nextRune)
						state = inWordState
					}
				default:
					{
						tokenType = WordToken
@@ -405,6 +424,10 @@ func Split(s string) ([]string, error) {
	for {
		word, err := l.Next()
		if err != nil {
			if _, ok := err.(*TerminateError); ok {
				subStrings = make([]string, 0)
				continue
			}
			if err == io.EOF {
				return subStrings, nil
			}
@@ -413,3 +436,24 @@
		subStrings = append(subStrings, word)
	}
}

// split partitions a string into a slice of strings. When resetOnPipe is
// set, the accumulated words are discarded whenever a command terminator
// is read, so only the words of the last command are returned.
func split(s string, resetOnPipe bool) ([]string, error) {
	l := NewLexer(strings.NewReader(s))
	subStrings := make([]string, 0)
	for {
		word, err := l.Next()
		if err != nil {
			if err == io.EOF {
				return subStrings, nil
			}
			if _, ok := err.(*TerminateError); !ok {
				return subStrings, err
			}
			if resetOnPipe {
				subStrings = make([]string, 0)
			}
			continue // don't append the empty word produced by the terminator
		}
		subStrings = append(subStrings, word)
	}
}
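Taken together, the patched Split now discards everything up to the most recent terminator. A rough usage sketch, assuming the vendored copy is imported under its third_party path:

package main

import (
	"fmt"

	shlex "github.com/rsteube/carapace/third_party/github.com/google/shlex" // assumed import path
)

func main() {
	// Without a terminator the behavior matches upstream shlex.
	words, _ := shlex.Split(`grep -r "foo" ./src`)
	fmt.Println(words) // [grep -r foo ./src]

	// Each terminator (|, ||, &&, ;, &) resets the accumulated words, so
	// only the tokens of the last command in the pipeline are returned.
	words, _ = shlex.Split(`grep -r "foo" ./src | sort && uniq -c`)
	fmt.Println(words) // [uniq -c]
}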
