lexer: patch shlex to support pipeline separators
rsteube committed Jul 28, 2023
1 parent d3358bb commit 09c0117
Showing 3 changed files with 65 additions and 2 deletions.
internal/lexer/lexer.go (2 changes: 1 addition & 1 deletion)
@@ -44,7 +44,7 @@ func Split(s string) (*Tokenset, error) {
}

func split(s string) (*Tokenset, error) {
	splitted, err := shlex.Split(s)
	splitted, err := shlex.SplitP(s)
	if strings.HasSuffix(s, " ") {
		splitted = append(splitted, "")
	}
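The one-line change above swaps the vendored splitter call so that only the last command of a pipeline is tokenized. A rough sketch of the intent (illustration only, mirroring the doc comment on SplitP in the patched shlex below; plain shlex.Split still returns every word, including the separator itself):

	splitted, _ := shlex.SplitP(`echo example | bat -`)
	// splitted -> ["bat", "-"]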
internal/lexer/lexer_test.go (25 changes: 25 additions & 0 deletions)
@@ -96,4 +96,29 @@ func TestSplit(t *testing.T) {
Tokens: []string{"example", "action", "--", ""},
Prefix: `example 'action' -- `,
})

_test(`example 'action' -- | echo `, Tokenset{
Tokens: []string{"echo", ""},
Prefix: `example 'action' -- | echo `,
})

_test(`example 'action' -- || echo `, Tokenset{
Tokens: []string{"echo", ""},
Prefix: `example 'action' -- || echo `,
})

_test(`example 'action' -- && echo `, Tokenset{
Tokens: []string{"echo", ""},
Prefix: `example 'action' -- && echo `,
})

_test(`example 'action' -- ; echo `, Tokenset{
Tokens: []string{"echo", ""},
Prefix: `example 'action' -- ; echo `,
})

_test(`example 'action' -- & echo `, Tokenset{
Tokens: []string{"echo", ""},
Prefix: `example 'action' -- & echo `,
})
}
third_party/github.com/google/shlex/shlex.go (40 changes: 39 additions & 1 deletion)
@@ -80,6 +80,7 @@ const (
	nonEscapingQuoteRunes = "'"
	escapeRunes           = `\`
	commentRunes          = "#"
	terminateRunes        = "|&;"
)

// Classes of rune token
@@ -90,6 +91,7 @@ const (
	nonEscapingQuoteRuneClass
	escapeRuneClass
	commentRuneClass
	pipelineRuneClass
	eofRuneClass
)

@@ -99,6 +101,7 @@ const (
	WordToken
	SpaceToken
	CommentToken
	PipelineToken
)

// Lexer state machine states
@@ -129,6 +132,7 @@ func newDefaultClassifier() tokenClassifier {
	t.addRuneClass(nonEscapingQuoteRunes, nonEscapingQuoteRuneClass)
	t.addRuneClass(escapeRunes, escapeRuneClass)
	t.addRuneClass(commentRunes, commentRuneClass)
	t.addRuneClass(terminateRunes, pipelineRuneClass)
	return t
}

@@ -146,6 +150,12 @@ func NewLexer(r io.Reader) *Lexer {
	return (*Lexer)(NewTokenizer(r))
}

type PipelineSeparatorError struct{}

func (m *PipelineSeparatorError) Error() string {
return "encountered a pipeline separator like `|`"
}

// Next returns the next word, or an error. If there are no more words,
// the error will be io.EOF.
func (l *Lexer) Next() (string, error) {
@@ -159,6 +169,9 @@ func (l *Lexer) Next() (string, error) {
			return token.value, nil
		case CommentToken:
			// skip comments
		case PipelineToken:
			// return token but with pseudo err to mark end of pipeline
			return token.value, &PipelineSeparatorError{}
		default:
			return "", fmt.Errorf("Unknown token type: %v", token.tokenType)
		}
@@ -232,6 +245,12 @@ func (t *Tokenizer) scanStream() (*Token, error) {
						tokenType = CommentToken
						state = commentState
					}
				case pipelineRuneClass:
					{
						tokenType = PipelineToken
						value = append(value, nextRune)
						state = inWordState
					}
				default:
					{
						tokenType = WordToken
@@ -400,6 +419,19 @@ func (t *Tokenizer) Next() (*Token, error) {

// Split partitions a string into a slice of strings.
func Split(s string) ([]string, error) {
	return split(s, false)
}

// SplitP is like Split but only returns the last pipeline.
//
// `echo example | bat -`
// # [bat, -]
func SplitP(s string) ([]string, error) {
	return split(s, true)
}

// split partitions a string into a slice of strings, restarting at each
// pipeline separator when resetOnPipe is set.
func split(s string, resetOnPipe bool) ([]string, error) {
	l := NewLexer(strings.NewReader(s))
	subStrings := make([]string, 0)
	for {
@@ -408,7 +440,13 @@ func Split(s string) ([]string, error) {
			if err == io.EOF {
				return subStrings, nil
			}
			return subStrings, err
			if _, ok := err.(*PipelineSeparatorError); !ok {
				return subStrings, err
			}
			if resetOnPipe {
				subStrings = make([]string, 0)
				continue
			}
		}
		subStrings = append(subStrings, word)
	}
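Taken together, a minimal self-contained sketch of the patched splitter's behavior. This is an illustration, not code from the commit: the import path is an assumption based on the vendored file path above, and the printed results follow the doc comment and tests in this diff.

package main

import (
	"fmt"

	// assumed import path for the vendored, patched copy of google/shlex
	shlex "github.com/rsteube/carapace/third_party/github.com/google/shlex"
)

func main() {
	all, _ := shlex.Split(`echo example | bat -`)   // every word, pipeline separator included
	last, _ := shlex.SplitP(`echo example | bat -`) // only the words of the last pipeline
	fmt.Println(all)  // [echo example | bat -]
	fmt.Println(last) // [bat -]
}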
