flesh out get all tokens method
boyter committed Jul 24, 2020
1 parent bd05e7f commit a597848
Showing 2 changed files with 21 additions and 1 deletion.
12 changes: 11 additions & 1 deletion processor/parser/lexer.go
@@ -76,6 +76,7 @@ func (l *Lexer) Next() byte {
 	return 0
 }
 
+// Return the next token for the input
 func (l *Lexer) NextToken() Token {
 
 	// at the end so return end token
@@ -156,5 +157,14 @@ func (l *Lexer) NextToken() Token {
 }
 
 func (l *Lexer) Tokens() []Token {
-	return nil
+	var tokens []Token
+
+	t := l.NextToken()
+
+	for t.Type != "END" {
+		tokens = append(tokens, t)
+		t = l.NextToken()
+	}
+
+	return tokens
 }
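
Tokens() now simply drains the lexer: it calls NextToken() in a loop until the END token appears and collects everything before it. A minimal usage sketch of the new method follows — the import path and package alias are hypothetical and no Token fields beyond Type are assumed:

package main

import (
	"fmt"

	parser "example.com/processor/parser" // hypothetical import path for illustration
)

func main() {
	// Build a lexer over a query string and collect every token up front.
	lex := parser.NewLexer(`(something AND else) OR (other NOT this)`)

	// Tokens() keeps calling NextToken() until it sees the END token,
	// so the END marker itself never appears in the returned slice.
	for i, tok := range lex.Tokens() {
		fmt.Println(i, tok.Type)
	}
}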
10 changes: 10 additions & 0 deletions processor/parser/lexer_test.go
@@ -380,3 +380,13 @@ func TestNextTokenMultiple(t *testing.T) {
 		t.Error(`expected PAREN_CLOSE got`, token.Type)
 	}
 }
+
+func TestTokens(t *testing.T) {
+	lex := NewLexer(`(something AND else) OR (other NOT this)`)
+
+	tokens := lex.Tokens()
+
+	if len(tokens) != 11 {
+		t.Error("expected 11 tokens got", len(tokens))
+	}
+}
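
As a sanity check on the expected count: assuming each parenthesis and each whitespace-separated word is emitted as its own token, the query `(something AND else) OR (other NOT this)` breaks down into the four parentheses plus something, AND, else, OR, other, NOT and this — eleven tokens in total, with the trailing END marker excluded by Tokens().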
