/
peekable_lex.go
63 lines (51 loc) · 1.53 KB
/
peekable_lex.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
// Copyright 2015 The Serulian Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package parser
import (
"container/list"
"fmt"
)
// peekableLexer wraps a lexer and provides the ability to peek forward without
// losing state. Tokens consumed from the underlying lexer during a peek are
// buffered in readTokens and replayed by nextToken before the lexer is
// consulted again.
type peekableLexer struct {
	lex        *lexer     // a reference to the lexer used for tokenization
	readTokens *list.List // tokens already read from the lexer during a lookahead; holds lexeme values in stream order
}
// peekable_lex returns a new peekableLexer wrapping the given lexer, with an
// empty lookahead buffer.
func peekable_lex(lex *lexer) *peekableLexer {
	peekable := &peekableLexer{lex: lex}
	peekable.readTokens = list.New()
	return peekable
}
// nextToken returns the next token in the stream, draining any tokens
// buffered by a previous peek before pulling from the underlying lexer.
func (l *peekableLexer) nextToken() lexeme {
	// Replay buffered lookahead tokens first, in order.
	if buffered := l.readTokens.Front(); buffered != nil {
		return l.readTokens.Remove(buffered).(lexeme)
	}
	return l.lex.nextToken()
}
// peekToken performs lookahead of the given count on the token stream,
// returning the count-th upcoming token (count == 1 is the immediate next
// token) without consuming it. Peeked tokens are buffered in readTokens so
// subsequent calls to nextToken still see them.
//
// Panics if count is less than 1 (a programmer error, not a stream error).
func (l *peekableLexer) peekToken(count int) lexeme {
	if count < 1 {
		// Message matches the actual requirement: count must be >= 1.
		panic(fmt.Sprintf("Expected count >= 1, received: %v", count))
	}

	// Ensure that readTokens holds at least the requested number of tokens.
	for l.readTokens.Len() < count {
		l.readTokens.PushBack(l.lex.nextToken())
	}

	// Retrieve the count-th token from the list.
	element := l.readTokens.Front()
	for i := 1; i < count; i++ {
		element = element.Next()
	}
	return element.Value.(lexeme)
}