-
-
Notifications
You must be signed in to change notification settings - Fork 1
/
lexer.go
114 lines (102 loc) · 2.42 KB
/
lexer.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
package kdl
import (
"io"
"github.com/alecthomas/participle/v2/lexer"
)
var (
	// lex is the base stateful lexer for KDL. The fixupLexer below
	// post-processes its token stream (semicolon insertion, line
	// continuation collapsing).
	lex = lexer.MustStateful(lexer.Rules{
		`Root`: {
			// r"..." / r#"..."# raw strings; the captured run of #s is
			// back-referenced by RawStringEnd via \1.
			{`RawStringStart`, `r(#*)"`, lexer.Push(`RawString`)},
			{`Ident`, `[-_!\p{L}][-_\p{L}0-9]*`, nil},
			{`String`, `"(\\.|[^"])*"`, nil},
			{`Number`, `\d+`, nil},
			{`Punct`, `[\\{};=]`, nil},
			{`Skip`, `/-`, nil},
			// Accept all common newline conventions. The original
			// pattern `\n\r?` rejected Windows CRLF input (the bare
			// `\r` before `\n` matched no rule and caused a lex error).
			{`NL`, `\r\n|\n\r|\n|\r`, nil},
			{`startComment`, `/\*`, lexer.Push(`Comment`)},
			{`singleLineComment`, `//.*`, nil},
			{`whitespace`, `[ \t]+`, nil},
		},
		`RawString`: {
			// \1 matches the same number of #s captured by RawStringStart.
			{`RawStringEnd`, `"\1`, lexer.Pop()},
			{`RawStringText`, `[^"]*`, nil},
		},
		`Comment`: {
			// Block comments nest, so /* pushes another Comment state.
			{`startInnerComment`, `/\*`, lexer.Push(`Comment`)},
			{`endComment`, `\*/`, lexer.Pop()},
			{`commentText`, `[^/*]+`, nil},
		},
	})
	// Token types the fixup pass needs to recognise node-terminating
	// positions (see fixupLexer.Next).
	identToken     = lex.Symbols()["Ident"]
	stringEndToken = lex.Symbols()["RawStringEnd"]
	stringToken    = lex.Symbols()["String"]
	numberToken    = lex.Symbols()["Number"]
)
// fixupLexerDefinition is a lexer definition that wraps the stateful
// lexer above with a pass that inserts semi-colons and collapses
// \-separated lines (see fixupLexer).
type fixupLexerDefinition struct{}
// Lex starts the underlying stateful lexer on r and wraps it in the
// semicolon-inserting fixup pass.
func (l fixupLexerDefinition) Lex(path string, r io.Reader) (lexer.Lexer, error) { // nolint: golint
	inner, err := lex.Lex(path, r)
	if err != nil {
		return nil, err
	}
	return &fixupLexer{lexer: inner}, nil
}
// Symbols returns the symbol table of the underlying stateful lexer;
// the fixup pass introduces no new token types (inserted semicolons use
// the rune type ';').
func (l fixupLexerDefinition) Symbols() map[string]lexer.TokenType { // nolint: golint
return lex.Symbols()
}
// fixupLexer post-processes the raw token stream: it inserts ';'
// tokens at node boundaries and drops '\' line continuations.
type fixupLexer struct {
	lexer lexer.Lexer  // underlying token source
	last  lexer.Token  // most recently emitted/consumed token, used to decide semicolon insertion
	next  *lexer.Token // pending token pushed back when a ';' was synthesized before '}'
	eof   bool         // set once EOF has been seen; further calls return EOF tokens
}
// Next returns the next fixed-up token. Two transformations are applied
// to the underlying stream:
//
//  1. A ';' token is synthesized before every '}' and in place of any
//     newline (or EOF) that terminates a node — i.e. when the previous
//     token was '}', a number, raw string end, identifier or string.
//     Other newlines are dropped.
//  2. '\' tokens are deleted; because the '\' becomes l.last, the
//     newline that follows falls into the default case below and is
//     skipped, collapsing backslash-continued lines.
func (l *fixupLexer) Next() (lexer.Token, error) {
next:
	for {
		if l.eof {
			return lexer.EOFToken(l.last.Pos), nil
		}
		var token lexer.Token
		if l.next != nil {
			token = *l.next
			l.next = nil
		} else {
			var err error
			token, err = l.lexer.Next()
			if err != nil {
				return token, err
			}
			// Always insert a ; before } so the final node in a block
			// is terminated even without a trailing newline.
			if token.Value == "}" {
				l.next = &token
				return lexer.Token{Type: ';', Value: ";", Pos: token.Pos}, nil
			}
		}
		l.eof = token.EOF()
		// Delete \ (line continuation); see rule 2 in the doc comment.
		if token.Value == "\\" {
			l.last = token
			continue next
		}
		// The NL rule can produce any newline convention, so compare
		// against every form rather than just "\n" (the original
		// comparison passed multi-byte newline tokens through as
		// ordinary tokens, breaking semicolon insertion).
		isNL := token.Value == "\n" || token.Value == "\r\n" ||
			token.Value == "\n\r" || token.Value == "\r"
		if !isNL && !token.EOF() {
			l.last = token
			return token, nil
		}
		// Newline or EOF: decide whether it terminates a node and so
		// needs to become a semicolon.
		if l.last.Value == "}" {
			token.Value = ";"
			token.Type = ';'
		} else {
			switch l.last.Type {
			case numberToken, stringEndToken, identToken, stringToken:
				token.Value = ";"
				token.Type = ';'
			default:
				// Blank line / leading newline: drop it.
				l.last = token
				continue next
			}
		}
		l.last = token
		return token, nil
	}
}