This repository has been archived by the owner on May 2, 2023. It is now read-only.
/
lexer.go
177 lines (155 loc) · 3.79 KB
/
lexer.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
package highlight
import (
"bufio"
"fmt"
"io"
"mime"
"path"
"strings"
)
// Lexer defines a simple state-based lexer.
type Lexer struct {
	// Name identifies the lexer (used in AcceptsFilename error messages).
	Name string
	// States holds the uncompiled lexing rules; compiled on each
	// Tokenize call via States.Compile.
	States States
	// Filters wrap the emit callback in Tokenize, filtering every token
	// before it reaches the consumer.
	Filters Filters
	// Formatter, when non-nil, additionally wraps the emit callback in
	// Format.
	Formatter Filter
	// Filenames lists path.Match patterns (e.g. "*.json") checked by
	// AcceptsFilename.
	Filenames []string
	// MimeTypes lists media types (e.g. "application/json") checked by
	// AcceptsMediaType.
	MimeTypes []string
}
// Format tokenizes r, routing every token through the configured
// Formatter when one is set, and straight to emit otherwise.
func (l Lexer) Format(r *bufio.Reader, emit func(Token) error) error {
	sink := emit
	if l.Formatter != nil {
		sink = l.Formatter.Filter(emit)
	}
	return l.Tokenize(r, sink)
}
// Tokenize reads from the given input and emits tokens to the output channel.
// Will end on any error from the reader, including io.EOF to signify the end
// of input.
//
// Input is accumulated line-by-line in `subject`; when the reader's buffer
// fills before a newline arrives (bufio.ErrBufferFull), the partial line is
// kept and retried on the next read.
func (l Lexer) Tokenize(br *bufio.Reader, emit func(Token) error) error {
	// Compile the rule set once per call; a malformed rule aborts early.
	states, err := l.States.Compile()
	if err != nil {
		return err
	}
	// Wrap the caller's sink with this lexer's filter chain.
	emit = l.Filters.Filter(emit)
	stack := &Stack{"root"}
	// eol is true when subject currently ends at a line (or input) boundary,
	// meaning no more bytes can arrive for the line being matched.
	eol := false
	var subject = ""
	for {
		next, err := br.ReadString('\n')
		if err == bufio.ErrBufferFull {
			// Partial line: more data for this line may still arrive.
			eol = false
		} else if err == io.EOF {
			eol = true
		} else if err != nil {
			// NOTE(review): the underlying read error is discarded here;
			// only the result of emitting EndToken is returned.
			return emit(EndToken)
		} else {
			eol = strings.HasSuffix(next, "\n")
		}
		subject = subject + next
		if subject == "" && err == io.EOF {
			// Clean EOF with nothing left to match: terminate the stream
			// and surface io.EOF, as documented above.
			emit(EndToken)
			return err
		}
		for subject != "" {
			// Match current state against current subject
			stateName := stack.Peek()
			state := states.Get(stateName)
			// Tokenize input
			n, rule, tokens, err := state.Match(subject)
			if err != nil {
				// NOTE(review): the match error is likewise discarded in
				// favor of the EndToken emit result.
				return emit(EndToken)
			}
			// No rules matched
			if rule == nil {
				if !eol {
					// Read more data for the current line
					break
				} else {
					// Emit entire subject as an error
					tokens = []Token{{Value: subject, Type: Error}}
					n = len(subject)
				}
			}
			// Emit each token to the output
			for _, t := range tokens {
				t.State = stateName
				if err := emit(t); err != nil {
					// Consumer rejected the token: close the stream and
					// propagate the consumer's error, not the emit result.
					emit(EndToken)
					return err
				}
			}
			// Update state
			if rule == nil {
				if !eol {
					// Didn't match at all, reset to root state
					stack.Empty()
					stack.Push("root")
				}
			} else {
				// Push new states as appropriate
				for _, state := range rule.Stack() {
					if state == "#pop" {
						stack.Pop()
					} else if state != "" {
						stack.Push(state)
					}
				}
			}
			if stack.Len() == 0 {
				// Popped past the root state: treat as end of stream.
				return emit(EndToken)
			}
			// Consume matched part
			subject = subject[n:]
		}
	}
	return nil
}
// TokenizeString is a convenience method that tokenizes s in full and
// returns the collected tokens alongside the terminating error, if any.
func (l Lexer) TokenizeString(s string) ([]Token, error) {
	reader := bufio.NewReader(strings.NewReader(s))
	collected := []Token{}
	collect := func(t Token) error {
		collected = append(collected, t)
		return nil
	}
	err := l.Tokenize(reader, collect)
	return collected, err
}
// AcceptsFilename returns true if this Lexer thinks it is suitable for the
// given filename. An error will be returned iff an invalid filename pattern
// is registered by the Lexer.
func (l Lexer) AcceptsFilename(name string) (bool, error) {
	for _, fn := range l.Filenames {
		matched, err := path.Match(fn, name)
		if err != nil {
			// Wrap with %w so callers can unwrap to path.ErrBadPattern;
			// the rendered message is unchanged from the %s form.
			return false, fmt.Errorf("malformed filename pattern '%s' for "+
				"lexer '%s': %w", fn, l.Name, err)
		}
		if matched {
			return true, nil
		}
	}
	return false, nil
}
// AcceptsMediaType returns true if this Lexer thinks it is suitable for the
// given media (MIME) type. An error will be returned iff the given mime type
// is invalid.
func (l Lexer) AcceptsMediaType(media string) (bool, error) {
	// Name the result "parsed" rather than "mime" so the local does not
	// shadow the mime package.
	parsed, _, err := mime.ParseMediaType(media)
	if err != nil {
		return false, err
	}
	for _, mt := range l.MimeTypes {
		if parsed == mt {
			return true, nil
		}
	}
	return false, nil
}
// ListMediaTypes lists the media types this Lexer supports,
// e.g. ["application/json"]. The returned slice is the Lexer's own
// MimeTypes field, not a copy.
func (l Lexer) ListMediaTypes() []string {
	return l.MimeTypes
}
// ListFilenames lists the filename patterns this Lexer supports,
// e.g. ["*.json"]. The returned slice is the Lexer's own Filenames
// field, not a copy.
func (l Lexer) ListFilenames() []string {
	return l.Filenames
}