-
Notifications
You must be signed in to change notification settings - Fork 4
/
token.go
256 lines (231 loc) · 4.62 KB
/
token.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
package token
import (
"fmt"
)
// TokenType identifies the lexical category of a token.
type TokenType int

// Token types produced by the lexer. TOKEN_ILLEGAL is deliberately -1 so
// that TOKEN_EOF lands on 0, the zero value of TokenType. The declaration
// order below is load-bearing: each constant's numeric value comes from
// iota, so entries must not be reordered or inserted mid-list.
const (
	TOKEN_ILLEGAL TokenType = (iota - 1) // token the lexer could not recognize

	TOKEN_EOF // end of file

	TOKEN_PLUS      // +
	TOKEN_MINUS     // -
	TOKEN_MULTIPLY  // *
	TOKEN_DIVIDE    // /
	TOKEN_MOD       // %
	TOKEN_POWER     // **
	TOKEN_INCREMENT // ++
	TOKEN_DECREMENT // --

	TOKEN_LPAREN    // (
	TOKEN_RPAREN    // )
	TOKEN_ASSIGN    // =
	TOKEN_SEMICOLON // ;
	TOKEN_COLON     // :
	TOKEN_COMMA     // ,
	TOKEN_DOT       // .
	TOKEN_LBRACE    // {
	TOKEN_RBRACE    // }
	TOKEN_BANG      // !
	TOKEN_LBRACKET  // [
	TOKEN_RBRACKET  // ]
	TOKEN_COMMENT   // #

	TOKEN_LT       // <
	TOKEN_LE       // <=
	TOKEN_GT       // >
	TOKEN_GE       // >=
	TOKEN_EQ       // ==
	TOKEN_NEQ      // !=
	TOKEN_MATCH    // =~
	TOKEN_NOTMATCH // !~
	TOKEN_AND      // &&
	TOKEN_OR       // ||

	TOKEN_NUMBER     // numeric literal, e.g. 10 or 10.1
	TOKEN_IDENTIFIER // identifier
	TOKEN_STRING     // string literal

	// Reserved keywords.
	TOKEN_TRUE     // true
	TOKEN_FALSE    // false
	TOKEN_NIL      // nil
	TOKEN_LET      // let
	TOKEN_RETURN   // return
	TOKEN_FUNCTION // fn
	TOKEN_IF       // if
	TOKEN_ELSE     // else
	TOKEN_WHILE    // while
	TOKEN_DO       // do
	TOKEN_FOR      // for
	TOKEN_IN       // in
	TOKEN_BREAK    // break
	TOKEN_CONTINUE // continue
	TOKEN_IMPORT   // import

	TOKEN_REGEX // regular expression literal
)

// tokenNames maps each TokenType to the display string used by String.
// Built once at package init instead of a per-call switch.
var tokenNames = map[TokenType]string{
	TOKEN_ILLEGAL:    "ILLEGAL",
	TOKEN_EOF:        "EOF",
	TOKEN_PLUS:       "+",
	TOKEN_MINUS:      "-",
	TOKEN_MULTIPLY:   "*",
	TOKEN_DIVIDE:     "/",
	TOKEN_MOD:        "%",
	TOKEN_POWER:      "**",
	TOKEN_INCREMENT:  "++",
	TOKEN_DECREMENT:  "--",
	TOKEN_LPAREN:     "(",
	TOKEN_RPAREN:     ")",
	TOKEN_ASSIGN:     "=",
	TOKEN_SEMICOLON:  ";",
	TOKEN_COLON:      ":",
	TOKEN_COMMA:      ",",
	TOKEN_DOT:        ".",
	TOKEN_LBRACE:     "{",
	TOKEN_RBRACE:     "}",
	TOKEN_BANG:       "!",
	TOKEN_LBRACKET:   "[",
	TOKEN_RBRACKET:   "]",
	TOKEN_COMMENT:    "#",
	TOKEN_LT:         "<",
	TOKEN_LE:         "<=",
	TOKEN_GT:         ">",
	TOKEN_GE:         ">=",
	TOKEN_EQ:         "==",
	TOKEN_NEQ:        "!=",
	TOKEN_MATCH:      "=~",
	TOKEN_NOTMATCH:   "!~",
	TOKEN_AND:        "&&",
	TOKEN_OR:         "||",
	TOKEN_NUMBER:     "NUMBER",
	TOKEN_IDENTIFIER: "IDENTIFIER",
	TOKEN_STRING:     "STRING",
	TOKEN_TRUE:       "TRUE",
	TOKEN_FALSE:      "FALSE",
	TOKEN_NIL:        "NIL",
	TOKEN_LET:        "LET",
	TOKEN_RETURN:     "RETURN",
	TOKEN_FUNCTION:   "FUNCTION",
	TOKEN_IF:         "IF",
	TOKEN_ELSE:       "ELSE",
	TOKEN_WHILE:      "WHILE",
	TOKEN_DO:         "DO",
	TOKEN_FOR:        "FOR",
	TOKEN_IN:         "IN",
	TOKEN_BREAK:      "BREAK",
	TOKEN_CONTINUE:   "CONTINUE",
	TOKEN_IMPORT:     "IMPORT",
	TOKEN_REGEX:      "<REGEX>",
}

// String returns a human-readable name for the token type; it is intended
// for debugging and test output. Unrecognized values yield "UNKNOWN".
func (tt TokenType) String() string {
	if name, ok := tokenNames[tt]; ok {
		return name
	}
	return "UNKNOWN"
}
// keywords maps each reserved word's source text to its token type.
// LookupIdent consults this table to distinguish keywords from plain
// identifiers; any identifier absent here lexes as TOKEN_IDENTIFIER.
var keywords = map[string]TokenType{
	"true":     TOKEN_TRUE,
	"false":    TOKEN_FALSE,
	"nil":      TOKEN_NIL,
	"let":      TOKEN_LET,
	"return":   TOKEN_RETURN,
	"fn":       TOKEN_FUNCTION,
	"if":       TOKEN_IF,
	"else":     TOKEN_ELSE,
	"while":    TOKEN_WHILE,
	"do":       TOKEN_DO,
	"for":      TOKEN_FOR,
	"in":       TOKEN_IN,
	"break":    TOKEN_BREAK,
	"continue": TOKEN_CONTINUE,
	"import":   TOKEN_IMPORT,
}
// Token is a single lexical token: its source position, category, and the
// literal text it was scanned from.
type Token struct {
	Pos     Position  // where the token starts in the source
	Type    TokenType // lexical category
	Literal string    // raw text as it appeared in the source
}

// String renders the token for debugging, delegating to the Stringer
// implementations of Position and TokenType for the first two fields.
func (t Token) String() string {
	return "Position: " + t.Pos.String() + ", Type: " + t.Type.String() + ", Literal: " + t.Literal
}
// Position is the location of a code point in the source.
type Position struct {
	Filename string
	Offset   int // byte offset relative to the entire file
	Line     int
	Col      int // column offset relative to the current line
}

// String formats the position as " <line:col> ", or " <file:line:col> "
// when a filename is known. Note the deliberate surrounding spaces, which
// callers rely on when embedding the position in messages.
func (p Position) String() string {
	if p.Filename == "" {
		return fmt.Sprintf(" <%d:%d> ", p.Line, p.Col)
	}
	return fmt.Sprintf(" <%s:%d:%d> ", p.Filename, p.Line, p.Col)
}

// Sline formats just the line portion of the position: the bare line
// number, or " <file:line> " when a filename is known. It is named Sline
// ("string line") because Line is already taken by the struct field.
func (p Position) Sline() string {
	if p.Filename == "" {
		return fmt.Sprintf("%d", p.Line)
	}
	return fmt.Sprintf(" <%s:%d> ", p.Filename, p.Line)
}
// LookupIdent classifies an identifier's text: reserved words resolve to
// their dedicated keyword token type, everything else is TOKEN_IDENTIFIER.
func LookupIdent(ident string) TokenType {
	tok, isKeyword := keywords[ident]
	if !isKeyword {
		return TOKEN_IDENTIFIER
	}
	return tok
}