forked from aymerick/raymond
-
Notifications
You must be signed in to change notification settings - Fork 0
/
token.go
122 lines (105 loc) · 3.23 KB
/
token.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
package lexer
import "fmt"
const (
	TokenError TokenKind = iota
	TokenEOF
	// mustache delimiters
	TokenOpen             // OPEN
	TokenClose            // CLOSE
	TokenOpenRawBlock     // OPEN_RAW_BLOCK
	TokenCloseRawBlock    // CLOSE_RAW_BLOCK
	TokenOpenEndRawBlock  // END_RAW_BLOCK
	TokenOpenUnescaped    // OPEN_UNESCAPED
	TokenCloseUnescaped   // CLOSE_UNESCAPED
	TokenOpenBlock        // OPEN_BLOCK
	TokenOpenEndBlock     // OPEN_ENDBLOCK
	TokenInverse          // INVERSE
	TokenOpenInverse      // OPEN_INVERSE
	TokenOpenInverseChain // OPEN_INVERSE_CHAIN
	TokenOpenPartial      // OPEN_PARTIAL
	TokenComment          // COMMENT
	// inside mustaches
	TokenOpenSexpr        // OPEN_SEXPR
	TokenCloseSexpr       // CLOSE_SEXPR
	TokenEquals           // EQUALS
	TokenData             // DATA
	TokenSep              // SEP
	TokenOpenBlockParams  // OPEN_BLOCK_PARAMS
	TokenCloseBlockParams // CLOSE_BLOCK_PARAMS
	// tokens with content
	//
	// NOTE: Token.String relies on TokenContent being the first of the
	// "with content" kinds (it checks `t.Kind >= TokenContent`), so do not
	// reorder or insert kinds above this group without updating that check.
	TokenContent // CONTENT
	TokenID      // ID
	TokenString  // STRING
	TokenNumber  // NUMBER
	TokenBoolean // BOOLEAN
)
// Debug-output options consumed by Token.String.
//
// NOTE(review): ALL_CAPS names are unidiomatic Go (MixedCaps is the
// convention), but these identifiers are exported and referenced elsewhere
// in this file, so renaming would be a breaking change — left as-is.
const (
	// Option to generate token position in its string representation
	DUMP_TOKEN_POS = false
	// Option to generate values for all token kinds for their string representations
	DUMP_ALL_TOKENS_VAL = true
)
// TokenKind represents a Token type.
type TokenKind int

// Token represents a scanned token.
type Token struct {
	Kind TokenKind // Token kind
	Val  string    // Token value
	Pos  int       // Byte position in input string
	Line int       // Line number in input string
}
// tokenName maps each token kind to a human-readable name, used by
// TokenKind.String for debug output. Every TokenKind constant declared
// above has an entry here; kinds missing from this map would fall back
// to the "Token-%d" form in TokenKind.String.
var tokenName = map[TokenKind]string{
	TokenError:            "Error",
	TokenEOF:              "EOF",
	TokenContent:          "Content",
	TokenComment:          "Comment",
	TokenOpen:             "Open",
	TokenClose:            "Close",
	TokenOpenUnescaped:    "OpenUnescaped",
	TokenCloseUnescaped:   "CloseUnescaped",
	TokenOpenBlock:        "OpenBlock",
	TokenOpenEndBlock:     "OpenEndBlock",
	TokenOpenRawBlock:     "OpenRawBlock",
	TokenCloseRawBlock:    "CloseRawBlock",
	TokenOpenEndRawBlock:  "OpenEndRawBlock",
	TokenOpenBlockParams:  "OpenBlockParams",
	TokenCloseBlockParams: "CloseBlockParams",
	TokenInverse:          "Inverse",
	TokenOpenInverse:      "OpenInverse",
	TokenOpenInverseChain: "OpenInverseChain",
	TokenOpenPartial:      "OpenPartial",
	TokenOpenSexpr:        "OpenSexpr",
	TokenCloseSexpr:       "CloseSexpr",
	TokenID:               "ID",
	TokenEquals:           "Equals",
	TokenString:           "String",
	TokenNumber:           "Number",
	TokenBoolean:          "Boolean",
	TokenData:             "Data",
	TokenSep:              "Sep",
}
// String returns the token kind string representation for debugging.
// Kinds without a tokenName entry render as "Token-<n>".
func (k TokenKind) String() string {
	if name, ok := tokenName[k]; ok && name != "" {
		return name
	}
	return fmt.Sprintf("Token-%d", int(k))
}
// String returns the token string representation for debugging.
//
// The output is "<kind>" optionally prefixed with "<pos>:" (when
// DUMP_TOKEN_POS is set) and suffixed with "{<value>}" for tokens that
// carry content (or for all tokens when DUMP_ALL_TOKENS_VAL is set).
// Values longer than 100 bytes are truncated to 20 with a "..." marker.
func (t Token) String() string {
	result := ""

	if DUMP_TOKEN_POS {
		result += fmt.Sprintf("%d:", t.Pos)
	}

	// fmt.Sprintf("%s", t.Kind) was redundant (gosimple S1025): TokenKind
	// implements fmt.Stringer, so call it directly.
	result += t.Kind.String()

	// Kinds >= TokenContent are the "with content" kinds (see the const block).
	if (DUMP_ALL_TOKENS_VAL || (t.Kind >= TokenContent)) && len(t.Val) > 0 {
		if len(t.Val) > 100 {
			result += fmt.Sprintf("{%.20q...}", t.Val)
		} else {
			result += fmt.Sprintf("{%q}", t.Val)
		}
	}

	return result
}