/
lexer.go
113 lines (99 loc) · 3.25 KB
/
lexer.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
package hl
import (
"fmt"
"github.com/alecthomas/chroma"
. "github.com/alecthomas/chroma"
"github.com/alecthomas/chroma/lexers"
"github.com/alecthomas/chroma/styles"
"github.com/vito/bass/pkg/bass"
)
// BassLexer is the chroma lexer for the Bass language, registered with
// chroma's global lexer registry so it is discoverable by alias ("bass"),
// filename glob (*.bass), and MIME type. The rules themselves are built
// lazily by bassRules on first use.
var BassLexer = lexers.Register(MustNewLazyLexer(
	&Config{
		Name:      "Bass",
		Aliases:   []string{"bass"},
		Filenames: []string{"*.bass"},
		MimeTypes: []string{"text/x-bass", "application/x-bass"},
	},
	bassRules,
))
// class2chroma maps each Bass binding class (as reported by Classify) to
// the chroma token type used to highlight symbols of that class. bassRules
// panics if Classify yields a class missing from this table.
var class2chroma = map[Class]chroma.TokenType{
	Bool:    chroma.KeywordConstant,
	Const:   chroma.KeywordConstant,
	Cond:    chroma.Keyword,
	Repeat:  chroma.NameBuiltin,
	Var:     chroma.NameBuiltinPseudo,
	Def:     chroma.KeywordDeclaration,
	Fn:      chroma.NameFunction,
	Op:      chroma.NameBuiltin,
	Special: chroma.Keyword,
	Import:  chroma.KeywordNamespace,
}
// ttyMap maps ANSI SGR color codes (e.g. "31m" = red) to the hex colors
// that chroma's TTY formatter associates with them, so that TTYStyle below
// round-trips back to the intended escape sequences.
//
// taken from chroma's TTY formatter
var ttyMap = map[string]string{
	"30m": "#000000", "31m": "#7f0000", "32m": "#007f00", "33m": "#7f7fe0",
	"34m": "#00007f", "35m": "#7f007f", "36m": "#007f7f", "37m": "#e5e5e5",
	"90m": "#555555", "91m": "#ff0000", "92m": "#00ff00", "93m": "#ffff00",
	"94m": "#0000ff", "95m": "#ff00ff", "96m": "#00ffff", "97m": "#ffffff",
}
// TTYStyle is a chroma style whose colors are the exact hex codes used by
// the TTY formatter, so each token type maps to a specific ANSI escape
// code when rendered in a terminal. Registered globally as "tty".
var TTYStyle = styles.Register(chroma.MustNewStyle("tty", chroma.StyleEntries{
	chroma.Comment:             ttyMap["95m"] + " italic",
	chroma.CommentPreproc:      ttyMap["90m"],
	chroma.KeywordConstant:     ttyMap["33m"],
	chroma.Keyword:             ttyMap["31m"],
	chroma.KeywordDeclaration:  ttyMap["35m"],
	chroma.NameBuiltin:         ttyMap["31m"],
	chroma.NameBuiltinPseudo:   ttyMap["36m"],
	chroma.NameFunction:        ttyMap["34m"],
	chroma.NameNamespace:       ttyMap["34m"],
	chroma.LiteralNumber:       ttyMap["31m"],
	chroma.LiteralString:       ttyMap["32m"],
	chroma.LiteralStringSymbol: ttyMap["33m"],
	chroma.Operator:            ttyMap["31m"],
	chroma.Punctuation:         ttyMap["90m"],
}))
// symChars is the regexp character-class body (without brackets) of
// characters permitted in Bass symbols; interpolated into several rules
// below as `[` + symChars + `]`.
const symChars = `\w!$%*+<=>?.#\-`
// bassRules builds the chroma rule set for the Bass lexer.
//
// Static rules cover comments, whitespace, numbers, strings, and keyword
// symbols; the rest are generated dynamically by classifying the bindings
// of a fresh ground scope, so every builtin is highlighted according to
// its class. Panics if a classified binding has no entry in class2chroma
// (a programmer error surfaced at lexer construction time).
func bassRules() Rules {
	rootRules := []Rule{
		{`^#!.*$`, CommentPreproc, nil},
		{`;.*$`, CommentSingle, nil},
		{`[\s]+`, Text, nil},
		// The hex rule must precede the plain integer rule: chroma takes
		// the first matching rule, so otherwise the leading "0" of "0x2a"
		// would be consumed as a decimal integer and hex literals would
		// never be tokenized.
		{`0x-?[abcdef\d]+`, LiteralNumberHex, nil},
		{`-?\d+`, LiteralNumberInteger, nil},
		{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
		{`:[` + symChars + `]+`, LiteralStringSymbol, nil},
		{"&", Operator, nil},
	}

	scope := bass.NewRunScope(bass.Ground, bass.RunState{})
	for _, class := range Classify(scope) {
		words := make([]string, len(class.Bindings))
		for i := range class.Bindings {
			words[i] = string(class.Bindings[i])
		}

		if len(words) == 0 {
			// none; prevent zero-length match
			continue
		}

		tokenType, found := class2chroma[class.Class]
		if !found {
			panic(fmt.Sprintf("unknown chroma token type for class: %s", class))
		}

		// Match whole symbols only: the lookbehind/lookahead guards keep
		// a binding name from matching inside a longer symbol or a path.
		pattern := Words(`((?<![`+symChars+`/])|^)`, `((?![`+symChars+`])|$)`, words...)
		rootRules = append(rootRules, Rule{
			Pattern: pattern,
			Type:    tokenType,
			Mutator: nil,
		})
	}

	rootRules = append(rootRules,
		// a symbol directly following "(" is being applied; highlight it
		// as a function name rather than a variable
		Rule{`(?<=\()[` + symChars + `]+`, NameFunction, nil},
		Rule{`[` + symChars + `]+`, NameVariable, nil},
		Rule{`/`, NameFunction, nil},
		Rule{`(\[|\])`, Punctuation, nil},
		Rule{`(\{|\})`, Punctuation, nil},
		Rule{`(\(|\))`, Punctuation, nil})

	return Rules{
		"root": rootRules,
	}
}