/
tokenize_test.go
63 lines (54 loc) · 2.41 KB
/
tokenize_test.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
package matcher
import (
"html"
"testing"
"github.com/stretchr/testify/assert"
)
// TestTokenize verifies that a single bare word produces exactly one token.
func TestTokenize(t *testing.T) {
	tokens := Tokenize("run")
	// assert.Equal takes (t, expected, actual): expected must come first,
	// otherwise failure diffs label the values backwards.
	assert.Equal(t, "run", tokens[0])
	assert.Len(t, tokens, 1)
}
// TestTokenizeExtraSpaces verifies that trailing whitespace does not
// produce an empty trailing token.
func TestTokenizeExtraSpaces(t *testing.T) {
	tokens := Tokenize("run away ")
	// Expected value goes first in testify's assert.Equal.
	assert.Equal(t, "run", tokens[0])
	assert.Equal(t, "away", tokens[1])
	assert.Len(t, tokens, 2)
}
// TestTokenizeQuotes verifies that both double- and single-quoted phrases
// are kept as single tokens with the quotes stripped.
func TestTokenizeQuotes(t *testing.T) {
	tokens := Tokenize("run \"very far\" 'away and away'")
	// Expected value goes first in testify's assert.Equal.
	assert.Equal(t, "run", tokens[0])
	assert.Equal(t, "very far", tokens[1])
	assert.Equal(t, "away and away", tokens[2])
	assert.Len(t, tokens, 3)
}
// TestTokenizeRules verifies that rule placeholders (<required> and
// [optional]) survive tokenization verbatim.
func TestTokenizeRules(t *testing.T) {
	tokens := Tokenize("run <speed> [distance] away")
	// Expected value goes first in testify's assert.Equal.
	assert.Equal(t, "run", tokens[0])
	assert.Equal(t, "<speed>", tokens[1])
	assert.Equal(t, "[distance]", tokens[2])
	assert.Equal(t, "away", tokens[3])
	assert.Len(t, tokens, 4)
}
// TestTokenizeFlags verifies that long flags, quoted flag values, bare long
// flags, and short flags each become a single token (quotes stripped from
// the value).
func TestTokenizeFlags(t *testing.T) {
	tokens := Tokenize("run --distance=far --speed=\"super very fast\" --skip -x ")
	// Expected value goes first in testify's assert.Equal.
	assert.Equal(t, "run", tokens[0])
	assert.Equal(t, "--distance=far", tokens[1])
	assert.Equal(t, "--speed=super very fast", tokens[2])
	assert.Equal(t, "--skip", tokens[3])
	assert.Equal(t, "-x", tokens[4])
	assert.Len(t, tokens, 5)
}
// TestTokenizeUnicodeWhitespace verifies that a non-breaking space (U+00A0)
// separates tokens just like ASCII whitespace.
func TestTokenizeUnicodeWhitespace(t *testing.T) {
	// "&nbsp;" unescapes to U+00A0. Passing a literal space through
	// html.UnescapeString is a no-op and would only test ASCII whitespace.
	tokens := Tokenize("run" + html.UnescapeString("&nbsp;") + "far away")
	// Expected value goes first in testify's assert.Equal.
	assert.Equal(t, "run", tokens[0])
	assert.Equal(t, "far", tokens[1])
	assert.Equal(t, "away", tokens[2])
}
// TestTokenizeUnicodeTokens verifies that multi-byte (emoji) tokens are kept
// intact, including inside quoted phrases.
func TestTokenizeUnicodeTokens(t *testing.T) {
	tokens := Tokenize("🚀🐢 far 🌐🏳️🌈 \"🍻 . 🔥\"")
	// Expected value goes first in testify's assert.Equal.
	assert.Equal(t, "🚀🐢", tokens[0])
	assert.Equal(t, "far", tokens[1])
	assert.Equal(t, "🌐🏳️🌈", tokens[2])
	assert.Equal(t, "🍻 . 🔥", tokens[3])
}