-
Notifications
You must be signed in to change notification settings - Fork 2
/
compileLexerSpec.js
115 lines (100 loc) · 3.39 KB
/
compileLexerSpec.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
const expect = require('expect.js')
const compile = require('../../lib/compileLexer')
describe('E2E: Compile lexer', () => {
it('should correctly parse with simple definition', () => {
  // A literal single-character definition: each occurrence becomes its own token.
  const tokenize = compile([
    { type: 'WS', value: ' ' }
  ])
  // Input is THREE spaces — the expected offsets (0-1, 1-2, 2-3) require a
  // 3-character input; the copy had the run of spaces collapsed to one.
  expect(tokenize('   ')).to.eql({
    tokens: [
      { type: 'WS', data: { value: ' ' }, start: 0, end: 1 },
      { type: 'WS', data: { value: ' ' }, start: 1, end: 2 },
      { type: 'WS', data: { value: ' ' }, start: 2, end: 3 }
    ]
  })
})
it('should correctly parse with regex definition', () => {
  // Mix a literal definition with a case-insensitive regex definition.
  const tokenize = compile([
    { type: 'WS', value: ' ' },
    { type: 'LT', regex: '[a-z]+', regexFlags: 'i' },
  ])
  // Input has TWO leading spaces — the two separate WS tokens at 0-1 and 1-2
  // require them; the run was collapsed to one space in the copy.
  expect(tokenize('  Sth ')).to.eql({
    tokens: [
      { type: 'WS', data: { value: ' ' }, start: 0, end: 1 },
      { type: 'WS', data: { value: ' ' }, start: 1, end: 2 },
      { type: 'LT', data: { value: 'Sth' }, start: 2, end: 5 },
      { type: 'WS', data: { value: ' ' }, start: 5, end: 6 }
    ]
  })
})
it('should correctly parse with regex (named groups) definition', () => {
  // When the regex uses named capture groups, the token's `data` carries the
  // group captures instead of a plain `value`.
  const tokenize = compile([
    { type: 'WS', value: ' ' },
    { type: 'LT', regex: '(?<first>[a-z])(?<later>[a-z]*)', regexFlags: 'i' },
  ])
  // Input has TWO leading spaces (WS tokens at 0-1 and 1-2); the space run
  // was collapsed to one in the copy.
  expect(tokenize('  Sth ')).to.eql({
    tokens: [
      { type: 'WS', data: { value: ' ' }, start: 0, end: 1 },
      { type: 'WS', data: { value: ' ' }, start: 1, end: 2 },
      { type: 'LT', data: { first: 'S', later: 'th' }, start: 2, end: 5 },
      { type: 'WS', data: { value: ' ' }, start: 5, end: 6 }
    ]
  })
})
it('should correctly parse with text definition', () => {
  // Greedy regex WS ('[ ]+') plus a multi-character literal definition.
  const tokenize = compile([
    { type: 'WS', regex: '[ ]+' },
    { type: 'FN', value: '@fn' },
  ])
  // Input has TWO leading spaces and the first WS token's value is two
  // spaces — the 0-2 span demands two characters; both runs were collapsed
  // to single spaces in the copy.
  expect(tokenize('  @fn ')).to.eql({
    tokens: [
      { type: 'WS', data: { value: '  ' }, start: 0, end: 2 },
      { type: 'FN', data: { value: '@fn' }, start: 2, end: 5 },
      { type: 'WS', data: { value: ' ' }, start: 5, end: 6 }
    ]
  })
})
it('should correctly parse with validated regex definition', () => {
  // A definition with `valid` only matches when the lookahead pattern holds;
  // otherwise later definitions (WORD) get a chance.
  const tokenize = compile([
    { type: 'WS', regex: '[ ]+' },
    { type: 'FN', regex: '@fn', valid: '@fn( |$)' },
    { type: 'WORD', regex: '[^ ]*' },
  ])
  // '@fnx' fails FN's validation (no space/end after '@fn') and falls
  // through to WORD. Inputs restored to TWO leading spaces — the WS token
  // span 0-2 requires them (collapsed in the copy).
  expect(tokenize('  @fnx ')).to.eql({
    tokens: [
      { type: 'WS', data: { value: '  ' }, start: 0, end: 2 },
      { type: 'WORD', data: { value: '@fnx' }, start: 2, end: 6 },
      { type: 'WS', data: { value: ' ' }, start: 6, end: 7 }
    ]
  })
  // '@fn x' passes validation, so FN matches.
  expect(tokenize('  @fn x')).to.eql({
    tokens: [
      { type: 'WS', data: { value: '  ' }, start: 0, end: 2 },
      { type: 'FN', data: { value: '@fn' }, start: 2, end: 5 },
      { type: 'WS', data: { value: ' ' }, start: 5, end: 6 },
      { type: 'WORD', data: { value: 'x' }, start: 6, end: 7 }
    ]
  })
})
it('should fail because of validated regex definition', () => {
  // Without a fallback definition, a failed `valid` check leaves the input
  // unrecognizable at the point where FN was rejected.
  const tokenize = compile([
    { type: 'WS', regex: '[ ]+' },
    { type: 'FN', regex: '@fn', valid: '@fn( |$)' }
  ])
  // Input restored to TWO leading spaces: the reported index 2 / column 3
  // mean WS consumed characters 0-2 before '@fnx' failed (collapsed in copy).
  expect(tokenize('  @fnx ')).to.eql({
    error: 'Unrecognized token',
    index: 2,
    line: 1,
    column: 3
  })
})
it('should fail because of no definitions', () => {
  // An empty definition list can never match anything, so the very first
  // character of the input is reported as unrecognized (1-based line/column).
  const tokenize = compile([])
  const result = tokenize(' @fnx ')
  expect(result).to.eql({
    error: 'Unrecognized token',
    index: 0,
    line: 1,
    column: 1
  })
})
})