// common.js — css-tree test suite (162 lines, 132 loc)
import fs from 'fs';
import path from 'path';
import assert from 'assert';
import { parse, walk, fork, lexer, generate, version, tokenTypes } from 'css-tree';
// Fixture CSS containing samples of every supported node type;
// line endings are normalized so comparisons are platform-independent.
const fixtureFilename = './fixtures/stringify.css';
const fixture = normalize(fs.readFileSync(fixtureFilename, 'utf-8'));
// All node types known to the parser, sorted for stable comparisons.
const types = Object.keys(parse.config.node).sort()
    .filter(type => type !== 'DeclarationList'); // DeclarationList doesn't appear in StyleSheet
// Collapse every line-ending form (\r\n, \r, \f, \n) into a single '\n'
// so fixture text compares equal regardless of platform.
function normalize(str) {
    const anyLineBreak = /\r\n?|\f|\n/g;
    return str.replace(anyLineBreak, '\n');
}
describe('Common', () => {
    it('should expose version', () => {
        // Exported version must stay in sync with package.json
        assert.strictEqual(version, JSON.parse(fs.readFileSync('./package.json')).version);
    });

    // NOTE: title typo "strigify" fixed -> "stringify"
    it('JSON.stringify()', () => {
        const ast = parse(fixture, {
            filename: path.basename(fixtureFilename),
            positions: true
        });

        // Uncomment to regenerate the fixture:
        // fs.writeFileSync(fixtureFilename.replace(/\.css/, '.ast'), JSON.stringify(ast, null, 4) + '\n', 'utf-8');

        assert.strictEqual(
            JSON.stringify(ast, null, 4),
            normalize(fs.readFileSync('./fixtures/stringify.ast', 'utf-8').trim())
        );
    });

    it('test CSS should contain all node types', () => {
        const foundTypes = new Set();
        const ast = parse(fixture);

        walk(ast, node => foundTypes.add(node.type));

        assert.deepStrictEqual(
            [...foundTypes].sort(),
            types.sort().filter(type => type !== 'WhiteSpace') // FIXME: temporary filter white space
        );
    });

    describe('extension in base classes should not cause to exception', () => {
        // Deliberately pollute the native prototypes to ensure fork()
        // iterates own properties only and survives such extensions.
        beforeEach(() => {
            Object.prototype.objectExtraField = () => {};
            Array.prototype.arrayExtraField = () => {};
        });
        afterEach(() => {
            delete Object.prototype.objectExtraField;
            delete Array.prototype.arrayExtraField;
        });

        it('fork()', () => {
            assert.doesNotThrow(() => {
                fork({
                    node: {
                        Test: {
                            structure: {
                                foo: 'Rule',
                                bar: [['Rule']]
                            }
                        }
                    }
                });
            });
        });
    });

    describe('custom tokenizer should work via fork()', () => {
        it('custom tokenizer should be set', () => {
            const customTokenizer = () => {};
            const forkedCssTree = fork({
                tokenize: customTokenizer
            });
            assert.strictEqual(forkedCssTree.tokenize, customTokenizer);
        });

        it('custom tokenizer should affect the parser', () => {
            // Emits the whole input as a single Ident token
            const customTokenizer = (source, onToken) => {
                onToken(tokenTypes.Ident, 0, source.length);
            };
            const forkedCssTree = fork({
                tokenize: customTokenizer
            });
            const parserOptions = { context: 'value' };
            const input = 'foo(bar)';
            const defaultAst = parse(input, parserOptions);
            const forkAst = forkedCssTree.parse(input, parserOptions);

            // Default parser should give an AST with a function node whose first child is an identifier
            // (fixed: previously asserted forkAst.children.size here, duplicating the check below)
            assert.strictEqual(defaultAst.children.size, 1);
            assert.strictEqual(defaultAst.children.first.type, 'Function');
            assert.strictEqual(defaultAst.children.first.children.size, 1);
            assert.strictEqual(defaultAst.children.first.children.first.type, 'Identifier');

            // Forked parser should give an AST with an identifier node
            assert.strictEqual(forkAst.children.size, 1);
            assert.strictEqual(forkAst.children.first.type, 'Identifier');
        });

        it('custom tokenizer should affect the lexer', () => {
            const customTokenizer = (source, onToken) => {
                onToken(tokenTypes.Ident, 0, source.length);
            };
            const forkedCssTree = fork({
                tokenize: customTokenizer
            });
            const syntax = 'foo( <number> )';
            const input = 'foo(1)';

            // Default lexer should match the function syntax
            assert(lexer.match(syntax, input).matched);

            // Forked lexer should not match the function syntax, because the input isn't tokenized as a function
            const forkedResult = forkedCssTree.lexer.match(syntax, input);
            assert.strictEqual(forkedResult.matched, null);
        });

        it('custom tokenizer should affect the generator', () => {
            // This custom tokenizer only generates a single token
            const customTokenizer = (_, onToken) => {
                onToken(tokenTypes.Ident, 0, 1);
            };
            const forkedCssTree = fork({
                tokenize: customTokenizer,
                node: {
                    Identifier: {
                        structure: {
                            name: String
                        },
                        generate(node) {
                            // This should be the custom tokenizer
                            this.tokenize(node.name);
                        }
                    }
                }
            });
            const parserOptions = { context: 'value' };
            const input = 'foo';
            const ast = parse(input, parserOptions);

            // Default generator should generate the whole input as-is
            assert.equal(generate(ast), input);

            // Custom tokenizer only generates a single token for the first character,
            // so if the generator uses the custom tokenizer, it should only generate the first character
            assert.equal(forkedCssTree.generate(ast), input[0]);
        });
    });
});