
Commit 83f5709

Working on performance improvements
1 parent e4b2ab2 commit 83f5709

11 files changed, +17684 −210 lines


benchmark.js

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
+'use strict';
+
+const fs = require('fs');
+const path = require('path');
+const parser = require('./');
+
+const css = fs.readFileSync(path.resolve(__dirname, './test/fixtures/ayyo.css'), 'utf8');
+
+if (typeof console.profile !== 'function') {
+	console.profile = label => console.time(label);
+	console.profileEnd = label => console.timeEnd(label);
+}
+
+console.profile('Parse CSS');
+const model = parser.default(css);
+console.profileEnd('Parse CSS');
+
+console.log(model.children.length);
+
+// console.time('CSS Tree');
+// const ast = csstree.parse(css);
+// console.timeEnd('CSS Tree');
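
The `console.profile`/`console.profileEnd` shim above lets the script run under a plain `node` invocation (where `console.profile` may be unavailable) by falling back to `console.time` timers. A wall-clock variant of the same measurement is sketched below; it is not part of the commit, and it simply reuses the fixture path from benchmark.js:

'use strict';

const fs = require('fs');
const path = require('path');
const parser = require('./');

// Assumes the same fixture file that benchmark.js reads.
const css = fs.readFileSync(path.resolve(__dirname, './test/fixtures/ayyo.css'), 'utf8');

const start = process.hrtime();
const model = parser.default(css);
const [sec, nsec] = process.hrtime(start);

console.log(`Parsed ${model.children.length} top-level nodes in ${(sec * 1e3 + nsec / 1e6).toFixed(2)} ms`);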

index.js

Lines changed: 74 additions & 34 deletions
@@ -4,56 +4,96 @@ import StreamReader from '@emmetio/stream-reader';
 import Stylesheet from './lib/stylesheet';
 import createRule from './lib/rule';
 import createProperty from './lib/property';
-import token, { unknown, Token } from './lib/tokens/index';
+import { Token } from './lib/tokens/index';
 
-export default function(source) {
+import atKeyword from './lib/tokens/at-keyword';
+import string from './lib/tokens/string';
+import separator from './lib/tokens/separator';
+import comment, { multiLineComment } from './lib/tokens/comment';
+import whitespace from './lib/tokens/whitespace';
+
+const LBRACE = 40; // (
+const RBRACE = 41; // )
+
+export default function parseStylesheet(source) {
 	const stream = typeof source === 'string' ? new StreamReader(source) : source;
 	const root = new Stylesheet();
+	let ctx = root, child, accum;
+	let token, start;
 	let tokens = [];
-	let ctx = root, child, t;
 
 	while (!stream.eof()) {
-		t = token(stream);
-
-		if (!t) {
-			// unable to identify following character, consume it as unknown token
-			stream.start = stream.pos;
-			stream.next();
-			tokens.push(unknown(stream));
-		} else if (t.propertyTerminator) {
-			// Tokens consumed before are CSS property
-			tokens.push(t);
-			ctx.addChild(createProperty(stream, tokens, t));
-			tokens = [];
-		} else if (t.ruleStart) {
-			// Tokens consumed before are CSS rule
-			child = createRule(stream, tokens, t);
-			if (child) {
+		token = atKeyword(stream) || separator(stream) || whitespace(stream) || comment(stream);
+		if (token) {
+			if (accum) {
+				tokens.push(accum);
+				accum = null;
+			}
+
+			if (token.propertyTerminator) {
+				ctx.addChild(createProperty(stream, tokens, token));
+				tokens = [];
+			} else if (token.ruleStart) {
+				child = createRule(stream, tokens, token);
 				ctx.addChild(child);
 				ctx = child;
+				tokens = [];
+			} else if (token.ruleEnd) {
+				// Finalize context section
+				ctx.addChild(createProperty(stream, tokens));
+
+				if (ctx.type !== 'root') {
+					// In case of invalid stylesheet with redundant `}`,
+					// don’t modify root section.
+					ctx.contentRange.end = token.end;
+					ctx = ctx.parent;
+				}
+
+				tokens = [];
+			} else {
+				tokens.push(token);
 			}
-			tokens = [];
-		} else if (t.ruleEnd) {
-			// Finalize previously consumed tokens as CSS property
-			ctx.addChild(createProperty(stream, tokens));
-			tokens = [];
-
-			// In case of invalid stylesheet with redundant `}`,
-			// don’t modify root section.
-			if (ctx.type !== 'root') {
-				ctx.contentEnd = t;
-			}
 
-			ctx = ctx.parent || ctx;
+			continue;
+		}
+
+		start = stream.pos;
+		if (braces(stream) || string(stream) || !isNaN(stream.next())) {
+			if (!accum) {
+				accum = new Token(stream, start);
+			} else {
+				accum.end = stream.pos;
+			}
 		} else {
-			tokens.push(t);
+			throw new Error(`Unexpected end-of-stream at ${stream.pos}`);
 		}
 	}
 
-	// save unterminated tokens as property
+	// Finalize all the rest properties
	ctx.addChild(createProperty(stream, tokens));
 
 	return root;
 }
 
-export { token, createProperty, createRule, Token }
+function braces(stream) {
+	if (stream.eat(LBRACE)) {
+		let stack = 1;
+
+		// Handle edge case: do not consume single-line comment inside braces
+		// since most likely it’s an unquoted url like `http://example.com`
+		while (!stream.eof()) {
+			if (stream.eat(RBRACE)) {
+				stack--;
+				if (!stack) {
+					break;
+				}
+			} else if (!string(stream) && !multiLineComment(stream)) {
+				stream.next();
+			}
+		}
+
+		return true;
+	}
+
+	return false;
+}
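
The rewritten loop above no longer asks a generic `token()` dispatcher to classify every character (producing an `unknown` token per unidentified character). Instead it tries only the structurally significant tokenizers (at-keyword, separator, whitespace, comment) and folds everything else — including parenthesized groups and strings consumed by `braces()` — into a single growing `accum` token, which should allocate far fewer token objects per stylesheet. A simplified, standalone illustration of that accumulation idea (a sketch, not the library's actual API or token shape):

// Structurally significant characters become their own tokens, while every
// other character extends a single pending token instead of producing one
// token per character.
function roughTokenize(source) {
	const significant = new Set([';', ':', '{', '}']);
	const tokens = [];
	let accum = null;

	for (let i = 0; i < source.length; i++) {
		const ch = source[i];
		if (significant.has(ch)) {
			if (accum) {
				tokens.push(accum);
				accum = null;
			}
			tokens.push({ type: 'separator', value: ch, start: i, end: i + 1 });
		} else if (accum) {
			accum.end = i + 1;   // grow the pending token
			accum.value += ch;
		} else {
			accum = { type: 'raw', value: ch, start: i, end: i + 1 };
		}
	}

	if (accum) {
		tokens.push(accum);
	}
	return tokens;
}

// roughTokenize('a{b:c}') yields 6 tokens instead of one per character.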

lib/preparse.js

Lines changed: 0 additions & 99 deletions
This file was deleted.

lib/tokens/arguments.js

Lines changed: 30 additions & 27 deletions
@@ -1,16 +1,17 @@
 'use strict';
 
 import Token from './token';
-import FragmentsToken from './fragments';
 import { RULE_START, RULE_END } from './separator';
+import { last } from '../utils';
 
 const ARGUMENTS_START = 40; // (
 const ARGUMENTS_END = 41; // )
 
 export default function(stream, tokenConsumer) {
-	const start = stream.pos;
+	if (stream.peek() === ARGUMENTS_START) {
+		stream.start = stream.pos;
+		stream.next();
 
-	if (stream.eat(ARGUMENTS_START)) {
 		const tokens = [];
 		let t, ch;
 		// in LESS, it’s possible to separate arguments list either by `;` or `,`.
@@ -34,38 +35,51 @@ export default function(stream, tokenConsumer) {
 			tokens.push(t);
 		}
 
-		return createArguments(stream, tokens, start, usePropTerminator);
+		return createArgumentList(stream, tokens, usePropTerminator);
 	}
 }
 
 function isUnexpectedTerminator(code) {
 	return code === RULE_START || code === RULE_END;
 }
 
-function createArguments(stream, tokens, start, usePropTerminator) {
-	const sep = usePropTerminator ? semicolonSeparator : commaSeparator;
-	const list = [];
+function createArgumentList(stream, tokens, usePropTerminator) {
+	const argsToken = new Token(stream, 'arguments');
+	const isSeparator = usePropTerminator ? semicolonSeparator : commaSeparator;
 	let arg = [];
 
 	for (let i = 0, il = tokens.length, token; i < il; i++) {
 		token = tokens[i];
-		if (sep(token)) {
-			if (arg.length) {
-				list.push(new FragmentsToken(stream, arg));
-				arg = [];
-			} else {
-				list.push(new Token(stream, token.start, token.start));
-			}
+		if (isSeparator(token)) {
+			argsToken.add(createArgument(stream, arg) || createEmptyArgument(stream, token.start));
 		} else {
 			arg.push(token);
 		}
 	}
 
 	if (arg.length) {
-		list.push(new FragmentsToken(stream, arg));
+		argsToken.add(createArgument(stream, arg));
+	}
+
+	return argsToken;
+}
+
+function createArgument(stream, tokens) {
+	if (tokens && tokens.length) {
+		const arg = new Token(stream, 'argument', tokens[0].start, last(tokens).end);
+
+		for (let i = 0; i < tokens.length; i++) {
+			arg.add(tokens[i]);
+		}
+
+		return arg;
 	}
+}
 
-	return new ArgumentsToken(list, stream, start);
+function createEmptyArgument(stream, pos) {
+	const token = new Token(stream, 'argument', pos, pos);
+	token.property('empty', true);
+	return token;
 }
 
 function commaSeparator(token) {
@@ -75,14 +89,3 @@ function commaSeparator(token) {
 function semicolonSeparator(token) {
 	return token.propertyTerminator;
 }
-
-/**
- * A token that represents a set of arguments between `(` and `)`
- */
-export class ArgumentsToken extends Token {
-	constructor(list, stream, start, end) {
-		super(stream, start, end);
-		this.list = list;
-		this.type = 'arguments';
-	}
-}
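
In this rewrite, the dedicated `ArgumentsToken`/`FragmentsToken` classes are replaced with plain `Token` instances typed as 'arguments' and 'argument', and a separator with no preceding tokens produces an explicit empty 'argument' token. A rough standalone model of that grouping (a sketch over plain values; the real code operates on Token instances and stream positions):

// Split a flat token list on separators, keeping an explicit "empty" argument
// for consecutive separators such as in `fn(a,,b)`.
function groupArguments(tokens, isSeparator) {
	const args = [];
	let current = [];

	for (const token of tokens) {
		if (isSeparator(token)) {
			args.push(current.length ? current : { empty: true });
			current = [];
		} else {
			current.push(token);
		}
	}

	if (current.length) {
		args.push(current);
	}
	return args;
}

// groupArguments(['a', ',', ',', 'b'], t => t === ',')
// → [['a'], { empty: true }, ['b']]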

lib/tokens/at-keyword.js

Lines changed: 2 additions & 9 deletions
@@ -12,17 +12,10 @@ export default function(stream) {
 	if (stream.eatWhile(64 /* @ */)) {
 		const name = ident(stream);
 		if (name) {
-			return new AtKeywordToken(name, stream, start);
+			stream.start = start;
+			return new Token(stream, 'at-keyword').add(name);
 		}
 	}
 
 	stream.pos = start;
 }
-
-export class AtKeywordToken extends Token {
-	constructor(name, stream, start, end) {
-		super(stream, start, end);
-		this.type = 'at-keyword';
-		this.name = name;
-	}
-}
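
As with arguments.js, the type-specific token subclass is dropped in favour of a generic `Token` carrying a type string, child tokens added via `.add()`, and named properties via `.property()`. A hypothetical shape of such a generic token (a sketch only; the real `Token` lives in lib/tokens/token.js and its actual fields are not shown in this diff):

class GenericToken {
	constructor(type, start, end) {
		this.type = type;        // e.g. 'at-keyword', 'arguments', 'argument'
		this.start = start;
		this.end = end;
		this.children = [];
		this._props = {};
	}

	add(child) {
		this.children.push(child);
		return this;             // chainable, as in `new Token(...).add(name)`
	}

	property(name, value) {
		this._props[name] = value;
		return this;
	}
}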
