Align token names to spec
lahmatiy committed May 6, 2021
1 parent fe4fe73 commit 672af19
Showing 16 changed files with 90 additions and 75 deletions.
3 changes: 2 additions & 1 deletion lib/parser/create.js
@@ -194,8 +194,9 @@ export function createParser(config) {

eat(tokenType) {
if (this.tokenType !== tokenType) {
const tokenName = tokenNames[tokenType].slice(0, -6).replace(/-/g, ' ').replace(/^./, m => m.toUpperCase());
let message = `${/[[\](){}]/.test(tokenName) ? `"${tokenName}"` : tokenName} is expected`;
let offset = this.tokenStart;
let message = tokenNames[tokenType] + ' is expected';

// tweak message and offset
switch (tokenType) {
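In short, `eat()` now builds its error message from the spec token name: it drops the trailing `-token` suffix, turns hyphens into spaces, capitalizes the first character, and quotes names that are a bare bracket or parenthesis. A minimal standalone sketch of that formatting (the `formatExpected` helper name and the sample inputs are illustrative, not part of the commit):

```js
// Standalone sketch of the message formatting added to eat(), assuming
// tokenNames[tokenType] now holds spec names such as ')-token' or 'at-keyword-token'.
function formatExpected(specName) {
    const tokenName = specName
        .slice(0, -6)                          // drop the trailing '-token'
        .replace(/-/g, ' ')                    // hyphens become spaces
        .replace(/^./, m => m.toUpperCase());  // capitalize the first character

    // Bracket-like names get quoted so the message reads `")" is expected`
    return `${/[[\](){}]/.test(tokenName) ? `"${tokenName}"` : tokenName} is expected`;
}

console.log(formatExpected(')-token'));          // '")" is expected'
console.log(formatExpected('{-token'));          // '"{" is expected'
console.log(formatExpected('at-keyword-token')); // 'At keyword is expected'
```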
30 changes: 27 additions & 3 deletions lib/tokenizer/names.js
@@ -1,3 +1,27 @@
import * as types from './types.js';

export default Object.fromEntries(Object.entries(types).map(([a, b]) => [b, a]));
export default [
'EOF-token',
'ident-token',
'function-token',
'at-keyword-token',
'hash-token',
'string-token',
'bad-string-token',
'url-token',
'bad-url-token',
'delim-token',
'number-token',
'percentage-token',
'dimension-token',
'whitespace-token',
'CDO-token',
'CDC-token',
'colon-token',
'semicolon-token',
'comma-token',
'[-token',
']-token',
'(-token',
')-token',
'{-token',
'}-token'
];
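The mapping is no longer derived from the constant identifiers in `types.js`; the array above is indexed by the numeric token-type constants and holds the token names as they appear in the CSS Syntax specification. A rough usage sketch (the import paths and the assumption that the constants are zero-based indices into this array are inferred, not shown in the diff):

```js
// Assumed: the constants exported by types.js (Ident, RightParenthesis, ...)
// index directly into the names array above.
import tokenNames from './lib/tokenizer/names.js';
import * as types from './lib/tokenizer/types.js';

console.log(tokenNames[types.RightParenthesis]); // ')-token'  (previously 'RightParenthesis')
console.log(tokenNames[types.LeftCurlyBracket]); // '{-token'  (previously 'LeftCurlyBracket')
console.log(tokenNames[types.Ident]);            // 'ident-token'
```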
2 changes: 1 addition & 1 deletion test/fixture/ast/atrule/atrule/media.json
@@ -171,7 +171,7 @@
{
"source": "@media (foo:1",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": "@media (foo:1) ~",
2 changes: 1 addition & 1 deletion test/fixture/ast/mediaQuery/MediaQuery.json
@@ -176,7 +176,7 @@
{
"source": "(foo: bar baz)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": "(foo: 'foo')",
2 changes: 1 addition & 1 deletion test/fixture/ast/selector/Nth.json
@@ -493,7 +493,7 @@
{
"source": ":nth-child(3 n)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": ":nth-child(+ 2n)",
2 changes: 1 addition & 1 deletion test/fixture/ast/selector/functional-pseudo/dir.json
@@ -48,7 +48,7 @@
{
"source": ":dir(foo var)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
}
]
}
4 changes: 2 additions & 2 deletions test/fixture/ast/selector/functional-pseudo/has.json
@@ -167,7 +167,7 @@
{
"source": ":has(.a{)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": ":has(,.b)",
@@ -187,7 +187,7 @@
{
"source": ":has(.a,.b{)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": ":has(var(--test))",
2 changes: 1 addition & 1 deletion test/fixture/ast/selector/functional-pseudo/lang.json
@@ -48,7 +48,7 @@
{
"source": ":lang(en en)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
}
]
}
4 changes: 2 additions & 2 deletions test/fixture/ast/selector/functional-pseudo/matches.json
@@ -127,7 +127,7 @@
{
"source": ":matches(.a{)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": ":matches(,.b)",
@@ -147,7 +147,7 @@
{
"source": ":matches(.a,.b{)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": ":matches(var(--test))",
4 changes: 2 additions & 2 deletions test/fixture/ast/selector/functional-pseudo/not.json
@@ -127,7 +127,7 @@
{
"source": ":not(.a{)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": ":not(,.b)",
@@ -147,7 +147,7 @@
{
"source": ":not(.a,.b{)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": ":not(var(--test))",
6 changes: 3 additions & 3 deletions test/fixture/ast/selector/functional-pseudo/slotted.json
@@ -62,7 +62,7 @@
{
"source": "::slotted(.a{)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": "::slotted(,.b)",
@@ -72,7 +72,7 @@
{
"source": "::slotted(.a,)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": "::slotted(var(--test))",
@@ -82,7 +82,7 @@
{
"source": "::slotted(foo,bar)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
}
]
}
8 changes: 4 additions & 4 deletions test/fixture/ast/stylesheet/errors.json
@@ -83,7 +83,7 @@
{
"source": ":pseudo({}",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": ":lang(var(--test)) {}",
@@ -118,17 +118,17 @@
{
"source": ".a",
"offset": " ^",
"error": "LeftCurlyBracket is expected"
"error": "\"{\" is expected"
},
{
"source": ".a \n ",
"offset": " ^",
"error": "LeftCurlyBracket is expected"
"error": "\"{\" is expected"
},
{
"source": " \n \n.a \n ",
"offset": " ^",
"error": "LeftCurlyBracket is expected"
"error": "\"{\" is expected"
},
{
"source": ".foo { var(--side): 20px }",
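These fixture updates show how the new messages surface to users of the parser. A hedged sketch of triggering one of them (this assumes the public `parse()` entry point and the `rawMessage` field on the thrown SyntaxError; both are inferred from the fixtures, not shown in this diff):

```js
import { parse } from 'css-tree';

try {
    parse('.a'); // rule selector without a declaration block
} catch (e) {
    // Before this commit the message read 'LeftCurlyBracket is expected'
    console.log(e.rawMessage); // '"{" is expected'
}
```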
6 changes: 3 additions & 3 deletions test/fixture/ast/value/function/var.json
@@ -395,7 +395,7 @@
"should be a error here (since unmatched tokens are disallowed), but ok for now": {
"source": "var(--name, ()])",
"offset": " ^",
"_error": "RightParenthesis is expected",
"_error": ")-token is expected",
"ast": {
"type": "Function",
"name": "var",
@@ -446,12 +446,12 @@
{
"source": "var(--name, !important ())",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
},
{
"source": "var(--name, foo; bar)",
"offset": " ^",
"error": "RightParenthesis is expected"
"error": "\")\" is expected"
}
]
}
12 changes: 6 additions & 6 deletions test/fixture/tokenize/function-token.json
@@ -4,17 +4,17 @@
{
"value": "url('foo')",
"tokens": [
{ "type": "Function", "chunk": "url(" },
{ "type": "String", "chunk": "'foo'" },
{ "type": "RightParenthesis", "chunk": ")" }
{ "type": "function-token", "chunk": "url(" },
{ "type": "string-token", "chunk": "'foo'" },
{ "type": ")-token", "chunk": ")" }
]
},
{
"value": "url(\"foo\")",
"tokens": [
{ "type": "Function", "chunk": "url(" },
{ "type": "String", "chunk": "\"foo\"" },
{ "type": "RightParenthesis", "chunk": ")" }
{ "type": "function-token", "chunk": "url(" },
{ "type": "string-token", "chunk": "\"foo\"" },
{ "type": ")-token", "chunk": ")" }
]
}
],
12 changes: 1 addition & 11 deletions test/fixture/tokenize/index.js
@@ -9,13 +9,6 @@ function ensureArray(value) {
return Array.isArray(value) ? value : [];
}

function camelize(str) {
return str.replace(
/(^|-)(.)/g,
(_, prefix, ch) => prefix + ch.toUpperCase()
);
}

function processTests(tests, key, type, locator) {
return ensureArray(tests[key]).map((value, idx) => {
return {
@@ -49,10 +42,7 @@ export const tests = fs.readdirSync(__dirname).reduce(function(result, filename)

const locator = new JsonLocator(absFilename);
const tests = JSON.parse(fs.readFileSync(absFilename));
const type = path.basename(filename, '.json').replace(
/^(.+)-token$/,
(_, type) => camelize(type)
);
const type = path.basename(filename, '.json');

result[filename] = {
type,
66 changes: 33 additions & 33 deletions test/tokenizer.js
@@ -6,37 +6,37 @@ describe('tokenize/stream', () => {
const createStream = source => new TokenStream(source, tokenize);
const css = '.test\n{\n prop: url(foo/bar.jpg) url( a\\(\\33 \\).\\ \\"\\\'test ) calc(1 + 1) \\x \\aa ;\n}<!--<-->\\\n';
const tokens = [
{ type: 'Delim', chunk: '.', balance: 93 },
{ type: 'Ident', chunk: 'test', balance: 93 },
{ type: 'WhiteSpace', chunk: '\n', balance: 93 },
{ type: 'LeftCurlyBracket', chunk: '{', balance: 25 },
{ type: 'WhiteSpace', chunk: '\n ', balance: 25 },
{ type: 'Ident', chunk: 'prop', balance: 25 },
{ type: 'Colon', chunk: ':', balance: 25 },
{ type: 'WhiteSpace', chunk: ' ', balance: 25 },
{ type: 'Url', chunk: 'url(foo/bar.jpg)', balance: 25 },
{ type: 'WhiteSpace', chunk: ' ', balance: 25 },
{ type: 'Url', chunk: 'url( a\\(\\33 \\).\\ \\"\\\'test )', balance: 25 },
{ type: 'WhiteSpace', chunk: ' ', balance: 25 },
{ type: 'Function', chunk: 'calc(', balance: 18 },
{ type: 'Number', chunk: '1', balance: 18 },
{ type: 'WhiteSpace', chunk: ' ', balance: 18 },
{ type: 'Delim', chunk: '+', balance: 18 },
{ type: 'WhiteSpace', chunk: ' ', balance: 18 },
{ type: 'Number', chunk: '1', balance: 18 },
{ type: 'RightParenthesis', chunk: ')', balance: 12 },
{ type: 'WhiteSpace', chunk: ' ', balance: 25 },
{ type: 'Ident', chunk: '\\x', balance: 25 },
{ type: 'WhiteSpace', chunk: ' ', balance: 25 },
{ type: 'Ident', chunk: '\\aa ', balance: 25 },
{ type: 'Semicolon', chunk: ';', balance: 25 },
{ type: 'WhiteSpace', chunk: '\n', balance: 25 },
{ type: 'RightCurlyBracket', chunk: '}', balance: 3 },
{ type: 'CDO', chunk: '<!--', balance: 93 },
{ type: 'Delim', chunk: '<', balance: 93 },
{ type: 'CDC', chunk: '-->', balance: 93 },
{ type: 'Delim', chunk: '\\', balance: 93 },
{ type: 'WhiteSpace', chunk: '\n', balance: 93 }
{ type: 'delim-token', chunk: '.', balance: 93 },
{ type: 'ident-token', chunk: 'test', balance: 93 },
{ type: 'whitespace-token', chunk: '\n', balance: 93 },
{ type: '{-token', chunk: '{', balance: 25 },
{ type: 'whitespace-token', chunk: '\n ', balance: 25 },
{ type: 'ident-token', chunk: 'prop', balance: 25 },
{ type: 'colon-token', chunk: ':', balance: 25 },
{ type: 'whitespace-token', chunk: ' ', balance: 25 },
{ type: 'url-token', chunk: 'url(foo/bar.jpg)', balance: 25 },
{ type: 'whitespace-token', chunk: ' ', balance: 25 },
{ type: 'url-token', chunk: 'url( a\\(\\33 \\).\\ \\"\\\'test )', balance: 25 },
{ type: 'whitespace-token', chunk: ' ', balance: 25 },
{ type: 'function-token', chunk: 'calc(', balance: 18 },
{ type: 'number-token', chunk: '1', balance: 18 },
{ type: 'whitespace-token', chunk: ' ', balance: 18 },
{ type: 'delim-token', chunk: '+', balance: 18 },
{ type: 'whitespace-token', chunk: ' ', balance: 18 },
{ type: 'number-token', chunk: '1', balance: 18 },
{ type: ')-token', chunk: ')', balance: 12 },
{ type: 'whitespace-token', chunk: ' ', balance: 25 },
{ type: 'ident-token', chunk: '\\x', balance: 25 },
{ type: 'whitespace-token', chunk: ' ', balance: 25 },
{ type: 'ident-token', chunk: '\\aa ', balance: 25 },
{ type: 'semicolon-token', chunk: ';', balance: 25 },
{ type: 'whitespace-token', chunk: '\n', balance: 25 },
{ type: '}-token', chunk: '}', balance: 3 },
{ type: 'CDO-token', chunk: '<!--', balance: 93 },
{ type: 'delim-token', chunk: '<', balance: 93 },
{ type: 'CDC-token', chunk: '-->', balance: 93 },
{ type: 'delim-token', chunk: '\\', balance: 93 },
{ type: 'whitespace-token', chunk: '\n', balance: 93 }
];
const dump = tokens.map(({ type, chunk, balance }, idx) => ({
idx,
@@ -132,7 +132,7 @@ describe('tokenize/stream', () => {
it('skip()', () => {
const stream = createStream(css);
const targetTokens = tokens.filter(token =>
token.type === 'Ident' || token.type === 'Delim'
token.type === 'ident-token' || token.type === 'delim-token'
);
const actual = targetTokens
.map(function(token, idx, idents) {
Expand All @@ -143,7 +143,7 @@ describe('tokenize/stream', () => {
return tokenNames[stream.tokenType];
});

assert.strictEqual(actual.length, 8); // 4 x Indentifier + 4 x Delim
assert.strictEqual(actual.length, 8); // 4 x Indentifier + 4 x delim-token
assert.deepStrictEqual(actual, targetTokens.map(token => token.type));
});

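For completeness, a small sketch of what the updated assertions expect when walking a stream. The stream is constructed the same way as in the test above; the module path and exports are assumed, not part of this diff:

```js
// Sketch only: 'css-tree/tokenizer' and its exports are assumed to mirror
// the tokenize/tokenNames/TokenStream imports used by the test file.
import { tokenize, tokenNames, TokenStream } from 'css-tree/tokenizer';

const stream = new TokenStream('.test {}', tokenize);
const seen = [];

// Collect spec-style names until the stream reports EOF-token (index 0 in names.js)
while (tokenNames[stream.tokenType] !== 'EOF-token') {
    seen.push(tokenNames[stream.tokenType]);
    stream.next();
}

console.log(seen);
// ['delim-token', 'ident-token', 'whitespace-token', '{-token', '}-token']
```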
