From 958397a72b29f4152c7a43c0e9b3c8bc1ad7744c Mon Sep 17 00:00:00 2001
From: Roman Dvornov
Date: Wed, 11 Mar 2020 18:33:20 +0100
Subject: [PATCH 01/11] Add onComment parser option, add TokenStream#forEachToken() method

---
 CHANGELOG.md              | 46 ++++++++++++++++++---------------
 docs/parsing.md           |  7 +++++
 lib/common/TokenStream.js | 27 +++++++++++++-------
 lib/parser/create.js      | 16 +++++++++++-
 test/parse.js             | 54 +++++++++++++++++++++++++++++++++++++++
 5 files changed, 120 insertions(+), 30 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index ecda9e55..0076c237 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,26 +1,32 @@
 ## next

-- Removed `List#each()` and `List#eachRight()` methods, `List#forEach()` and `List#forEachRight()` should be used instead
-- Changed `List` to be iterable (iterates data)
-- Changed `List#first`, `List#last` and `List#isEmpty` to getters
-- Changed `List#getSize()` method to `List#size` getters
-- Removed `Lexer#matchDeclaration()` method
-- Exposed parser's inner configuration as `parse.config`
-- Changed `TokenStream#getRawLength()` to take second parameter as a function (rule) that check a char code for stop scan
-- Added `consumeUntilBalanceEnd()`, `consumeUntilLeftCurlyBracket()`, `consumeUntilLeftCurlyBracketOrSemicolon()`, `consumeUntilExclamationMarkOrSemicolon()` and `consumeUntilSemicolonIncluded()` methods to parser to use with `Raw` instead of `Raw.mode`
-- Fixed `Lexer#dump()` to dump atrules syntaxes as well
+- Exposed `version` of the lib (i.e. `import { version } from 'css-tree'`)
 - Removed `dist/default-syntax.json` from package
-- Exposed `version` of the lib
-- Changed generator's handler `chunk()` to `token()` and `tokenize()`
-- Parser:
-    - Produce `{ type: 'Combinator', name: ' ' }` node instead of `WhiteSpace` node
-    - Don't produce `WhiteSpace` nodes anymore, with the single exception: a custom property declaration with no tokens in a value except a white space
-    - Add a whitespace to `+` and `-` operators, when a whitespace before and/or after an operatorß
-- Changed `Nth` to serialize `+n` as `n`
-- Changed generator to determine when a white space required between emiting tokens
-- Added `mode` option for `generate()` to specify a mode of token separation: `spec` or `safe` (by default)
-- Renamed `HexColor` node type into `Hash`
-- Removed `element()` specific parsing rules
+- Tokenizer
+    - Changed `TokenStream#getRawLength()` to take a second parameter as a function (rule) that checks a char code to stop scanning
+    - Added `TokenStream#forEachToken(fn)` method
+- Parser
+    - Renamed `HexColor` node type into `Hash`
+    - Changed selector parsing to produce `{ type: 'Combinator', name: ' ' }` node instead of `WhiteSpace` node
+    - Don't produce `WhiteSpace` nodes anymore, with the single exception: a custom property declaration with a single white space token as a value
+    - Parser adds a white space to `+` and `-` operators when a white space is present before and/or after an operator
+    - Removed `element()` specific parsing rules
+    - Exposed parser's inner configuration as `parse.config`
+    - Added `onComment` option
+    - Added `consumeUntilBalanceEnd()`, `consumeUntilLeftCurlyBracket()`, `consumeUntilLeftCurlyBracketOrSemicolon()`, `consumeUntilExclamationMarkOrSemicolon()` and `consumeUntilSemicolonIncluded()` methods to parser's inner API to use with `Raw` instead of `Raw.mode`
+- Generator
+    - Generator now determines itself when a white space is required between emitted tokens
+    - Changed `chunk()` handler to `token()` (puts a token to the output)
and `tokenize()` (splits a string into tokens and puts each of them to the output)
+    - Added `mode` option for `generate()` to specify a mode of token separation: `spec` or `safe` (by default)
+    - Changed `Nth` serialization to serialize `+n` as `n`
+- Lexer
+    - Removed `Lexer#matchDeclaration()` method
+    - Fixed `Lexer#dump()` to dump atrules syntaxes as well
+- List
+    - Changed `List` to be iterable (iterates data)
+    - Changed `List#first`, `List#last` and `List#isEmpty` to getters
+    - Changed `List#getSize()` method to `List#size` getter
+    - Removed `List#each()` and `List#eachRight()` methods, `List#forEach()` and `List#forEachRight()` should be used instead

 ## 1.0.0-alpha.39 (December 5, 2019)

diff --git a/docs/parsing.md b/docs/parsing.md
index 820cb925..30fed4ce 100644
--- a/docs/parsing.md
+++ b/docs/parsing.md
@@ -109,6 +109,13 @@ csstree.parse('example { foo; bar: 1! }', {
 // ------------------------------^
 ```

+### onComment
+
+Type: `function(value, loc)` or `null`
+Default: `null`
+
+A handler to call for every comment in the parsed source. The value is passed without the surrounding `/*` and `*/`. `loc` will be `null` unless the `positions` option is set to `true`.
+
 ### filename

 Type: `string`
diff --git a/lib/common/TokenStream.js b/lib/common/TokenStream.js
index be5346c3..d0b79bd9 100644
--- a/lib/common/TokenStream.js
+++ b/lib/common/TokenStream.js
@@ -261,21 +261,30 @@ module.exports = class TokenStream {
         }
     }

-    dump() {
-        let offset = this.firstCharOffset;
-
-        return Array.prototype.slice.call(this.offsetAndType, 0, this.tokenCount).map(function(item, idx) {
+    forEachToken(fn) {
+        for (let i = 0, offset = this.firstCharOffset; i < this.tokenCount; i++) {
             const start = offset;
+            const item = this.offsetAndType[i];
             const end = item & OFFSET_MASK;
+            const type = item >> TYPE_SHIFT;

             offset = end;

-            return {
-                idx,
-                type: NAME[item >> TYPE_SHIFT],
+            fn(type, start, end, i);
+        }
+    }
+
+    dump() {
+        const tokens = new Array(this.tokenCount);
+
+        this.forEachToken((type, start, end, index) => {
+            tokens[index] = {
+                idx: index,
+                type: NAME[type],
                 chunk: this.source.substring(start, end),
-                balance: this.balance[idx]
+                balance: this.balance[index]
             };
-        }, this);
+        });
+
+        return tokens;
     }
 };
diff --git a/lib/parser/create.js b/lib/parser/create.js
index f52f7e77..395b3140 100644
--- a/lib/parser/create.js
+++ b/lib/parser/create.js
@@ -7,6 +7,7 @@ const { findWhiteSpaceStart, cmpChar, cmpStr } = require('../tokenizer/utils');
 const NAME = require('../tokenizer/names');
 const {
     WhiteSpace,
+    Comment,
     Ident,
     Function: FunctionToken,
     Url,
@@ -290,12 +291,25 @@
     parser.parseValue = 'parseValue' in options ? Boolean(options.parseValue) : true;
     parser.parseCustomProperty = 'parseCustomProperty' in options ? Boolean(options.parseCustomProperty) : false;

-    const { context = 'default' } = options;
+    const { context = 'default', onComment } = options;

     if (context in parser.context === false) {
         throw new Error('Unknown context `' + context + '`');
     }

+    if (typeof onComment === 'function') {
+        parser.forEachToken((type, start, end) => {
+            if (type === Comment) {
+                const loc = parser.getLocation(start, end);
+                const value = cmpStr(source, end - 2, end, '*/')
+                    ?
source.slice(start + 2, end - 2) + : source.slice(start + 2, end); + + onComment(value, loc); + } + }); + } + const ast = parser.context[context].call(parser, options); if (!parser.eof) { diff --git a/test/parse.js b/test/parse.js index f7f8a410..994dcef6 100644 --- a/test/parse.js +++ b/test/parse.js @@ -280,6 +280,60 @@ describe('parse', () => { }); }); + describe('onComment', () => { + const source = '/*123*/.foo[a=/* 234 */] {\n color: red; /* 345*/\n background: url(/*456*/foo);\n} /*567*'; + + it('with no locations', () => { + const actual = []; + parse(source, { + onComment(value, loc) { + actual.push({ value, loc }); + } + }); + + assert.deepEqual(actual, [ + { value: '123', loc: null }, + { value: ' 234 ', loc: null }, + { value: ' 345', loc: null }, + { value: '567*', loc: null } + ]); + }); + + it('with locations', () => { + const actual = []; + const offsetToPos = offset => { + const lines = source.slice(0, offset).split('\n'); + return { + offset, + line: lines.length, + column: lines.pop().length + 1 + }; + }; + const loc = (start, end) => { + return { + source: 'test.css', + start: offsetToPos(start), + end: offsetToPos(end) + }; + }; + + parse(source, { + filename: 'test.css', + positions: true, + onComment(value, loc) { + actual.push({ value, loc }); + } + }); + + assert.deepEqual(actual, [ + { value: '123', loc: loc(0, 7) }, + { value: ' 234 ', loc: loc(14, 23) }, + { value: ' 345', loc: loc(41, 49) }, + { value: '567*', loc: loc(83, 89) } + ]); + }); + }); + describe('positions', () => { it('should start with line 1 column 1 by default', () => { const positions = []; From 4f40e7d10479aafeb66f4eb74a97ecc182f7b033 Mon Sep 17 00:00:00 2001 From: Roman Dvornov Date: Wed, 11 Mar 2020 18:33:26 +0100 Subject: [PATCH 02/11] Update deps --- package-lock.json | 488 ++++++++++++++++++++++++++++------------------ package.json | 14 +- 2 files changed, 307 insertions(+), 195 deletions(-) diff --git a/package-lock.json b/package-lock.json index 0f42a169..e9074479 100644 --- a/package-lock.json +++ b/package-lock.json @@ -119,47 +119,58 @@ } }, "@rollup/plugin-commonjs": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-11.0.1.tgz", - "integrity": "sha512-SaVUoaLDg3KnIXC5IBNIspr1APTYDzk05VaYcI6qz+0XX3ZlSCwAkfAhNSOxfd5GAdcm/63Noi4TowOY9MpcDg==", + "version": "11.0.2", + "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-11.0.2.tgz", + "integrity": "sha512-MPYGZr0qdbV5zZj8/2AuomVpnRVXRU5XKXb3HVniwRoRCreGlf5kOE081isNWeiLIi6IYkwTX9zE0/c7V8g81g==", "dev": true, "requires": { "@rollup/pluginutils": "^3.0.0", - "estree-walker": "^0.6.1", + "estree-walker": "^1.0.1", "is-reference": "^1.1.2", "magic-string": "^0.25.2", "resolve": "^1.11.0" } }, "@rollup/plugin-json": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@rollup/plugin-json/-/plugin-json-4.0.1.tgz", - "integrity": "sha512-soxllkhOGgchswBAAaTe7X9G80U2tjjHvXv0sBrriLJcC/89PkP59iTrKPOfbz3SjX088mKDmMhAscuyLz8ZSg==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@rollup/plugin-json/-/plugin-json-4.0.2.tgz", + "integrity": "sha512-t4zJMc98BdH42mBuzjhQA7dKh0t4vMJlUka6Fz0c+iO5IVnWaEMiYBy1uBj9ruHZzXBW23IPDGL9oCzBkQ9Udg==", "dev": true, "requires": { - "rollup-pluginutils": "^2.5.0" + "@rollup/pluginutils": "^3.0.4" } }, "@rollup/plugin-node-resolve": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-7.0.0.tgz", - "integrity": 
"sha512-+vOx2+WMBMFotYKM3yYeDGZxIvcQ7yO4g+SuKDFsjKaq8Lw3EPgfB6qNlp8Z/3ceDCEhHvC9/b+PgBGwDQGbzQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-7.1.1.tgz", + "integrity": "sha512-14ddhD7TnemeHE97a4rLOhobfYvUVcaYuqTnL8Ti7Jxi9V9Jr5LY7Gko4HZ5k4h4vqQM0gBQt6tsp9xXW94WPA==", "dev": true, "requires": { - "@rollup/pluginutils": "^3.0.0", + "@rollup/pluginutils": "^3.0.6", "@types/resolve": "0.0.8", "builtin-modules": "^3.1.0", "is-module": "^1.0.0", - "resolve": "^1.11.1" + "resolve": "^1.14.2" + }, + "dependencies": { + "resolve": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + } } }, "@rollup/pluginutils": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-3.0.4.tgz", - "integrity": "sha512-buc0oeq2zqQu2mpMyvZgAaQvitikYjT/4JYhA4EXwxX8/g0ZGHoGiX+0AwmfhrNqH4oJv67gn80sTZFQ/jL1bw==", + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-3.0.8.tgz", + "integrity": "sha512-rYGeAc4sxcZ+kPG/Tw4/fwJODC3IXHYDH4qusdN/b6aLw5LPUbzpecYbEJh4sVQGPFJxd2dBU4kc1H3oy9/bnw==", "dev": true, "requires": { - "estree-walker": "^0.6.1" + "estree-walker": "^1.0.1" } }, "@types/color-name": { @@ -175,9 +186,9 @@ "dev": true }, "@types/node": { - "version": "12.7.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.7.11.tgz", - "integrity": "sha512-Otxmr2rrZLKRYIybtdG/sgeO+tHY20GxeDjcGmUnmmlCWyEnv2a2x1ZXBo3BTec4OiTXMQCiazB8NMBf0iRlFw==", + "version": "13.9.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.9.0.tgz", + "integrity": "sha512-0ARSQootUG1RljH2HncpsY2TJBfGQIKOOi7kxzUY6z54ePu/ZD+wJA8zI2Q6v8rol2qpG/rvqsReco8zNMPvhQ==", "dev": true }, "@types/resolve": { @@ -190,39 +201,50 @@ } }, "acorn": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", - "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz", + "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==", "dev": true }, "acorn-jsx": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.0.2.tgz", - "integrity": "sha512-tiNTrP1MP0QrChmD2DdupCr6HWSFeKVw5d/dHTu4Y7rkAkRhU/Dt7dphAfIUyxtHpl/eBVip5uTNSpQJHylpAw==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.2.0.tgz", + "integrity": "sha512-HiUX/+K2YpkpJ+SzBffkM/AQ2YE03S0U1kjTLVpoJdhZMOWy8qvXVN9JdLqv2QsaQ6MPYQIuNmwD8zOiYUofLQ==", "dev": true }, "ajv": { - "version": "6.10.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz", - "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==", + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.0.tgz", + "integrity": "sha512-D6gFiFA0RRLyUbvijN74DWAjXSFxWKaWP7mldxkVhyhAV3+SWA9HEJPHQ2c9soIeTFJqcSdFDGFgdqs1iUU2Hw==", "dev": true, "requires": { - "fast-deep-equal": "^2.0.1", + "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "ansi-escapes": { - "version": "3.2.0", - "resolved": 
"https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", - "dev": true + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", + "integrity": "sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==", + "dev": true, + "requires": { + "type-fest": "^0.11.0" + }, + "dependencies": { + "type-fest": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", + "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==", + "dev": true + } + } }, "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", "dev": true }, "ansi-styles": { @@ -292,9 +314,9 @@ "dev": true }, "aws4": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", - "integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.9.1.tgz", + "integrity": "sha512-wMHVg2EOHaMRxbzgFJ9gtjOOCrI80OHLG14rxi28XwOW8ux6IiEbRCGGGqCtdAIg4FQCbW20k9RsT4y3gJlFug==", "dev": true }, "balanced-match": { @@ -449,12 +471,12 @@ } }, "cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", "dev": true, "requires": { - "restore-cursor": "^2.0.0" + "restore-cursor": "^3.1.0" } }, "cli-width": { @@ -559,17 +581,16 @@ "dev": true }, "coveralls": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/coveralls/-/coveralls-3.0.6.tgz", - "integrity": "sha512-Pgh4v3gCI4T/9VijVrm8Ym5v0OgjvGLKj3zTUwkvsCiwqae/p6VLzpsFNjQS2i6ewV7ef+DjFJ5TSKxYt/mCrA==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/coveralls/-/coveralls-3.0.9.tgz", + "integrity": "sha512-nNBg3B1+4iDox5A5zqHKzUTiwl2ey4k2o0NEcVZYvl+GOSJdKBj4AJGKLv6h3SvWch7tABHePAQOSZWM9E2hMg==", "dev": true, "requires": { - "growl": "~> 1.10.0", "js-yaml": "^3.13.1", - "lcov-parse": "^0.0.10", + "lcov-parse": "^1.0.0", "log-driver": "^1.2.7", "minimist": "^1.2.0", - "request": "^2.86.0" + "request": "^2.88.0" } }, "cp-file": { @@ -704,9 +725,9 @@ "dev": true }, "eslint": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.5.1.tgz", - "integrity": "sha512-32h99BoLYStT1iq1v2P9uwpyznQ4M2jRiFB6acitKz52Gqn+vPaMDUTB1bYi1WN4Nquj2w+t+bimYUG83DC55A==", + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", + "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", "dev": true, "requires": { "@babel/code-frame": "^7.0.0", @@ -716,19 +737,19 @@ "debug": "^4.0.1", "doctrine": "^3.0.0", "eslint-scope": "^5.0.0", - "eslint-utils": "^1.4.2", + "eslint-utils": "^1.4.3", 
"eslint-visitor-keys": "^1.1.0", - "espree": "^6.1.1", + "espree": "^6.1.2", "esquery": "^1.0.1", "esutils": "^2.0.2", "file-entry-cache": "^5.0.1", "functional-red-black-tree": "^1.0.1", "glob-parent": "^5.0.0", - "globals": "^11.7.0", + "globals": "^12.1.0", "ignore": "^4.0.6", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", - "inquirer": "^6.4.1", + "inquirer": "^7.0.0", "is-glob": "^4.0.0", "js-yaml": "^3.13.1", "json-stable-stringify-without-jsonify": "^1.0.1", @@ -737,7 +758,7 @@ "minimatch": "^3.0.4", "mkdirp": "^0.5.1", "natural-compare": "^1.4.0", - "optionator": "^0.8.2", + "optionator": "^0.8.3", "progress": "^2.0.0", "regexpp": "^2.0.1", "semver": "^6.1.2", @@ -746,6 +767,17 @@ "table": "^5.2.3", "text-table": "^0.2.0", "v8-compile-cache": "^2.0.3" + }, + "dependencies": { + "globals": { + "version": "12.4.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", + "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", + "dev": true, + "requires": { + "type-fest": "^0.8.1" + } + } } }, "eslint-scope": { @@ -759,12 +791,12 @@ } }, "eslint-utils": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.2.tgz", - "integrity": "sha512-eAZS2sEUMlIeCjBeubdj45dmBHQwPHWyBcT1VSYB7o9x9WRRqKxyUoiXlRjyAwzN7YEzHJlYg0NmzDRWx6GP4Q==", + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", + "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", "dev": true, "requires": { - "eslint-visitor-keys": "^1.0.0" + "eslint-visitor-keys": "^1.1.0" } }, "eslint-visitor-keys": { @@ -774,13 +806,13 @@ "dev": true }, "espree": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-6.1.1.tgz", - "integrity": "sha512-EYbr8XZUhWbYCqQRW0duU5LxzL5bETN6AjKBGy1302qqzPaCH10QbRg3Wvco79Z8x9WbiE8HYB4e75xl6qUYvQ==", + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-6.2.1.tgz", + "integrity": "sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==", "dev": true, "requires": { - "acorn": "^7.0.0", - "acorn-jsx": "^5.0.2", + "acorn": "^7.1.1", + "acorn-jsx": "^5.2.0", "eslint-visitor-keys": "^1.1.0" } }, @@ -791,9 +823,9 @@ "dev": true }, "esquery": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.0.1.tgz", - "integrity": "sha512-SmiyZ5zIWH9VM+SRUReLS5Q8a7GxtRdxEBVZpm98rJM7Sb+A9DVCndXfkeFUd3byderg+EbDkfnevfCwynWaNA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.1.0.tgz", + "integrity": "sha512-MxYW9xKmROWF672KqjO75sszsA8Mxhw06YFeS5VHlB98KDHbOSurm3ArsjO60Eaf3QmGMCP1yn+0JQkNLo/97Q==", "dev": true, "requires": { "estraverse": "^4.0.0" @@ -815,9 +847,9 @@ "dev": true }, "estree-walker": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz", - "integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-1.0.1.tgz", + "integrity": "sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg==", "dev": true }, "esutils": { @@ -850,15 +882,15 @@ "dev": true }, "fast-deep-equal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", - "integrity": 
"sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz", + "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA==", "dev": true }, "fast-json-stable-stringify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "dev": true }, "fast-levenshtein": { @@ -868,9 +900,9 @@ "dev": true }, "figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", "dev": true, "requires": { "escape-string-regexp": "^1.0.5" @@ -1117,9 +1149,9 @@ "dev": true }, "import-fresh": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.1.0.tgz", - "integrity": "sha512-PpuksHKGt8rXfWEr9m9EHIpgyyaltBy8+eF6GJM0QCAxMgxCfucMF3mjecK2QsJr0amJW7gTqh5/wht0z2UhEQ==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz", + "integrity": "sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==", "dev": true, "requires": { "parent-module": "^1.0.0", @@ -1149,24 +1181,85 @@ "dev": true }, "inquirer": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", - "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.1.0.tgz", + "integrity": "sha512-5fJMWEmikSYu0nv/flMc475MhGbB7TSPd/2IpFV4I4rMklboCH2rQjYY5kKiYGHqUF9gvaambupcJFFG9dvReg==", "dev": true, "requires": { - "ansi-escapes": "^3.2.0", - "chalk": "^2.4.2", - "cli-cursor": "^2.1.0", + "ansi-escapes": "^4.2.1", + "chalk": "^3.0.0", + "cli-cursor": "^3.1.0", "cli-width": "^2.0.0", "external-editor": "^3.0.3", - "figures": "^2.0.0", - "lodash": "^4.17.12", - "mute-stream": "0.0.7", - "run-async": "^2.2.0", - "rxjs": "^6.4.0", - "string-width": "^2.1.0", - "strip-ansi": "^5.1.0", + "figures": "^3.0.0", + "lodash": "^4.17.15", + "mute-stream": "0.0.8", + "run-async": "^2.4.0", + "rxjs": "^6.5.3", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0", "through": "^2.3.6" + }, + "dependencies": { + "ansi-styles": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", + "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "dev": true, + "requires": { + "@types/color-name": "^1.1.1", + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", + "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + }, + "supports-color": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "is-arrayish": { @@ -1425,9 +1518,9 @@ } }, "lcov-parse": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/lcov-parse/-/lcov-parse-0.0.10.tgz", - "integrity": "sha1-GwuP+ayceIklBYK3C3ExXZ2m2aM=", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lcov-parse/-/lcov-parse-1.0.0.tgz", + "integrity": "sha1-6w1GtUER68VhrLTECO+TY73I9+A=", "dev": true }, "levn": { @@ -1499,9 +1592,9 @@ } }, "magic-string": { - "version": "0.25.4", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.4.tgz", - "integrity": "sha512-oycWO9nEVAP2RVPbIoDoA4Y7LFIJ3xRYov93gAyJhZkET1tNuB0u7uWkZS2LpBWTJUWnmau/To8ECWRC+jKNfw==", + "version": "0.25.7", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.7.tgz", + "integrity": "sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA==", "dev": true, "requires": { "sourcemap-codec": "^1.4.4" @@ -1554,24 +1647,24 @@ "dev": true }, "mime-db": { - "version": "1.40.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz", - "integrity": "sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA==", + "version": "1.43.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz", + "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ==", "dev": true }, "mime-types": { - "version": "2.1.24", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz", - "integrity": "sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ==", + "version": "2.1.26", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", + "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", "dev": true, "requires": { - "mime-db": "1.40.0" + "mime-db": "1.43.0" } }, "mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + 
"version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", "dev": true }, "minimatch": { @@ -1584,9 +1677,9 @@ } }, "minimist": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", - "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.3.tgz", + "integrity": "sha512-+bMdgqjMN/Z77a6NlY/I3U5LlRDbnmaAk6lDveAPKwSpcPM4tKAuYsvYF8xjhOPXhOYGe/73vVLVez5PW+jqhw==", "dev": true }, "mkdirp": { @@ -1672,9 +1765,9 @@ "dev": true }, "mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", "dev": true }, "natural-compare": { @@ -1770,12 +1863,12 @@ } }, "onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", + "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", "dev": true, "requires": { - "mimic-fn": "^1.0.0" + "mimic-fn": "^2.1.0" } }, "optimist": { @@ -1803,17 +1896,17 @@ } }, "optionator": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.2.tgz", - "integrity": "sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=", + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", "dev": true, "requires": { "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.4", + "fast-levenshtein": "~2.0.6", "levn": "~0.3.0", "prelude-ls": "~1.1.2", "type-check": "~0.3.2", - "wordwrap": "~1.0.0" + "word-wrap": "~1.2.3" } }, "os-homedir": { @@ -1964,9 +2057,9 @@ "dev": true }, "psl": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.4.0.tgz", - "integrity": "sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.7.0.tgz", + "integrity": "sha512-5NsSEDv8zY70ScRnOTn7bK7eanl2MvFrOrS/R6x+dBt5g1ghnj9Zv90kO8GwT8gxcu2ANyFprnFYB85IogIJOQ==", "dev": true }, "punycode": { @@ -2018,9 +2111,9 @@ } }, "request": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", + "version": "2.88.2", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", + "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", "dev": true, "requires": { "aws-sign2": "~0.7.0", @@ -2030,7 +2123,7 @@ "extend": "~3.0.2", "forever-agent": "~0.6.1", "form-data": "~2.3.2", - "har-validator": "~5.1.0", + "har-validator": "~5.1.3", "http-signature": "~1.2.0", "is-typedarray": "~1.0.0", "isstream": "~0.1.2", @@ -2040,7 +2133,7 @@ "performance-now": "^2.1.0", "qs": "~6.5.2", 
"safe-buffer": "^5.1.2", - "tough-cookie": "~2.4.3", + "tough-cookie": "~2.5.0", "tunnel-agent": "^0.6.0", "uuid": "^3.3.2" } @@ -2073,12 +2166,12 @@ "dev": true }, "restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", "dev": true, "requires": { - "onetime": "^2.0.0", + "onetime": "^5.1.0", "signal-exit": "^3.0.2" } }, @@ -2092,9 +2185,9 @@ } }, "rollup": { - "version": "1.23.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-1.23.1.tgz", - "integrity": "sha512-95C1GZQpr/NIA0kMUQmSjuMDQ45oZfPgDBcN0yZwBG7Kee//m7H68vgIyg+SPuyrTZ5PrXfyLK80OzXeKG5dAA==", + "version": "1.32.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-1.32.1.tgz", + "integrity": "sha512-/2HA0Ec70TvQnXdzynFffkjA6XN+1e2pEv/uKS5Ulca40g2L7KuOE3riasHoNVHOsFD5KKZgDsMk1CP3Tw9s+A==", "dev": true, "requires": { "@types/estree": "*", @@ -2103,9 +2196,9 @@ } }, "rollup-plugin-terser": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/rollup-plugin-terser/-/rollup-plugin-terser-5.2.0.tgz", - "integrity": "sha512-jQI+nYhtDBc9HFRBz8iGttQg7li9klmzR62RG2W2nN6hJ/FI2K2ItYQ7kJ7/zn+vs+BP1AEccmVRjRN989I+Nw==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/rollup-plugin-terser/-/rollup-plugin-terser-5.3.0.tgz", + "integrity": "sha512-XGMJihTIO3eIBsVGq7jiNYOdDMb3pVxuzY0uhOE/FM4x/u9nQgr3+McsjzqBn3QfHIpNSZmFnpoKAwHBEcsT7g==", "dev": true, "requires": { "@babel/code-frame": "^7.5.5", @@ -2122,21 +2215,29 @@ "dev": true, "requires": { "estree-walker": "^0.6.1" + }, + "dependencies": { + "estree-walker": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz", + "integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==", + "dev": true + } } }, "run-async": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz", - "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.0.tgz", + "integrity": "sha512-xJTbh/d7Lm7SBhc1tNvTpeCHaEzoyxPrqNlvSdMfBTYwaY++UJFyXUOxAtsRUXjlqOfj8luNaR9vjCh4KeV+pg==", "dev": true, "requires": { "is-promise": "^2.1.0" } }, "rxjs": { - "version": "6.5.3", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.3.tgz", - "integrity": "sha512-wuYsAYYFdWTAnAaPoKGNhfpWwKZbJW+HgAJ+mImp+Epl7BG8oNWBCTyRM8gba9k4lk8BgWdoYm21Mo/RYhhbgA==", + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.4.tgz", + "integrity": "sha512-naMQXcgEo3csAEGvw/NydRA0fuS2nDZJiw1YUWFKU7aPPAPGZEsD4Iimit96qwCieH6y614MCLYwdkrWx7z/7Q==", "dev": true, "requires": { "tslib": "^1.9.0" @@ -2220,9 +2321,9 @@ } }, "sourcemap-codec": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.6.tgz", - "integrity": "sha512-1ZooVLYFxC448piVLBbtOxFcXwnymH9oUF8nRd3CuYDVvkRBxRl6pB4Mtas5a4drtL+E8LDgFkQNcgIw6tc8Hg==", + "version": "1.4.8", + "resolved": "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz", + "integrity": "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==", "dev": true }, "spawn-wrap": { @@ -2295,22 
+2396,35 @@ } }, "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", "dev": true, "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" }, "dependencies": { + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", "dev": true, "requires": { - "ansi-regex": "^3.0.0" + "ansi-regex": "^5.0.0" } } } @@ -2379,9 +2493,9 @@ } }, "terser": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/terser/-/terser-4.6.2.tgz", - "integrity": "sha512-6FUjJdY2i3WZAtYBtnV06OOcOfzl+4hSKYE9wgac8rkLRBToPDDrBB2AcHwQD/OKDxbnvhVy2YgOPWO2SsKWqg==", + "version": "4.6.6", + "resolved": "https://registry.npmjs.org/terser/-/terser-4.6.6.tgz", + "integrity": "sha512-4lYPyeNmstjIIESr/ysHg2vUPRGf2tzF9z2yYwnowXVuVzLEamPN1Gfrz7f8I9uEPuHcbFlW4PLIAsJoxXyJ1g==", "dev": true, "requires": { "commander": "^2.20.0", @@ -2437,27 +2551,19 @@ "dev": true }, "tough-cookie": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", - "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", "dev": true, "requires": { - "psl": "^1.1.24", - "punycode": "^1.4.1" - }, - "dependencies": { - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", - "dev": true - } + "psl": "^1.1.28", + "punycode": "^2.1.1" } }, "tslib": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.10.0.tgz", - "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==", + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.1.tgz", + "integrity": "sha512-aZW88SY8kQbU7gpV19lN24LtXh/yD4ZZg6qieAJDDg+YBsJcSmLGK9QpnUjAKVG/xefmvJGd1WUmfpT/g6AJGA==", "dev": true }, "tunnel-agent": { @@ -2484,6 +2590,12 @@ "prelude-ls": "~1.1.2" } }, + "type-fest": { + "version": "0.8.1", + 
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true + }, "uglify-js": { "version": "3.6.9", "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.6.9.tgz", @@ -2561,10 +2673,10 @@ "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", "dev": true }, - "wordwrap": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=", + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", "dev": true }, "wrap-ansi": { diff --git a/package.json b/package.json index 646ec704..03162e62 100644 --- a/package.json +++ b/package.json @@ -40,17 +40,17 @@ "source-map": "^0.6.1" }, "devDependencies": { + "@rollup/plugin-commonjs": "^11.0.2", + "@rollup/plugin-json": "^4.0.2", + "@rollup/plugin-node-resolve": "^7.1.1", "clap": "^2.0.1", - "coveralls": "^3.0.4", - "eslint": "^6.3.0", + "coveralls": "^3.0.9", + "eslint": "^6.8.0", "json-to-ast": "^2.1.0", "mocha": "^5.2.0", "nyc": "^14.1.1", - "rollup": "^1.22.0", - "@rollup/plugin-commonjs": "^11.0.1", - "@rollup/plugin-json": "^4.0.0", - "@rollup/plugin-node-resolve": "^7.0.0", - "rollup-plugin-terser": "^5.2.0" + "rollup": "^1.32.1", + "rollup-plugin-terser": "^5.3.0" }, "engines": { "node": ">=8.0.0" From 79999b13bbe725679b8d309cd55a4a0f0207cf37 Mon Sep 17 00:00:00 2001 From: xiaoluoboding Date: Fri, 13 Mar 2020 11:48:50 +0800 Subject: [PATCH 03/11] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index bf44b1ee..17583ce8 100644 --- a/README.md +++ b/README.md @@ -92,7 +92,7 @@ Syntax matching: // parse CSS to AST as a declaration value var ast = csstree.parse('red 1px solid', { context: 'value' }); -// march to syntax of `border` property +// match to syntax of `border` property var matchResult = csstree.lexer.matchProperty('border', ast); // check first value node is a From 9a7a36077100c14ac86956931aac3cddc3e87a29 Mon Sep 17 00:00:00 2001 From: Roman Dvornov Date: Wed, 11 Mar 2020 23:46:30 +0100 Subject: [PATCH 04/11] Rework tests to not parse anything until test is actually run --- test/find.js | 61 ++++++++++++++------------- test/helpers/index.js | 20 +++++++++ test/lexer-match-atrule-descriptor.js | 38 +++++++++-------- test/lexer-match-atrule-prelude.js | 30 +++++++------ test/lexer-match-property.js | 48 +++++++++++---------- test/lexer-match-type.js | 26 ++++++------ test/lexer-search-fragments.js | 16 +++---- test/walk.js | 17 +++++--- 8 files changed, 145 insertions(+), 111 deletions(-) create mode 100644 test/helpers/index.js diff --git a/test/find.js b/test/find.js index 60e6aacc..9ae5bacb 100644 --- a/test/find.js +++ b/test/find.js @@ -1,40 +1,43 @@ const assert = require('assert'); const { parse, find, findLast, findAll } = require('./helpers/lib'); -const ast = parse(` - .foo { color: red; background: green; } - .bar, .qux.foo { font-weight: bold; color: blue; } -`); -const firstFoo = ast - .children.first // Rule - .prelude // SelectorList - .children.first // Selector - .children.first; // ClassSelector -const lastFoo = ast - .children.last // Rule - .prelude // SelectorList - .children.last // Selector - .children.last; // ClassSelector +const { lazyValues } = 
require('./helpers'); +const values = lazyValues({ + ast: () => parse(` + .foo { color: red; background: green; } + .bar, .qux.foo { font-weight: bold; color: blue; } + `), + firstFoo: () => values.ast + .children.first // Rule + .prelude // SelectorList + .children.first // Selector + .children.first, // ClassSelector + lastFoo: () => values.ast + .children.last // Rule + .prelude // SelectorList + .children.last // Selector + .children.last // ClassSelector +}); describe('Search', () => { describe('find', () => { it('base', () => { - const actual = find(ast, node => + const actual = find(values.ast, node => node.type === 'ClassSelector' && node.name === 'foo' ); - assert.equal(actual, firstFoo); + assert.equal(actual, values.firstFoo); }); it('using refs', () => { - const actual = find(ast, (node, item, list) => + const actual = find(values.ast, (node, item, list) => node.type === 'ClassSelector' && node.name === 'foo' && list.head !== item ); - assert.equal(actual, lastFoo); + assert.equal(actual, values.lastFoo); }); it('using context', () => { - const actual = find(ast, function(node) { + const actual = find(values.ast, function(node) { return ( node.type === 'ClassSelector' && node.name === 'foo' && @@ -42,29 +45,29 @@ describe('Search', () => { ); }); - assert.equal(actual, lastFoo); + assert.equal(actual, values.lastFoo); }); }); describe('findLast', () => { it('findLast', () => { - const actual = findLast(ast, node => + const actual = findLast(values.ast, node => node.type === 'ClassSelector' && node.name === 'foo' ); - assert.equal(actual, lastFoo); + assert.equal(actual, values.lastFoo); }); it('using refs', () => { - const actual = findLast(ast, (node, item, list) => + const actual = findLast(values.ast, (node, item, list) => node.type === 'ClassSelector' && node.name === 'foo' && list.head === item ); - assert.equal(actual, firstFoo); + assert.equal(actual, values.firstFoo); }); it('using context', () => { - const actual = findLast(ast, function(node) { + const actual = findLast(values.ast, function(node) { return ( node.type === 'ClassSelector' && node.name === 'foo' && @@ -72,17 +75,17 @@ describe('Search', () => { ); }); - assert.equal(actual, firstFoo); + assert.equal(actual, values.firstFoo); }); }); it('findAll', () => { - const actual = findAll(ast, node => + const actual = findAll(values.ast, node => node.type === 'ClassSelector' && node.name === 'foo' ); assert.equal(actual.length, 2); - assert.equal(actual[0], firstFoo); - assert.equal(actual[1], lastFoo); + assert.equal(actual[0], values.firstFoo); + assert.equal(actual[1], values.lastFoo); }); }); diff --git a/test/helpers/index.js b/test/helpers/index.js new file mode 100644 index 00000000..52548c43 --- /dev/null +++ b/test/helpers/index.js @@ -0,0 +1,20 @@ +function lazyValues(dict) { + const result = Object.create(null); + + for (const [key, compute] of Object.entries(dict)) { + Object.defineProperty(result, key, { + configurable: true, + get() { + const value = compute.call(result); + Object.defineProperty(result, key, { value }); + return value; + } + }); + } + + return result; +}; + +module.exports = { + lazyValues +}; diff --git a/test/lexer-match-atrule-descriptor.js b/test/lexer-match-atrule-descriptor.js index b416fc36..24c1aa01 100644 --- a/test/lexer-match-atrule-descriptor.js +++ b/test/lexer-match-atrule-descriptor.js @@ -1,25 +1,27 @@ const assert = require('assert'); const { parse, lexer, fork } = require('./helpers/lib'); +const { lazyValues } = require('./helpers'); const fixture = 
require('./fixture/syntax'); - -describe('Lexer#matchAtruleDescriptor()', () => { - const swapValue = parse('swap', { context: 'value' }); - const xxxValue = parse('xxx', { context: 'value' }); - const fontDisplaySyntax = 'auto | block | swap | fallback | optional'; - const customSyntax = fork(prev => ({ +const values = lazyValues({ + swapValue: () => parse('swap', { context: 'value' }), + xxxValue: () => parse('xxx', { context: 'value' }), + fontDisplaySyntax: () => 'auto | block | swap | fallback | optional', + customSyntax: () => fork(prev => ({ ...prev, atrules: { 'font-face': { descriptors: { - 'font-display': fontDisplaySyntax, - '-foo-font-display': `${fontDisplaySyntax} | xxx` + 'font-display': values.fontDisplaySyntax, + '-foo-font-display': `${values.fontDisplaySyntax} | xxx` } } } - })); + })) +}); +describe('Lexer#matchAtruleDescriptor()', () => { it('should match', () => { - const match = customSyntax.lexer.matchAtruleDescriptor('font-face', 'font-display', swapValue); + const match = values.customSyntax.lexer.matchAtruleDescriptor('font-face', 'font-display', values.swapValue); assert(match.matched); assert.equal(match.error, null); @@ -27,14 +29,14 @@ describe('Lexer#matchAtruleDescriptor()', () => { describe('vendor prefixes', () => { it('vendor prefix in keyword name', () => { - const match = customSyntax.lexer.matchAtruleDescriptor('-prefix-font-face', 'font-display', swapValue); + const match = values.customSyntax.lexer.matchAtruleDescriptor('-prefix-font-face', 'font-display', values.swapValue); assert(match.matched); assert.equal(match.error, null); }); it('vendor prefix in declarator name', () => { - const match = customSyntax.lexer.matchAtruleDescriptor('font-face', '-prefix-font-display', swapValue); + const match = values.customSyntax.lexer.matchAtruleDescriptor('font-face', '-prefix-font-display', values.swapValue); assert(match.matched); assert.equal(match.error, null); @@ -43,11 +45,11 @@ describe('Lexer#matchAtruleDescriptor()', () => { it('case insensetive with vendor prefix', () => { let match; - match = customSyntax.lexer.matchAtruleDescriptor('FONT-FACE', 'FONT-DISPLAY', swapValue); + match = values.customSyntax.lexer.matchAtruleDescriptor('FONT-FACE', 'FONT-DISPLAY', values.swapValue); assert(match.matched); assert.equal(match.error, null); - match = customSyntax.lexer.matchAtruleDescriptor('FONT-face', '-VENDOR-Font-Display', swapValue); + match = values.customSyntax.lexer.matchAtruleDescriptor('FONT-face', '-VENDOR-Font-Display', values.swapValue); assert(match.matched); assert.equal(match.error, null); }); @@ -55,13 +57,13 @@ describe('Lexer#matchAtruleDescriptor()', () => { it('should use verdor version first', () => { let match; - match = customSyntax.lexer.matchAtruleDescriptor('font-face', '-foo-font-display', xxxValue); + match = values.customSyntax.lexer.matchAtruleDescriptor('font-face', '-foo-font-display', values.xxxValue); assert(match.matched); assert.equal(match.error, null); - match = customSyntax.lexer.matchAtruleDescriptor('font-face', 'font-display', xxxValue); + match = values.customSyntax.lexer.matchAtruleDescriptor('font-face', 'font-display', values.xxxValue); assert.equal(match.matched, null); - assert.equal(match.error.message, 'Mismatch\n syntax: ' + fontDisplaySyntax + '\n value: xxx\n --------^'); + assert.equal(match.error.message, 'Mismatch\n syntax: ' + values.fontDisplaySyntax + '\n value: xxx\n --------^'); }); }); @@ -80,7 +82,7 @@ describe('Lexer#matchAtruleDescriptor()', () => { }); it('should not be matched to at-rules 
with no descriptors', () => { - const match = lexer.matchAtruleDescriptor('keyframes', 'font-face', swapValue); + const match = lexer.matchAtruleDescriptor('keyframes', 'font-face', values.swapValue); assert.equal(match.matched, null); assert.equal(match.error.message, 'At-rule `keyframes` has no known descriptors'); diff --git a/test/lexer-match-atrule-prelude.js b/test/lexer-match-atrule-prelude.js index 13b1717d..4d08112e 100644 --- a/test/lexer-match-atrule-prelude.js +++ b/test/lexer-match-atrule-prelude.js @@ -1,20 +1,22 @@ const assert = require('assert'); const { parse, lexer, fork } = require('./helpers/lib'); +const { lazyValues } = require('./helpers'); const fixture = require('./fixture/syntax'); - -describe('Lexer#matchAtrulePrelude()', () => { - const animationName = parse('animation-name', { context: 'atrulePrelude', atrule: 'keyframes' }); - const number = parse('123', { context: 'atrulePrelude', atrule: 'unknown' }); - const customSyntax = fork({ +const values = lazyValues({ + animationName: () => parse('animation-name', { context: 'atrulePrelude', atrule: 'keyframes' }), + number: () => parse('123', { context: 'atrulePrelude', atrule: 'unknown' }), + customSyntax: () => fork({ atrules: { '-foo-keyframes': { prelude: '' } } - }); + }) +}); +describe('Lexer#matchAtrulePrelude()', () => { it('should match', () => { - const match = customSyntax.lexer.matchAtrulePrelude('keyframes', animationName); + const match = values.customSyntax.lexer.matchAtrulePrelude('keyframes', values.animationName); assert(match.matched); assert.equal(match.error, null); @@ -22,7 +24,7 @@ describe('Lexer#matchAtrulePrelude()', () => { describe('vendor prefixes', () => { it('vendor prefix', () => { - const match = customSyntax.lexer.matchAtrulePrelude('-webkit-keyframes', animationName); + const match = values.customSyntax.lexer.matchAtrulePrelude('-webkit-keyframes', values.animationName); assert(match.matched); assert.equal(match.error, null); @@ -31,11 +33,11 @@ describe('Lexer#matchAtrulePrelude()', () => { it('case insensetive with vendor prefix', () => { let match; - match = customSyntax.lexer.matchAtrulePrelude('KEYFRAMES', animationName); + match = values.customSyntax.lexer.matchAtrulePrelude('KEYFRAMES', values.animationName); assert(match.matched); assert.equal(match.error, null); - match = customSyntax.lexer.matchAtrulePrelude('-VENDOR-Keyframes', animationName); + match = values.customSyntax.lexer.matchAtrulePrelude('-VENDOR-Keyframes', values.animationName); assert(match.matched); assert.equal(match.error, null); }); @@ -43,11 +45,11 @@ describe('Lexer#matchAtrulePrelude()', () => { it('should use verdor version first', () => { let match; - match = customSyntax.lexer.matchAtrulePrelude('-foo-keyframes', number); + match = values.customSyntax.lexer.matchAtrulePrelude('-foo-keyframes', values.number); assert(match.matched); assert.equal(match.error, null); - match = customSyntax.lexer.matchAtrulePrelude('keyframes', number); + match = values.customSyntax.lexer.matchAtrulePrelude('keyframes', values.number); assert.equal(match.matched, null); assert.equal(match.error.message, 'Mismatch\n syntax: \n value: 123\n --------^'); }); @@ -69,14 +71,14 @@ describe('Lexer#matchAtrulePrelude()', () => { describe('should not be matched to at-rules with no prelude', () => { it('regular name', () => { - const match = lexer.matchAtrulePrelude('font-face', animationName); + const match = lexer.matchAtrulePrelude('font-face', values.animationName); assert.equal(match.matched, null); 
assert.equal(match.error.message, 'At-rule `font-face` should not contain a prelude'); }); it('with verdor prefix', () => { - const match = lexer.matchAtrulePrelude('-prefix-font-face', animationName); + const match = lexer.matchAtrulePrelude('-prefix-font-face', values.animationName); assert.equal(match.matched, null); assert.equal(match.error.message, 'At-rule `-prefix-font-face` should not contain a prelude'); diff --git a/test/lexer-match-property.js b/test/lexer-match-property.js index 9a2ea87c..ef37aa41 100644 --- a/test/lexer-match-property.js +++ b/test/lexer-match-property.js @@ -1,6 +1,19 @@ const assert = require('assert'); const { parse, lexer, fork } = require('./helpers/lib'); +const { lazyValues } = require('./helpers'); const fixture = require('./fixture/syntax'); +const values = lazyValues({ + bar: () => parse('bar', { context: 'value' }), + qux: () => parse('qux', { context: 'value' }), + customSyntax: () => fork(function(prev, assign) { + return assign(prev, { + properties: { + foo: 'bar', + '-baz-foo': 'qux' + } + }); + }) +}); function getMatch(lexer, property, value, syntax) { return syntax @@ -9,55 +22,44 @@ function getMatch(lexer, property, value, syntax) { } describe('Lexer#matchProperty()', () => { - const bar = parse('bar', { context: 'value' }); - const qux = parse('qux', { context: 'value' }); - const customSyntax = fork(function(prev, assign) { - return assign(prev, { - properties: { - foo: 'bar', - '-baz-foo': 'qux' - } - }); - }); - describe('vendor prefixes and hacks', () => { it('vendor prefix', () => { - const match = customSyntax.lexer.matchProperty('-vendor-foo', bar); + const match = values.customSyntax.lexer.matchProperty('-vendor-foo', values.bar); assert(match.matched); assert.equal(match.error, null); }); it('hacks', () => { - const match = customSyntax.lexer.matchProperty('_foo', bar); + const match = values.customSyntax.lexer.matchProperty('_foo', values.bar); assert(match.matched); - assert.equal(customSyntax.lexer.lastMatchError, null); + assert.equal(values.customSyntax.lexer.lastMatchError, null); }); it('vendor prefix and hack', () => { - const match = customSyntax.lexer.matchProperty('_-vendor-foo', bar); + const match = values.customSyntax.lexer.matchProperty('_-vendor-foo', values.bar); assert(match.matched); - assert.equal(customSyntax.lexer.lastMatchError, null); + assert.equal(values.customSyntax.lexer.lastMatchError, null); }); it('case insensetive with vendor prefix and hack', () => { let match; - match = customSyntax.lexer.matchProperty('FOO', bar); + match = values.customSyntax.lexer.matchProperty('FOO', values.bar); assert(match.matched); assert.equal(match.error, null); - match = customSyntax.lexer.matchProperty('-VENDOR-Foo', bar); + match = values.customSyntax.lexer.matchProperty('-VENDOR-Foo', values.bar); assert(match.matched); assert.equal(match.error, null); - match = customSyntax.lexer.matchProperty('_FOO', bar); + match = values.customSyntax.lexer.matchProperty('_FOO', values.bar); assert(match.matched); assert.equal(match.error, null); - match = customSyntax.lexer.matchProperty('_-VENDOR-Foo', bar); + match = values.customSyntax.lexer.matchProperty('_-VENDOR-Foo', values.bar); assert(match.matched); assert.equal(match.error, null); }); @@ -65,18 +67,18 @@ describe('Lexer#matchProperty()', () => { it('should use verdor version first', () => { let match; - match = customSyntax.lexer.matchProperty('-baz-foo', qux); + match = values.customSyntax.lexer.matchProperty('-baz-foo', values.qux); assert(match.matched); 
assert.equal(match.error, null); - match = customSyntax.lexer.matchProperty('-baz-baz-foo', qux); + match = values.customSyntax.lexer.matchProperty('-baz-baz-foo', values.qux); assert.equal(match.matched, null); assert.equal(match.error.message, 'Unknown property `-baz-baz-foo`'); }); }); it('custom property', () => { - const match = lexer.matchProperty('--foo', bar); + const match = lexer.matchProperty('--foo', values.bar); assert.equal(match.matched, null); assert.equal(match.error.message, 'Lexer matching doesn\'t applicable for custom properties'); diff --git a/test/lexer-match-type.js b/test/lexer-match-type.js index 225901e6..8539f600 100644 --- a/test/lexer-match-type.js +++ b/test/lexer-match-type.js @@ -1,48 +1,50 @@ const assert = require('assert'); +const { lazyValues } = require('./helpers'); const { parse, fork } = require('./helpers/lib'); - -describe('Lexer#matchType()', () => { - const singleNumber = parse('1', { context: 'value' }); - const severalNumbers = parse('1, 2, 3', { context: 'value' }); - const cssWideKeyword = parse('inherit', { context: 'value' }); - const customSyntax = fork(prev => ({ +const values = lazyValues({ + singleNumber: () => parse('1', { context: 'value' }), + severalNumbers: () => parse('1, 2, 3', { context: 'value' }), + cssWideKeyword: () => parse('inherit', { context: 'value' }), + customSyntax: () => fork(prev => ({ ...prev, types: { foo: '#', bar: '[ 1 | 2 | 3 ]' } - })); + })) +}); +describe('Lexer#matchType()', () => { it('should match type', () => { - const match = customSyntax.lexer.matchType('bar', singleNumber); + const match = values.customSyntax.lexer.matchType('bar', values.singleNumber); assert(match.matched); assert.equal(match.error, null); }); it('should match type using nested', () => { - const match = customSyntax.lexer.matchType('foo', severalNumbers); + const match = values.customSyntax.lexer.matchType('foo', values.severalNumbers); assert(match.matched); assert.equal(match.error, null); }); it('should fail on matching wrong value', () => { - const match = customSyntax.lexer.matchType('bar', severalNumbers); + const match = values.customSyntax.lexer.matchType('bar', values.severalNumbers); assert.equal(match.matched, null); assert.equal(match.error.rawMessage, 'Mismatch'); }); it('should return null and save error for unknown type', () => { - const match = customSyntax.lexer.matchType('baz', singleNumber); + const match = values.customSyntax.lexer.matchType('baz', values.singleNumber); assert.equal(match.matched, null); assert.equal(match.error.message, 'Unknown type `baz`'); }); it('should not match to CSS wide names', () => { - const match = customSyntax.lexer.matchType('foo', cssWideKeyword); + const match = values.customSyntax.lexer.matchType('foo', values.cssWideKeyword); assert.equal(match.matched, null); assert.equal(match.error.message, 'Mismatch\n syntax: #\n value: inherit\n --------^'); diff --git a/test/lexer-search-fragments.js b/test/lexer-search-fragments.js index 1d74eff0..553cfd04 100644 --- a/test/lexer-search-fragments.js +++ b/test/lexer-search-fragments.js @@ -1,15 +1,15 @@ const assert = require('assert'); const { parse, generate, lexer } = require('./helpers/lib'); -describe('lexer search fragments', () => { - function translateFragments(fragments) { - return fragments.map(fragment => generate({ - type: 'Value', - loc: null, - children: fragment.nodes - })); - } +function translateFragments(fragments) { + return fragments.map(fragment => generate({ + type: 'Value', + loc: null, + children: fragment.nodes 
+ })); +} +describe('lexer search fragments', () => { describe('findValueFragments()', () => { it('should find single entry', () => { const declaration = parse('border: 1px solid red', { context: 'declaration' }); diff --git a/test/walk.js b/test/walk.js index 8ba024b3..6043562e 100644 --- a/test/walk.js +++ b/test/walk.js @@ -1,5 +1,6 @@ const assert = require('assert'); const path = require('path'); +const { lazyValues } = require('./helpers'); const { parse, walk } = require('./helpers/lib'); const notInsideAtrulePrelude = stack => stack.every(node => node.type !== 'AtrulePrelude'); const { tests, forEachTest: forEachParseTest } = require('./fixture/parse'); @@ -156,13 +157,15 @@ describe('AST traversal', () => { }); describe('traverse order', () => { - const ast = parse('.a.b { foo: bar; baz: qux } .c {} @media all { .d:not(.e) { aa: bb; cc: dd } f { ee: ff } }'); - const expectedOrder = 'a b foo bar baz qux c media all d not e aa bb cc dd f ee ff'.split(' '); + const values = lazyValues({ + ast: () => parse('.a.b { foo: bar; baz: qux } .c {} @media all { .d:not(.e) { aa: bb; cc: dd } f { ee: ff } }'), + expectedOrder: () => 'a b foo bar baz qux c media all d not e aa bb cc dd f ee ff'.split(' ') + }); it('natural', () => { const visitedNames = []; - walk(ast, { + walk(values.ast, { enter: (node) => { if (node.name || node.property) { visitedNames.push(node.name || node.property); @@ -172,14 +175,14 @@ describe('AST traversal', () => { assert.deepEqual( visitedNames, - expectedOrder + values.expectedOrder ); }); it('reverse', () => { const visitedNames = []; - walk(ast, { + walk(values.ast, { reverse: true, enter: (node) => { if (node.name || node.property) { @@ -190,13 +193,13 @@ describe('AST traversal', () => { assert.deepEqual( visitedNames, - expectedOrder.slice().reverse() + values.expectedOrder.slice().reverse() ); }); }); describe('bad options', () => { - const ast = parse('.foo { color: red }'); + const ast = {}; it('should throws when no enter/leave handlers is set', () => { assert.throws( From ca67540cad5b6b35a899dd0ab24a3a968469c9f6 Mon Sep 17 00:00:00 2001 From: Roman Dvornov Date: Tue, 24 Mar 2020 00:53:56 +0100 Subject: [PATCH 05/11] Add parser.consumeNumber() Remove last usage of `this.source` in syntax --- lib/parser/create.js | 9 ++++++++- lib/syntax/node/Dimension.js | 9 +++------ lib/syntax/node/Percentage.js | 7 ++----- 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/lib/parser/create.js b/lib/parser/create.js index 395b3140..e56bd2ee 100644 --- a/lib/parser/create.js +++ b/lib/parser/create.js @@ -3,7 +3,7 @@ const SyntaxError = require('../common/SyntaxError'); const TokenStream = require('../common/TokenStream'); const List = require('../common/List'); const tokenize = require('../tokenizer'); -const { findWhiteSpaceStart, cmpChar, cmpStr } = require('../tokenizer/utils'); +const { consumeNumber, findWhiteSpaceStart, cmpChar, cmpStr } = require('../tokenizer/utils'); const NAME = require('../tokenizer/names'); const { WhiteSpace, @@ -226,6 +226,13 @@ module.exports = function createParser(config) { return name; }, + consumeNumber(type) { + const number = source.substring(this.tokenStart, consumeNumber(source, this.tokenStart)); + + this.eat(type); + + return number; + }, getLocation(start, end) { if (needPositions) { diff --git a/lib/syntax/node/Dimension.js b/lib/syntax/node/Dimension.js index f3b81f6e..f5bc7279 100644 --- a/lib/syntax/node/Dimension.js +++ b/lib/syntax/node/Dimension.js @@ -1,4 +1,3 @@ -const { consumeNumber } = 
require('../../tokenizer/utils');
 const { Dimension } = require('../../tokenizer/types');
 
 module.exports = {
@@ -9,15 +8,13 @@ module.exports = {
     },
     parse: function() {
         const start = this.tokenStart;
-        const numberEnd = consumeNumber(this.source, start);
-
-        this.eat(Dimension);
+        const value = this.consumeNumber(Dimension);
 
         return {
             type: 'Dimension',
             loc: this.getLocation(start, this.tokenStart),
-            value: this.substring(start, numberEnd),
-            unit: this.substring(numberEnd, this.tokenStart)
+            value,
+            unit: this.substring(start + value.length, this.tokenStart)
         };
     },
     generate: function(node) {
diff --git a/lib/syntax/node/Percentage.js b/lib/syntax/node/Percentage.js
index 63c14cef..52d60e11 100644
--- a/lib/syntax/node/Percentage.js
+++ b/lib/syntax/node/Percentage.js
@@ -1,4 +1,3 @@
-const { consumeNumber } = require('../../tokenizer/utils');
 const { Percentage } = require('../../tokenizer/types');
 
 module.exports = {
@@ -8,14 +7,12 @@ module.exports = {
     },
     parse: function() {
         const start = this.tokenStart;
-        const numberEnd = consumeNumber(this.source, start);
-
-        this.eat(Percentage);
+        const value = this.consumeNumber(Percentage);
 
         return {
             type: 'Percentage',
             loc: this.getLocation(start, this.tokenStart),
-            value: this.substring(start, numberEnd)
+            value
         };
     },
     generate: function(node) {

From 62011085ad251dea453146d3de0dc77c83b2dc89 Mon Sep 17 00:00:00 2001
From: Roman Dvornov
Date: Tue, 24 Mar 2020 01:12:07 +0100
Subject: [PATCH 06/11] Change tokenizer API

---
 CHANGELOG.md                |   1 +
 lib/common/TokenStream.js   | 105 ++++++++++++++++++------------------
 lib/generator/create.js     |   9 +---
 lib/lexer/prepare-tokens.js |  21 +++-----
 lib/parser/create.js        |   2 +-
 lib/tokenizer/index.js      |  23 ++------
 test/tokenizer.js           |  49 +++++++++--------
 7 files changed, 97 insertions(+), 113 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0076c237..dc0070a7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,7 @@
 - Exposed `version` of the lib (i.e. `import { version } from 'css-tree'`)
 - Removed `dist/default-syntax.json` from package
 - Tokenizer
+    - Changed `tokenize()` to take a function as a second argument, which will be called for every token. No stream instance is created when the second argument is omitted.
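A minimal sketch of the new callback contract (illustrative only, not part of the patch; it assumes `tokenize` is exposed by the package entry point, as the test helpers do):

    const { tokenize } = require('css-tree');

    const css = 'a { color: red }';

    // the callback receives a numeric token type and the [start, end) offsets
    // of that token in the source; no TokenStream instance is created this way
    tokenize(css, (type, start, end) =>
        console.log(tokenize.NAME[type], JSON.stringify(css.slice(start, end)))
    );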
- Changed `TokenStream#getRawLength()` to take second parameter as a function (rule) that check a char code to stop a scanning - Added `TokenStream#forEachToken(fn)` method - Parser diff --git a/lib/common/TokenStream.js b/lib/common/TokenStream.js index d0b79bd9..2cfcf904 100644 --- a/lib/common/TokenStream.js +++ b/lib/common/TokenStream.js @@ -25,8 +25,8 @@ const balancePair = new Map([ ]); module.exports = class TokenStream { - constructor() { - this.open('', 0).close(); + constructor(source, tokenize) { + this.setSource(source, tokenize); } reset() { this.eof = false; @@ -35,72 +35,75 @@ module.exports = class TokenStream { this.tokenStart = this.firstCharOffset; this.tokenEnd = this.firstCharOffset; } - open(source, firstCharOffset) { + setSource(source = '', tokenize = () => {}) { + source = String(source || ''); + const sourceLength = source.length; const offsetAndType = adoptBuffer(this.offsetAndType, source.length + 1); // +1 because of eof-token const balance = adoptBuffer(this.balance, source.length + 1); let tokenCount = 0; let balanceCloseType = 0; let balanceStart = 0; + let firstCharOffset = -1; // capture buffers this.offsetAndType = null; this.balance = null; - return { - token(type, offset) { - switch (type) { - default: - balance[tokenCount] = sourceLength; - break; - - case balanceCloseType: { - let balancePrev = balanceStart & OFFSET_MASK; - balanceStart = balance[balancePrev]; - balanceCloseType = balanceStart >> TYPE_SHIFT; - balance[tokenCount] = balancePrev; - balance[balancePrev++] = tokenCount; - for (; balancePrev < tokenCount; balancePrev++) { - if (balance[balancePrev] === sourceLength) { - balance[balancePrev] = tokenCount; - } - } - break; - } - - case LeftParenthesis: - case FunctionToken: - case LeftSquareBracket: - case LeftCurlyBracket: - balance[tokenCount] = balanceStart; - balanceCloseType = balancePair.get(type); - balanceStart = (balanceCloseType << TYPE_SHIFT) | tokenCount; - break; - } + tokenize(source, (type, start, end) => { + switch (type) { + default: + balance[tokenCount] = sourceLength; + break; - offsetAndType[tokenCount++] = (type << TYPE_SHIFT) | offset; - }, - close: () => { - // finalize buffers - offsetAndType[tokenCount] = (EOF << TYPE_SHIFT) | sourceLength; // - balance[tokenCount] = sourceLength; - balance[sourceLength] = sourceLength; // prevents false positive balance match with any token - while (balanceStart !== 0) { - const balancePrev = balanceStart & OFFSET_MASK; + case balanceCloseType: { + let balancePrev = balanceStart & OFFSET_MASK; balanceStart = balance[balancePrev]; - balance[balancePrev] = sourceLength; + balanceCloseType = balanceStart >> TYPE_SHIFT; + balance[tokenCount] = balancePrev; + balance[balancePrev++] = tokenCount; + for (; balancePrev < tokenCount; balancePrev++) { + if (balance[balancePrev] === sourceLength) { + balance[balancePrev] = tokenCount; + } + } + break; } - this.source = source; - this.firstCharOffset = firstCharOffset; - this.tokenCount = tokenCount; - this.offsetAndType = offsetAndType; - this.balance = balance; + case LeftParenthesis: + case FunctionToken: + case LeftSquareBracket: + case LeftCurlyBracket: + balance[tokenCount] = balanceStart; + balanceCloseType = balancePair.get(type); + balanceStart = (balanceCloseType << TYPE_SHIFT) | tokenCount; + break; + } - this.reset(); - this.next(); + offsetAndType[tokenCount++] = (type << TYPE_SHIFT) | end; + if (firstCharOffset === -1) { + firstCharOffset = start; } - }; + }); + + // finalize buffers + offsetAndType[tokenCount] = (EOF << 
TYPE_SHIFT) | sourceLength; // <EOF-token>
+        balance[tokenCount] = sourceLength;
+        balance[sourceLength] = sourceLength; // prevents false positive balance match with any token
+        while (balanceStart !== 0) {
+            const balancePrev = balanceStart & OFFSET_MASK;
+            balanceStart = balance[balancePrev];
+            balance[balancePrev] = sourceLength;
+        }
+
+        this.source = source;
+        this.firstCharOffset = firstCharOffset === -1 ? 0 : firstCharOffset;
+        this.tokenCount = tokenCount;
+        this.offsetAndType = offsetAndType;
+        this.balance = balance;
+
+        this.reset();
+        this.next();
     }
     lookupType(offset) {
diff --git a/lib/generator/create.js b/lib/generator/create.js
index aa7e81f8..5462ad85 100644
--- a/lib/generator/create.js
+++ b/lib/generator/create.js
@@ -24,13 +24,8 @@ function processChildren(node, delimeter) {
 }
 
 function processChunk(chunk) {
-    tokenize(chunk, {
-        open: (source, lastOffset) => ({
-            token: (type, offset) => {
-                this.token(type, source.slice(lastOffset, lastOffset = offset));
-            },
-            close() { }
-        })
+    tokenize(chunk, (type, start, end) => {
+        this.token(type, chunk.slice(start, end));
     });
 }
 
diff --git a/lib/lexer/prepare-tokens.js b/lib/lexer/prepare-tokens.js
index df4bcd92..b9248b62 100644
--- a/lib/lexer/prepare-tokens.js
+++ b/lib/lexer/prepare-tokens.js
@@ -29,20 +29,13 @@ const astToTokens = {
 
 function stringToTokens(str) {
     const tokens = [];
-    tokenize(str, {
-        open(source, startOffset) {
-            return {
-                token(type, offset) {
-                    tokens.push({
-                        type,
-                        value: source.slice(startOffset, startOffset = offset),
-                        node: null
-                    });
-                },
-                close() {}
-            };
-        }
-    });
+    tokenize(str, (type, start, end) =>
+        tokens.push({
+            type,
+            value: str.slice(start, end),
+            node: null
+        })
+    );
 
     return tokens;
 }
diff --git a/lib/parser/create.js b/lib/parser/create.js
index e56bd2ee..1bb26788 100644
--- a/lib/parser/create.js
+++ b/lib/parser/create.js
@@ -280,7 +280,7 @@ module.exports = function createParser(config) {
         source = source_;
         options = options || {};
 
-        tokenize(source, parser);
+        parser.setSource(source, tokenize);
         locationMap.setSource(
             source,
             options.offset,
diff --git a/lib/tokenizer/index.js b/lib/tokenizer/index.js
index 6b915a57..4b19cb96 100644
--- a/lib/tokenizer/index.js
+++ b/lib/tokenizer/index.js
@@ -1,4 +1,3 @@
-const TokenStream = require('../common/TokenStream');
 const TYPE = require('./types');
 const NAME = require('./names');
 const charCodeDefinitions = require('./char-code-definitions');
@@ -22,7 +21,7 @@ const {
     consumeBadUrlRemnants
 } = utils;
 
-function tokenize(source, stream) {
+function tokenize(source, onToken) {
     function getCharCode(offset) {
         return offset < sourceLength ? source.charCodeAt(offset) : 0;
     }
@@ -243,16 +242,11 @@ function tokenize(source, stream) {
         }
     }
 
-    if (!stream) {
-        stream = new TokenStream();
-    }
-
     // ensure source is a string
     source = String(source || '');
 
     const sourceLength = source.length;
-    const start = isBOM(getCharCode(0));
-    const { token, close } = stream.open(source, start);
+    let start = isBOM(getCharCode(0));
     let offset = start;
     let type;
 
@@ -385,10 +379,8 @@ function tokenize(source, stream) {
                 // ... consume them and all following code points up to and including the first U+002A ASTERISK (*)
                 // followed by a U+002F SOLIDUS (/), or up to an EOF code point.
                 type = TYPE.Comment;
-                offset = source.indexOf('*/', offset + 2) + 2;
-                if (offset === 1) {
-                    offset = source.length;
-                }
+                offset = source.indexOf('*/', offset + 2);
+                offset = offset === -1 ?
source.length : offset + 2; } else { type = TYPE.Delim; offset++; @@ -507,13 +499,8 @@ function tokenize(source, stream) { } // put token to stream - token(type, offset); + onToken(type, start, start = offset); } - - // close stream - close(); - - return stream; } // extend tokenizer with static methods from utils diff --git a/test/tokenizer.js b/test/tokenizer.js index 8b28d21c..334de0da 100644 --- a/test/tokenizer.js +++ b/test/tokenizer.js @@ -1,8 +1,9 @@ const assert = require('assert'); -const { tokenize } = require('./helpers/lib'); +const { TokenStream, tokenize } = require('./helpers/lib'); const fixture = require('./fixture/tokenize'); -describe('parser/stream', () => { +describe('tokenize/stream', () => { + const createStream = source => new TokenStream(source, tokenize); const css = '.test\n{\n prop: url(foo/bar.jpg) url( a\\(\\33 \\).\\ \\"\\\'test ) calc(1 + 1) \\x \\aa ;\n}\\\n'; const tokens = [ { type: 'Delim', chunk: '.', balance: 93 }, @@ -55,7 +56,7 @@ describe('parser/stream', () => { }, { offset: 0 }); it('edge case: no arguments', () => { - const stream = tokenize(); + const stream = createStream(); assert.equal(stream.eof, true); assert.equal(stream.tokenType, 0); @@ -63,7 +64,7 @@ describe('parser/stream', () => { }); it('edge case: empty input', () => { - const stream = tokenize(''); + const stream = createStream(''); assert.equal(stream.eof, true); assert.equal(stream.tokenType, 0); @@ -71,8 +72,8 @@ describe('parser/stream', () => { }); it('should convert input to string', () => { - const stream = tokenize({ - toString: () => { + const stream = createStream({ + toString() { return css; } }); @@ -81,19 +82,19 @@ describe('parser/stream', () => { }); it('should accept a Buffer', () => { - const stream = tokenize(Buffer.from(css)); + const stream = createStream(Buffer.from(css)); assert.equal(stream.source, css); }); it('dump()', () => { - const stream = tokenize(css); + const stream = createStream(css); assert.deepEqual(stream.dump(), dump); }); it('next() types', () => { - const stream = tokenize(css); + const stream = createStream(css); const actual = []; while (!stream.eof) { @@ -105,7 +106,7 @@ describe('parser/stream', () => { }); it('next() start', () => { - const stream = tokenize(css); + const stream = createStream(css); const actual = []; while (!stream.eof) { @@ -117,7 +118,7 @@ describe('parser/stream', () => { }); it('next() end', () => { - const stream = tokenize(css); + const stream = createStream(css); const actual = []; while (!stream.eof) { @@ -129,7 +130,7 @@ describe('parser/stream', () => { }); it('skip()', () => { - const stream = tokenize(css); + const stream = createStream(css); const targetTokens = tokens.filter(token => token.type === 'Ident' || token.type === 'Delim' ); @@ -147,7 +148,7 @@ describe('parser/stream', () => { }); it('skip() to end', () => { - const stream = tokenize(css); + const stream = createStream(css); stream.skip(tokens.length); @@ -244,7 +245,7 @@ describe('parser/stream', () => { tests.forEach(function(test, idx) { it('testcase#' + idx, () => { - const stream = tokenize(test.source); + const stream = createStream(test.source, tokenize); const startOffset = test.start.indexOf('^'); const skipToOffset = test.skip.indexOf('^'); let startToken = stream.tokenIndex; @@ -268,8 +269,8 @@ describe('parser/stream', () => { }); it('dynamic buffer', () => { - const bufferSize = tokenize(css).offsetAndType.length + 10; - const stream = tokenize('.'.repeat(bufferSize)); + const bufferSize = createStream(css, 
tokenize).offsetAndType.length + 10;
+        const stream = createStream('.'.repeat(bufferSize), tokenize);
 
         let count = 0;
         while (!stream.eof) {
@@ -283,14 +284,18 @@ describe('tokenize/stream', () => {
 
     describe('values', () => {
         ['valid', 'invalid'].forEach(testType => {
-            fixture.forEachTest(testType, (name, value, tokens) => {
+            fixture.forEachTest(testType, (name, value, expected) => {
                 it(name, () => {
+                    const actual = [];
+
+                    tokenize(value, (type, start, end) => actual.push({
+                        type: tokenize.NAME[type],
+                        chunk: value.substring(start, end)
+                    }));
+
                     assert[testType === 'valid' ? 'deepEqual' : 'notDeepEqual'](
-                        tokenize(value).dump().map(token => ({
-                            type: token.type,
-                            chunk: token.chunk
-                        })),
-                        tokens
+                        actual,
+                        expected
                     );
                 });
             });

From 9431fb315f776c9d61eefc929f046acea6489dd9 Mon Sep 17 00:00:00 2001
From: Roman Dvornov
Date: Tue, 24 Mar 2020 01:31:19 +0100
Subject: [PATCH 07/11] Remove TokenStream#skipWS()

---
 CHANGELOG.md              |  1 +
 lib/common/TokenStream.js | 13 -------------
 lib/syntax/node/Ratio.js  |  4 ++--
 3 files changed, 3 insertions(+), 15 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index dc0070a7..82609640 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,7 @@
     - Changed `tokenize()` to take a function as a second argument, which will be called for every token. No stream instance is created when the second argument is omitted.
     - Changed `TokenStream#getRawLength()` to take second parameter as a function (rule) that check a char code to stop a scanning
     - Added `TokenStream#forEachToken(fn)` method
+    - Removed `TokenStream#skipWS()` method
 - Parser
diff --git a/lib/common/TokenStream.js b/lib/common/TokenStream.js
index 2cfcf904..3edc25e1 100644
--- a/lib/common/TokenStream.js
+++ b/lib/common/TokenStream.js
@@ -215,19 +215,6 @@ module.exports = class TokenStream {
         return this.source.substring(start, this.tokenStart);
     }
 
-    skipWS() {
-        let skipTokenCount = 0;
-
-        for (let i = this.tokenIndex; i < this.tokenCount; i++, skipTokenCount++) {
-            if ((this.offsetAndType[i] >> TYPE_SHIFT) !== WhiteSpace) {
-                break;
-            }
-        }
-
-        if (skipTokenCount > 0) {
-            this.skip(skipTokenCount);
-        }
-    }
     skipSC() {
         while (this.tokenType === WhiteSpace || this.tokenType === Comment) {
             this.next();
diff --git a/lib/syntax/node/Ratio.js b/lib/syntax/node/Ratio.js
index cbcc0ffd..3228a60a 100644
--- a/lib/syntax/node/Ratio.js
+++ b/lib/syntax/node/Ratio.js
@@ -11,7 +11,7 @@ const FULLSTOP = 0x002E; // U+002E FULL STOP (.)
 // to test a term is unsigned number without an exponent part.
 // Additional checking may be applied on lexer validation.
function consumeNumber() { - this.skipWS(); + this.skipSC(); const value = this.consume(NumberToken); @@ -41,7 +41,7 @@ module.exports = { const left = consumeNumber.call(this); let right; - this.skipWS(); + this.skipSC(); this.eatDelim(SOLIDUS); right = consumeNumber.call(this); From 9fda581328da53141e9b42f63ccd1a2e48e86b9c Mon Sep 17 00:00:00 2001 From: Roman Dvornov Date: Tue, 24 Mar 2020 01:31:41 +0100 Subject: [PATCH 08/11] Remove TokenStream#getTokenLength() --- CHANGELOG.md | 1 + lib/common/TokenStream.js | 3 --- lib/syntax/node/AnPlusB.js | 4 ++-- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 82609640..be48ec79 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ - Changed `TokenStream#getRawLength()` to take second parameter as a function (rule) that check a char code to stop a scanning - Added `TokenStream#forEachToken(fn)` method - Removed `TokenStream#skipWS()` method + - Removed `TokenStream#getTokenLength()` method - Parser - Renamed `HexColor` node type into `Hash` - Changed selector parsing to produce `{ type: 'Combinator', name: ' ' }` node instead of `WhiteSpace` node diff --git a/lib/common/TokenStream.js b/lib/common/TokenStream.js index 3edc25e1..28cdc143 100644 --- a/lib/common/TokenStream.js +++ b/lib/common/TokenStream.js @@ -208,9 +208,6 @@ module.exports = class TokenStream { getTokenValue() { return this.source.substring(this.tokenStart, this.tokenEnd); } - getTokenLength() { - return this.tokenEnd - this.tokenStart; - } substrToCursor(start) { return this.source.substring(start, this.tokenStart); } diff --git a/lib/syntax/node/AnPlusB.js b/lib/syntax/node/AnPlusB.js index 63a66a96..17a5ad88 100644 --- a/lib/syntax/node/AnPlusB.js +++ b/lib/syntax/node/AnPlusB.js @@ -125,7 +125,7 @@ module.exports = { expectCharCode.call(this, 1, N); - switch (this.getTokenLength()) { + switch (this.tokenEnd - this.tokenStart) { // -n // -n // -n ['+' | '-'] @@ -173,7 +173,7 @@ module.exports = { expectCharCode.call(this, 0, N); - switch (this.getTokenLength()) { + switch (this.tokenEnd - this.tokenStart) { // '+'? n // '+'? n // '+'? 
n ['+' | '-'] From 3e6bf4fc94e3ad35a2190a7ad528b9cd3ae917d3 Mon Sep 17 00:00:00 2001 From: Roman Dvornov Date: Wed, 25 Mar 2020 13:41:48 +0100 Subject: [PATCH 09/11] Remove TokenStream#getTokenValue() method and related --- lib/common/TokenStream.js | 10 +++------- lib/parser/create.js | 4 ++-- lib/syntax/node/AttributeSelector.js | 3 +-- lib/syntax/node/Percentage.js | 7 ++----- 4 files changed, 8 insertions(+), 16 deletions(-) diff --git a/lib/common/TokenStream.js b/lib/common/TokenStream.js index 28cdc143..cce2b8f9 100644 --- a/lib/common/TokenStream.js +++ b/lib/common/TokenStream.js @@ -151,6 +151,9 @@ module.exports = class TokenStream { return this.firstCharOffset; } + substrToCursor(start) { + return this.source.substring(start, this.tokenStart); + } // TODO: -> skipUntilBalanced getRawLength(startToken, stopConsume) { @@ -205,13 +208,6 @@ module.exports = class TokenStream { ); } - getTokenValue() { - return this.source.substring(this.tokenStart, this.tokenEnd); - } - substrToCursor(start) { - return this.source.substring(start, this.tokenStart); - } - skipSC() { while (this.tokenType === WhiteSpace || this.tokenType === Comment) { this.next(); diff --git a/lib/parser/create.js b/lib/parser/create.js index 1bb26788..d7bfc65a 100644 --- a/lib/parser/create.js +++ b/lib/parser/create.js @@ -213,11 +213,11 @@ module.exports = function createParser(config) { }, consume(tokenType) { - const value = this.getTokenValue(); + const start = this.tokenStart; this.eat(tokenType); - return value; + return this.substrToCursor(start); }, consumeFunctionName() { const name = source.substring(this.tokenStart, this.tokenEnd - 1); diff --git a/lib/syntax/node/AttributeSelector.js b/lib/syntax/node/AttributeSelector.js index b76e5e3a..64c88e00 100644 --- a/lib/syntax/node/AttributeSelector.js +++ b/lib/syntax/node/AttributeSelector.js @@ -120,8 +120,7 @@ module.exports = { // attribute flags if (this.tokenType === Ident) { - flags = this.getTokenValue(); - this.next(); + flags = this.consume(Ident); this.skipSC(); } diff --git a/lib/syntax/node/Percentage.js b/lib/syntax/node/Percentage.js index 52d60e11..e5e17f83 100644 --- a/lib/syntax/node/Percentage.js +++ b/lib/syntax/node/Percentage.js @@ -6,13 +6,10 @@ module.exports = { value: String }, parse: function() { - const start = this.tokenStart; - const value = this.consumeNumber(Percentage); - return { type: 'Percentage', - loc: this.getLocation(start, this.tokenStart), - value + loc: this.getLocation(this.tokenStart, this.tokenEnd), + value: this.consumeNumber(Percentage) }; }, generate: function(node) { From 3f6b54a376719e1765472b64516ada87b83e1a97 Mon Sep 17 00:00:00 2001 From: Roman Dvornov Date: Wed, 25 Mar 2020 14:10:28 +0100 Subject: [PATCH 10/11] TokenStream#getRawLength() -> skipUntilBalanced() --- lib/common/TokenStream.js | 81 +++++++++++++++++++-------------------- lib/syntax/node/Raw.js | 4 +- test/tokenizer.js | 2 +- 3 files changed, 42 insertions(+), 45 deletions(-) diff --git a/lib/common/TokenStream.js b/lib/common/TokenStream.js index cce2b8f9..701a0488 100644 --- a/lib/common/TokenStream.js +++ b/lib/common/TokenStream.js @@ -155,42 +155,6 @@ module.exports = class TokenStream { return this.source.substring(start, this.tokenStart); } - // TODO: -> skipUntilBalanced - getRawLength(startToken, stopConsume) { - let cursor = startToken; - let balanceEnd; - let offset; - - loop: - for (; cursor < this.tokenCount; cursor++) { - balanceEnd = this.balance[cursor]; - - // stop scanning on balance edge that points to offset before start 
token - if (balanceEnd < startToken) { - break loop; - } - - offset = cursor > 0 ? this.offsetAndType[cursor - 1] & OFFSET_MASK : this.firstCharOffset; - - // check stop condition - switch (stopConsume(this.source.charCodeAt(offset))) { - case 1: // just stop - break loop; - - case 2: // stop & included - cursor++; - break loop; - - default: - // fast forward to the end of balanced block - if (this.balance[balanceEnd] === cursor) { - cursor = balanceEnd; - } - } - } - - return cursor - this.tokenIndex; - } isBalanceEdge(pos) { return this.balance[this.tokenIndex] < pos; } @@ -208,11 +172,6 @@ module.exports = class TokenStream { ); } - skipSC() { - while (this.tokenType === WhiteSpace || this.tokenType === Comment) { - this.next(); - } - } skip(tokenCount) { let next = this.tokenIndex + tokenCount; @@ -243,6 +202,46 @@ module.exports = class TokenStream { this.tokenStart = this.tokenEnd = this.source.length; } } + skipSC() { + while (this.tokenType === WhiteSpace || this.tokenType === Comment) { + this.next(); + } + } + skipUntilBalanced(startToken, stopConsume) { + let cursor = startToken; + let balanceEnd; + let offset; + + loop: + for (; cursor < this.tokenCount; cursor++) { + balanceEnd = this.balance[cursor]; + + // stop scanning on balance edge that points to offset before start token + if (balanceEnd < startToken) { + break loop; + } + + offset = cursor > 0 ? this.offsetAndType[cursor - 1] & OFFSET_MASK : this.firstCharOffset; + + // check stop condition + switch (stopConsume(this.source.charCodeAt(offset))) { + case 1: // just stop + break loop; + + case 2: // stop & included + cursor++; + break loop; + + default: + // fast forward to the end of balanced block + if (this.balance[balanceEnd] === cursor) { + cursor = balanceEnd; + } + } + } + + this.skip(cursor - this.tokenIndex); + } forEachToken(fn) { for (let i = 0, offset = this.firstCharOffset; i < this.tokenCount; i++) { diff --git a/lib/syntax/node/Raw.js b/lib/syntax/node/Raw.js index a97a856a..07b238c7 100644 --- a/lib/syntax/node/Raw.js +++ b/lib/syntax/node/Raw.js @@ -21,9 +21,7 @@ module.exports = { const startOffset = this.getTokenStart(startToken); let endOffset; - this.skip( - this.getRawLength(startToken, consumeUntil || this.consumeUntilBalanceEnd) - ); + this.skipUntilBalanced(startToken, consumeUntil || this.consumeUntilBalanceEnd); if (excludeWhiteSpace && this.tokenStart > startOffset) { endOffset = getOffsetExcludeWS.call(this); diff --git a/test/tokenizer.js b/test/tokenizer.js index 334de0da..35fe4e52 100644 --- a/test/tokenizer.js +++ b/test/tokenizer.js @@ -259,7 +259,7 @@ describe('tokenize/stream', () => { stream.next(); } - stream.skip(stream.getRawLength(startToken, test.mode || (() => 0))); + stream.skipUntilBalanced(startToken, test.mode || (() => 0)); assert.equal( stream.source.substring(startOffset, stream.tokenStart), test.expected From 283066962e58f5bc76d945bcf3e4057e3e9fa79e Mon Sep 17 00:00:00 2001 From: Roman Dvornov Date: Mon, 18 May 2020 22:40:50 +0200 Subject: [PATCH 11/11] Renaming in test files --- test/{syntax-match.js => definition-syntax-match.js} | 2 +- .../complex-cases.json | 0 .../component-matching.json | 0 .../core-combinators.json | 0 .../{syntax-match => definition-syntax-match}/core-comma.json | 0 .../core-function.json | 0 .../core-multipliers.json | 0 .../core-parentheses.json | 0 .../{syntax-match => definition-syntax-match}/core-string.json | 0 .../{syntax-match => definition-syntax-match}/custom-ident.json | 0 .../{syntax-match => definition-syntax-match}/generic.json | 0 
test/fixture/{syntax-match => definition-syntax-match}/index.js | 0 .../{syntax-match => definition-syntax-match}/length.json | 0 test/fixture/{syntax => definition-syntax}/atkeyword.json | 0 test/fixture/{syntax => definition-syntax}/atrules.json | 0 .../{syntax => definition-syntax}/bracketed-range-notation.json | 0 test/fixture/{syntax => definition-syntax}/combinator.json | 0 test/fixture/{syntax => definition-syntax}/comma.json | 0 .../{syntax => definition-syntax}/default-properties.json | 0 test/fixture/{syntax => definition-syntax}/edgecases.json | 0 test/fixture/{syntax => definition-syntax}/function.json | 0 test/fixture/{syntax => definition-syntax}/index.js | 0 test/fixture/{syntax => definition-syntax}/keyword.json | 0 test/fixture/{syntax => definition-syntax}/multiplier.json | 0 test/fixture/{syntax => definition-syntax}/numeric.json | 0 test/fixture/{syntax => definition-syntax}/parentheses.json | 0 test/fixture/{syntax => definition-syntax}/property.json | 0 test/fixture/{syntax => definition-syntax}/slash.json | 0 test/fixture/{syntax => definition-syntax}/type.json | 0 test/fixture/{syntax => definition-syntax}/var.json | 0 test/lexer-match-atrule-descriptor.js | 2 +- test/lexer-match-atrule-prelude.js | 2 +- test/lexer-match-property.js | 2 +- test/parse.js | 2 +- 34 files changed, 5 insertions(+), 5 deletions(-) rename test/{syntax-match.js => definition-syntax-match.js} (98%) rename test/fixture/{syntax-match => definition-syntax-match}/complex-cases.json (100%) rename test/fixture/{syntax-match => definition-syntax-match}/component-matching.json (100%) rename test/fixture/{syntax-match => definition-syntax-match}/core-combinators.json (100%) rename test/fixture/{syntax-match => definition-syntax-match}/core-comma.json (100%) rename test/fixture/{syntax-match => definition-syntax-match}/core-function.json (100%) rename test/fixture/{syntax-match => definition-syntax-match}/core-multipliers.json (100%) rename test/fixture/{syntax-match => definition-syntax-match}/core-parentheses.json (100%) rename test/fixture/{syntax-match => definition-syntax-match}/core-string.json (100%) rename test/fixture/{syntax-match => definition-syntax-match}/custom-ident.json (100%) rename test/fixture/{syntax-match => definition-syntax-match}/generic.json (100%) rename test/fixture/{syntax-match => definition-syntax-match}/index.js (100%) rename test/fixture/{syntax-match => definition-syntax-match}/length.json (100%) rename test/fixture/{syntax => definition-syntax}/atkeyword.json (100%) rename test/fixture/{syntax => definition-syntax}/atrules.json (100%) rename test/fixture/{syntax => definition-syntax}/bracketed-range-notation.json (100%) rename test/fixture/{syntax => definition-syntax}/combinator.json (100%) rename test/fixture/{syntax => definition-syntax}/comma.json (100%) rename test/fixture/{syntax => definition-syntax}/default-properties.json (100%) rename test/fixture/{syntax => definition-syntax}/edgecases.json (100%) rename test/fixture/{syntax => definition-syntax}/function.json (100%) rename test/fixture/{syntax => definition-syntax}/index.js (100%) rename test/fixture/{syntax => definition-syntax}/keyword.json (100%) rename test/fixture/{syntax => definition-syntax}/multiplier.json (100%) rename test/fixture/{syntax => definition-syntax}/numeric.json (100%) rename test/fixture/{syntax => definition-syntax}/parentheses.json (100%) rename test/fixture/{syntax => definition-syntax}/property.json (100%) rename test/fixture/{syntax => definition-syntax}/slash.json (100%) rename 
test/fixture/{syntax => definition-syntax}/type.json (100%) rename test/fixture/{syntax => definition-syntax}/var.json (100%) diff --git a/test/syntax-match.js b/test/definition-syntax-match.js similarity index 98% rename from test/syntax-match.js rename to test/definition-syntax-match.js index 33b822fa..9fab249c 100644 --- a/test/syntax-match.js +++ b/test/definition-syntax-match.js @@ -3,7 +3,7 @@ const prepareTokens = require('../lib/lexer/prepare-tokens'); const genericSyntaxes = require('../lib/lexer/generic'); const { buildMatchGraph } = require('../lib/lexer/match-graph'); const { matchAsList, matchAsTree } = require('../lib/lexer/match'); -const fixture = require('./fixture/syntax-match'); +const fixture = require('./fixture/definition-syntax-match'); function processMatchResult(mr) { if (Array.isArray(mr)) { diff --git a/test/fixture/syntax-match/complex-cases.json b/test/fixture/definition-syntax-match/complex-cases.json similarity index 100% rename from test/fixture/syntax-match/complex-cases.json rename to test/fixture/definition-syntax-match/complex-cases.json diff --git a/test/fixture/syntax-match/component-matching.json b/test/fixture/definition-syntax-match/component-matching.json similarity index 100% rename from test/fixture/syntax-match/component-matching.json rename to test/fixture/definition-syntax-match/component-matching.json diff --git a/test/fixture/syntax-match/core-combinators.json b/test/fixture/definition-syntax-match/core-combinators.json similarity index 100% rename from test/fixture/syntax-match/core-combinators.json rename to test/fixture/definition-syntax-match/core-combinators.json diff --git a/test/fixture/syntax-match/core-comma.json b/test/fixture/definition-syntax-match/core-comma.json similarity index 100% rename from test/fixture/syntax-match/core-comma.json rename to test/fixture/definition-syntax-match/core-comma.json diff --git a/test/fixture/syntax-match/core-function.json b/test/fixture/definition-syntax-match/core-function.json similarity index 100% rename from test/fixture/syntax-match/core-function.json rename to test/fixture/definition-syntax-match/core-function.json diff --git a/test/fixture/syntax-match/core-multipliers.json b/test/fixture/definition-syntax-match/core-multipliers.json similarity index 100% rename from test/fixture/syntax-match/core-multipliers.json rename to test/fixture/definition-syntax-match/core-multipliers.json diff --git a/test/fixture/syntax-match/core-parentheses.json b/test/fixture/definition-syntax-match/core-parentheses.json similarity index 100% rename from test/fixture/syntax-match/core-parentheses.json rename to test/fixture/definition-syntax-match/core-parentheses.json diff --git a/test/fixture/syntax-match/core-string.json b/test/fixture/definition-syntax-match/core-string.json similarity index 100% rename from test/fixture/syntax-match/core-string.json rename to test/fixture/definition-syntax-match/core-string.json diff --git a/test/fixture/syntax-match/custom-ident.json b/test/fixture/definition-syntax-match/custom-ident.json similarity index 100% rename from test/fixture/syntax-match/custom-ident.json rename to test/fixture/definition-syntax-match/custom-ident.json diff --git a/test/fixture/syntax-match/generic.json b/test/fixture/definition-syntax-match/generic.json similarity index 100% rename from test/fixture/syntax-match/generic.json rename to test/fixture/definition-syntax-match/generic.json diff --git a/test/fixture/syntax-match/index.js b/test/fixture/definition-syntax-match/index.js similarity 
index 100% rename from test/fixture/syntax-match/index.js rename to test/fixture/definition-syntax-match/index.js diff --git a/test/fixture/syntax-match/length.json b/test/fixture/definition-syntax-match/length.json similarity index 100% rename from test/fixture/syntax-match/length.json rename to test/fixture/definition-syntax-match/length.json diff --git a/test/fixture/syntax/atkeyword.json b/test/fixture/definition-syntax/atkeyword.json similarity index 100% rename from test/fixture/syntax/atkeyword.json rename to test/fixture/definition-syntax/atkeyword.json diff --git a/test/fixture/syntax/atrules.json b/test/fixture/definition-syntax/atrules.json similarity index 100% rename from test/fixture/syntax/atrules.json rename to test/fixture/definition-syntax/atrules.json diff --git a/test/fixture/syntax/bracketed-range-notation.json b/test/fixture/definition-syntax/bracketed-range-notation.json similarity index 100% rename from test/fixture/syntax/bracketed-range-notation.json rename to test/fixture/definition-syntax/bracketed-range-notation.json diff --git a/test/fixture/syntax/combinator.json b/test/fixture/definition-syntax/combinator.json similarity index 100% rename from test/fixture/syntax/combinator.json rename to test/fixture/definition-syntax/combinator.json diff --git a/test/fixture/syntax/comma.json b/test/fixture/definition-syntax/comma.json similarity index 100% rename from test/fixture/syntax/comma.json rename to test/fixture/definition-syntax/comma.json diff --git a/test/fixture/syntax/default-properties.json b/test/fixture/definition-syntax/default-properties.json similarity index 100% rename from test/fixture/syntax/default-properties.json rename to test/fixture/definition-syntax/default-properties.json diff --git a/test/fixture/syntax/edgecases.json b/test/fixture/definition-syntax/edgecases.json similarity index 100% rename from test/fixture/syntax/edgecases.json rename to test/fixture/definition-syntax/edgecases.json diff --git a/test/fixture/syntax/function.json b/test/fixture/definition-syntax/function.json similarity index 100% rename from test/fixture/syntax/function.json rename to test/fixture/definition-syntax/function.json diff --git a/test/fixture/syntax/index.js b/test/fixture/definition-syntax/index.js similarity index 100% rename from test/fixture/syntax/index.js rename to test/fixture/definition-syntax/index.js diff --git a/test/fixture/syntax/keyword.json b/test/fixture/definition-syntax/keyword.json similarity index 100% rename from test/fixture/syntax/keyword.json rename to test/fixture/definition-syntax/keyword.json diff --git a/test/fixture/syntax/multiplier.json b/test/fixture/definition-syntax/multiplier.json similarity index 100% rename from test/fixture/syntax/multiplier.json rename to test/fixture/definition-syntax/multiplier.json diff --git a/test/fixture/syntax/numeric.json b/test/fixture/definition-syntax/numeric.json similarity index 100% rename from test/fixture/syntax/numeric.json rename to test/fixture/definition-syntax/numeric.json diff --git a/test/fixture/syntax/parentheses.json b/test/fixture/definition-syntax/parentheses.json similarity index 100% rename from test/fixture/syntax/parentheses.json rename to test/fixture/definition-syntax/parentheses.json diff --git a/test/fixture/syntax/property.json b/test/fixture/definition-syntax/property.json similarity index 100% rename from test/fixture/syntax/property.json rename to test/fixture/definition-syntax/property.json diff --git a/test/fixture/syntax/slash.json 
b/test/fixture/definition-syntax/slash.json similarity index 100% rename from test/fixture/syntax/slash.json rename to test/fixture/definition-syntax/slash.json diff --git a/test/fixture/syntax/type.json b/test/fixture/definition-syntax/type.json similarity index 100% rename from test/fixture/syntax/type.json rename to test/fixture/definition-syntax/type.json diff --git a/test/fixture/syntax/var.json b/test/fixture/definition-syntax/var.json similarity index 100% rename from test/fixture/syntax/var.json rename to test/fixture/definition-syntax/var.json diff --git a/test/lexer-match-atrule-descriptor.js b/test/lexer-match-atrule-descriptor.js index 24c1aa01..e7e5428a 100644 --- a/test/lexer-match-atrule-descriptor.js +++ b/test/lexer-match-atrule-descriptor.js @@ -1,7 +1,7 @@ const assert = require('assert'); const { parse, lexer, fork } = require('./helpers/lib'); const { lazyValues } = require('./helpers'); -const fixture = require('./fixture/syntax'); +const fixture = require('./fixture/definition-syntax'); const values = lazyValues({ swapValue: () => parse('swap', { context: 'value' }), xxxValue: () => parse('xxx', { context: 'value' }), diff --git a/test/lexer-match-atrule-prelude.js b/test/lexer-match-atrule-prelude.js index 4d08112e..134f6095 100644 --- a/test/lexer-match-atrule-prelude.js +++ b/test/lexer-match-atrule-prelude.js @@ -1,7 +1,7 @@ const assert = require('assert'); const { parse, lexer, fork } = require('./helpers/lib'); const { lazyValues } = require('./helpers'); -const fixture = require('./fixture/syntax'); +const fixture = require('./fixture/definition-syntax'); const values = lazyValues({ animationName: () => parse('animation-name', { context: 'atrulePrelude', atrule: 'keyframes' }), number: () => parse('123', { context: 'atrulePrelude', atrule: 'unknown' }), diff --git a/test/lexer-match-property.js b/test/lexer-match-property.js index ef37aa41..bf292fd5 100644 --- a/test/lexer-match-property.js +++ b/test/lexer-match-property.js @@ -1,7 +1,7 @@ const assert = require('assert'); const { parse, lexer, fork } = require('./helpers/lib'); const { lazyValues } = require('./helpers'); -const fixture = require('./fixture/syntax'); +const fixture = require('./fixture/definition-syntax'); const values = lazyValues({ bar: () => parse('bar', { context: 'value' }), qux: () => parse('qux', { context: 'value' }), diff --git a/test/parse.js b/test/parse.js index 994dcef6..553b9105 100644 --- a/test/parse.js +++ b/test/parse.js @@ -1,7 +1,7 @@ const assert = require('assert'); const { parse, walk, List } = require('./helpers/lib'); const forEachParseTest = require('./fixture/parse').forEachTest; -const genericTypesFixture = require('./fixture/syntax-match/generic.json'); +const genericTypesFixture = require('./fixture/definition-syntax-match/generic.json'); const stringifyWithNoInfo = ast => JSON.stringify(ast, (key, value) => key !== 'loc' ? value : undefined, 4); function createParseErrorTest(name, test, options) {
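Taken together, patches 06 through 10 leave the tokenizer with a small, callback-centric surface: `tokenize(source, onToken)` for one-shot scans and a `TokenStream` that is fed by a tokenize function for cursor-style consumption. A rough sketch of how the pieces combine (illustrative only; the in-repo module paths are assumptions, and the `forEachToken()` callback is assumed to mirror the `(type, start, end)` shape of the `tokenize()` callback):

    const TokenStream = require('./lib/common/TokenStream');
    const tokenize = require('./lib/tokenizer');

    const css = '.a { color: red }';

    // one-shot scan: no TokenStream is allocated
    tokenize(css, (type, start, end) =>
        console.log(tokenize.NAME[type], css.slice(start, end))
    );

    // cursor-style consumption: the stream now takes the source and a tokenize function
    const stream = new TokenStream(css, tokenize);

    stream.forEachToken((type, start, end) =>
        console.log(tokenize.NAME[type], css.substring(start, end))
    );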