From 067a4fd375a53904b84d0e3a4d4a5edcaf83b552 Mon Sep 17 00:00:00 2001 From: Markus Felten Date: Thu, 3 Jan 2019 00:37:36 +0100 Subject: [PATCH] feat: get rid of cjs module BREAKING CHANGE: only provide esm module --- .markdown-doctest-setup.js | 6 +- README.md | 91 +++--- package.json | 9 +- rollup.config.js | 18 -- src/{known-tokens.js => known-tokens.mjs} | 0 src/{parser.js => parser.mjs} | 0 src/{tokenizer.js => tokenizer.mjs} | 0 ...oken-test.js => identifier-token-test.mjs} | 0 ...on-parser-test.js => json-parser-test.mjs} | 0 ...d-token-test.js => keyword-token-test.mjs} | 0 ...en-test.js => line-comment-token-test.mjs} | 0 .../{mini-lang-test.js => mini-lang-test.mjs} | 0 ...rammar-test.js => simple-grammar-test.mjs} | 0 tests/token-kitchen-sink-test.js | 294 ------------------ tests/token-kitchen-sink-test.mjs | 293 +++++++++++++++++ .../{tokenizer-test.js => tokenizer-test.mjs} | 0 ...oken-test.js => whitespace-token-test.mjs} | 0 17 files changed, 344 insertions(+), 367 deletions(-) delete mode 100644 rollup.config.js rename src/{known-tokens.js => known-tokens.mjs} (100%) rename src/{parser.js => parser.mjs} (100%) rename src/{tokenizer.js => tokenizer.mjs} (100%) rename tests/{identifier-token-test.js => identifier-token-test.mjs} (100%) rename tests/{json-parser-test.js => json-parser-test.mjs} (100%) rename tests/{keyword-token-test.js => keyword-token-test.mjs} (100%) rename tests/{line-comment-token-test.js => line-comment-token-test.mjs} (100%) rename tests/{mini-lang-test.js => mini-lang-test.mjs} (100%) rename tests/{simple-grammar-test.js => simple-grammar-test.mjs} (100%) delete mode 100644 tests/token-kitchen-sink-test.js create mode 100644 tests/token-kitchen-sink-test.mjs rename tests/{tokenizer-test.js => tokenizer-test.mjs} (100%) rename tests/{whitespace-token-test.js => whitespace-token-test.mjs} (100%) diff --git a/.markdown-doctest-setup.js b/.markdown-doctest-setup.js index 62c224c7..a1a981fd 100644 --- a/.markdown-doctest-setup.js +++ b/.markdown-doctest-setup.js @@ -1,5 +1,3 @@ module.exports = { - require: { - 'pratt-parser': require('./dist/parser') - } -} + require: {} +}; diff --git a/README.md b/README.md index 74cd342c..60b423c8 100644 --- a/README.md +++ b/README.md @@ -23,8 +23,10 @@ Based on [Top Down Operator Precedence](https://tdop.github.io) and [Douglas Crockford TDOP](https://github.com/douglascrockford/TDOP) + + ```javascript -const {Parser, WhiteSpaceToken, NumberToken} = require('pratt-parser'); +import { Parser, WhiteSpaceToken, NumberToken } from "pratt-parser"; function Value(value) { return Object.create(null, { @@ -35,34 +37,31 @@ function Value(value) { } const myGrammar = new Parser({ - tokens: [ - WhiteSpaceToken, - NumberToken - ], + tokens: [WhiteSpaceToken, NumberToken], prefix: { - '(': { + "(": { nud(grammar) { const e = grammar.expression(0); - grammar.advance(')'); + grammar.advance(")"); return e; } } }, infix: { - ')': {}, - '+': { + ")": {}, + "+": { precedence: 50, combine: (left, right) => Value(left.value + right.value) }, - '-': { + "-": { precedence: 50, combine: (left, right) => Value(left.value - right.value) }, - '*': { + "*": { precedence: 60, combine: (left, right) => Value(left.value * right.value) }, - '/': { + "/": { precedence: 60, combine: (left, right) => Value(left.value / right.value) } @@ -78,27 +77,27 @@ console.log(myGrammar.parse("(1 + (1 + 4 * 3)) * (2 + 1)").value); ### Table of Contents -- [pratt-parser](#pratt-parser) -- [pratt-parser](#pratt-parser-1) -- [pratt-parser](#pratt-parser-2) -- 
[Parser](#parser)
-  - [Parameters](#parameters)
-  - [error](#error)
-    - [Parameters](#parameters-1)
-  - [parse](#parse)
-    - [Parameters](#parameters-2)
-- [RootToken](#roottoken)
-  - [parseString](#parsestring)
-    - [Parameters](#parameters-3)
-- [WhiteSpaceToken](#whitespacetoken)
-- [LineCommentToken](#linecommenttoken)
-- [EOFToken](#eoftoken)
-- [Tokenizer](#tokenizer)
-  - [Parameters](#parameters-4)
-  - [tokens](#tokens)
-    - [Parameters](#parameters-5)
-  - [error](#error-1)
-    - [Parameters](#parameters-6)
+- [pratt-parser](#pratt-parser)
+- [pratt-parser](#pratt-parser-1)
+- [pratt-parser](#pratt-parser-2)
+- [Parser](#parser)
+  - [Parameters](#parameters)
+  - [error](#error)
+    - [Parameters](#parameters-1)
+  - [parse](#parse)
+    - [Parameters](#parameters-2)
+- [RootToken](#roottoken)
+  - [parseString](#parsestring)
+    - [Parameters](#parameters-3)
+- [WhiteSpaceToken](#whitespacetoken)
+- [LineCommentToken](#linecommenttoken)
+- [EOFToken](#eoftoken)
+- [Tokenizer](#tokenizer)
+  - [Parameters](#parameters-4)
+  - [tokens](#tokens)
+    - [Parameters](#parameters-5)
+  - [error](#error-1)
+    - [Parameters](#parameters-6)

## pratt-parser

## Parser

Creates a grammar for later parsing

### Parameters

-- `grammar` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** definition of the grammar with operators...
-- `options` (optional, default `{}`)
+- `grammar` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** definition of the grammar with operators...
+- `options` (optional, default `{}`)

### error

Forwards error to the tokenizer

#### Parameters

-- `args` **...any**
+- `args` **...any**

Returns **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** error

### parse

Parses the input and delivers the outermost expression.

#### Parameters

-- `chunk` **[string](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** input text
-- `context` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** object transparently passed to tokenizer
+- `chunk` **[string](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** input text
+- `context` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** object transparently passed to tokenizer

Returns **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** evaluated input

## RootToken

### parseString

Modifies ParsePosition so that it points behind the detected token.

#### Parameters

-- `pp` **ParsePosition**
+- `pp` **ParsePosition**

Returns **Token**

## WhiteSpaceToken

## LineCommentToken

## EOFToken

## Tokenizer

Creates a tokenizer for later parsing

### Parameters

-- `grammar` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** definition of the grammar with operators...
+- `grammar` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** definition of the grammar with operators...
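For illustration, a minimal sketch of driving a tokenizer directly, using the `tokens()` generator documented next. This assumes `Tokenizer` and the bundled token classes are exported from the package root, as this API section suggests; the grammar shape mirrors the kitchen-sink test further down:

```javascript
import { Tokenizer, WhiteSpaceToken, NumberToken } from "pratt-parser";

// Token classes recognize lexemes; infix entries declare operators,
// optionally with a precedence.
const tokenizer = new Tokenizer({
  tokens: [WhiteSpaceToken, NumberToken],
  infix: { "+": {}, "*": { precedence: 42 } }
});

// tokens() yields one token object per lexeme in the input.
for (const token of tokenizer.tokens("1 + 2 * 3")) {
  console.log(token.type, token.value, token.lineNumber);
}
```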
### tokens

delivers tokens from the input

#### Parameters

-- `chunk` **[string](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** the input to be processed
-- `context` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** additional info to be used by the actual token types
+- `chunk` **[string](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** the input to be processed
+- `context` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** additional info to be used by the actual token types

### error

#### Parameters

-- `message` **[string](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)**
-- `context` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** token initiating the error
-- `value` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)?**
+- `message` **[string](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)**
+- `context` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** token initiating the error
+- `value` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)?**

Returns **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** error

diff --git a/package.json b/package.json
index 4b3e47fc..ddf3ec13 100644
--- a/package.json
+++ b/package.json
@@ -4,8 +4,8 @@
   "publishConfig": {
     "access": "public"
   },
-  "main": "dist/parser.js",
-  "module": "src/parser.js",
+  "main": "src/parser.mjs",
+  "module": "src/parser.mjs",
   "description": "TDOP parser",
   "keywords": [
     "tokenizer",
@@ -23,9 +23,8 @@
   "scripts": {
     "cover": "c8 --temp-directory build/coverage ava && c8 report -r lcov --temp-directory build/coverage",
-    "docs": "documentation readme src/parser.js --section=API",
-    "lint": "documentation lint src/parser.js",
-    "posttest": "markdown-doctest && npm run prepare",
-    "pretest": "rollup -c tests/rollup.config.js",
+    "docs": "documentation readme src/parser.mjs --section=API",
+    "lint": "documentation lint src/parser.mjs",
+    "posttest": "markdown-doctest",
     "test": "ava"
   },
   "dependencies": {},
diff --git a/rollup.config.js b/rollup.config.js
deleted file mode 100644
index 4c722a17..00000000
--- a/rollup.config.js
+++ /dev/null
@@ -1,18 +0,0 @@
-import json from "rollup-plugin-json";
-import cleanup from 'rollup-plugin-cleanup';
-import executable from 'rollup-plugin-executable';
-import commonjs from 'rollup-plugin-commonjs';
-import resolve from 'rollup-plugin-node-resolve';
-import pkg from './package.json';
-
-export default {
-  output: {
-    file: pkg.main,
-    format: 'cjs',
-    interop: false
-  },
-
-  plugins: [],
-
-  input: pkg.module
-};
diff --git a/src/known-tokens.js b/src/known-tokens.mjs
similarity index 100%
rename from src/known-tokens.js
rename to src/known-tokens.mjs
diff --git a/src/parser.js b/src/parser.mjs
similarity index 100%
rename from src/parser.js
rename to src/parser.mjs
diff --git a/src/tokenizer.js b/src/tokenizer.mjs
similarity index 100%
rename from src/tokenizer.js
rename to src/tokenizer.mjs
diff --git a/tests/identifier-token-test.js b/tests/identifier-token-test.mjs
similarity index 100%
rename from tests/identifier-token-test.js
rename to tests/identifier-token-test.mjs
diff --git a/tests/json-parser-test.js b/tests/json-parser-test.mjs
similarity index 100%
rename from
tests/json-parser-test.js rename to tests/json-parser-test.mjs diff --git a/tests/keyword-token-test.js b/tests/keyword-token-test.mjs similarity index 100% rename from tests/keyword-token-test.js rename to tests/keyword-token-test.mjs diff --git a/tests/line-comment-token-test.js b/tests/line-comment-token-test.mjs similarity index 100% rename from tests/line-comment-token-test.js rename to tests/line-comment-token-test.mjs diff --git a/tests/mini-lang-test.js b/tests/mini-lang-test.mjs similarity index 100% rename from tests/mini-lang-test.js rename to tests/mini-lang-test.mjs diff --git a/tests/simple-grammar-test.js b/tests/simple-grammar-test.mjs similarity index 100% rename from tests/simple-grammar-test.js rename to tests/simple-grammar-test.mjs diff --git a/tests/token-kitchen-sink-test.js b/tests/token-kitchen-sink-test.js deleted file mode 100644 index 6795db41..00000000 --- a/tests/token-kitchen-sink-test.js +++ /dev/null @@ -1,294 +0,0 @@ -import test from 'ava'; - -import { - WhiteSpaceToken, - NumberToken, - StringToken, - IdentifierToken -} from '../src/known-tokens'; -import { Tokenizer } from '../src/tokenizer'; - -const path = require('path'); -const fs = require('fs'); - -const tokenizer = new Tokenizer({ - tokens: [WhiteSpaceToken, NumberToken, StringToken, IdentifierToken], - infix: { - '=': { - precedence: 77 - }, - '+': {}, - '-': {}, - '*': { - precedence: 42 - }, - '/': {}, - '(': {}, - ')': {}, - '[': {}, - ']': {}, - '{': {}, - '}': {}, - ':': {}, - '<': {}, - '>': {}, - '.': {}, - ',': {}, - ';': {}, - '<=': {}, - '>=': {}, - '=>': {}, - '===': {}, - '!===': {} - } -}); - -test('Kitchen sink', t => { - const tokens = [ - { - type: 'number', - value: 4711, - line: 1, - pos: 0 - }, - { - type: 'number', - value: 0.23, - line: 1, - pos: 5 - }, - { - type: 'number', - value: 12345.0, - line: 1, - pos: 10 - }, - { - type: 'number', - value: 12.4e20, - line: 1, - pos: 18 - }, - { - type: 'number', - value: 0.4e7, - line: 1, - pos: 26 - }, - { - type: 'string', - value: 'str2', - line: 2, - pos: 1 - }, - { - type: 'string', - value: 'str3', - line: 2, - pos: 7 - }, - { - type: 'string', - value: '\\\b\f\n\r\t"\'A', - line: 2 - }, - { - type: 'string', - value: 'str4', - line: 2 - }, - { - type: 'string', - value: 'str5', - line: 2 - }, - { - type: 'identifier', - value: 'name1', - line: 3 - }, - { - type: 'identifier', - value: 'name_2', - line: 3 - }, - { - type: 'identifier', - value: '_name3', - line: 3 - }, - { - type: 'identifier', - value: 'n', - line: 4 - }, - { - type: 'operator', - value: '+', - line: 5 - }, - { - type: 'operator', - value: '-', - line: 6 - }, - { - type: 'operator', - value: '*', - line: 7, - precedence: 42 - }, - { - type: 'operator', - value: '/', - line: 8 - }, - { - type: 'operator', - value: '(', - line: 9 - }, - { - type: 'operator', - value: ')', - line: 9 - }, - { - type: 'operator', - value: '{', - line: 10 - }, - { - type: 'operator', - value: '}', - line: 10 - }, - { - type: 'operator', - value: '[', - line: 11 - }, - { - type: 'operator', - value: ']', - line: 11 - }, - { - type: 'operator', - value: ':', - line: 12 - }, - { - type: 'operator', - value: ',', - line: 12 - }, - { - type: 'operator', - value: ';', - line: 12 - }, - { - type: 'operator', - value: '.', - line: 12 - }, - { - type: 'operator', - value: '<', - line: 13 - }, - { - type: 'operator', - value: '===', - line: 13 - }, - { - type: 'operator', - value: '!===', - line: 13 - // pos: 22 - }, - { - type: 'operator', - value: '>', - line: 13 - }, - { - type: 
'operator',
-      value: '<=',
-      line: 14
-    },
-    {
-      type: 'operator',
-      value: '>=',
-      line: 15
-    },
-    {
-      type: 'operator',
-      value: '=',
-      line: 16,
-      precedence: 77
-    },
-    {
-      type: 'number',
-      value: 2,
-      line: 17
-    },
-    {
-      type: 'operator',
-      value: '+',
-      line: 17
-    },
-    {
-      type: 'operator',
-      value: '(',
-      line: 17
-    },
-    {
-      type: 'number',
-      value: 3,
-      line: 17
-    },
-    {
-      type: 'operator',
-      value: '*',
-      line: 17
-    },
-    {
-      type: 'number',
-      value: 17,
-      line: 17
-    },
-    {
-      type: 'operator',
-      value: ')',
-      line: 17
-    }
-  ];
-
-  const s = fs.readFileSync(
-    path.join(__dirname, '..', 'tests', 'fixtures', 'tokens1.txt'),
-    {
-      encoding: 'utf8'
-    }
-  );
-
-  let i = 0;
-
-  for (const token of tokenizer.tokens(s)) {
-    const refToken = tokens[i];
-
-    t.is(token.type, refToken.type);
-    t.is(token.id, refToken.id);
-    t.is(token.lineNumber, refToken.line);
-
-    if (refToken.pos !== undefined) {
-      t.is(token.positionInLine, refToken.pos);
-    }
-    if (refToken.precedence !== undefined) {
-      t.is(token.precedence, refToken.precedence);
-    }
-    i++;
-  }
-});
diff --git a/tests/token-kitchen-sink-test.mjs b/tests/token-kitchen-sink-test.mjs
new file mode 100644
index 00000000..a2b37904
--- /dev/null
+++ b/tests/token-kitchen-sink-test.mjs
@@ -0,0 +1,293 @@
+import test from "ava";
+
+import {
+  WhiteSpaceToken,
+  NumberToken,
+  StringToken,
+  IdentifierToken
+} from "../src/known-tokens.mjs";
+import { Tokenizer } from "../src/tokenizer.mjs";
+import { join, dirname } from "path";
+import { readFileSync } from "fs";
+import { fileURLToPath } from "url";
+
+const here = dirname(fileURLToPath(import.meta.url));
+
+const tokenizer = new Tokenizer({
+  tokens: [WhiteSpaceToken, NumberToken, StringToken, IdentifierToken],
+  infix: {
+    "=": {
+      precedence: 77
+    },
+    "+": {},
+    "-": {},
+    "*": {
+      precedence: 42
+    },
+    "/": {},
+    "(": {},
+    ")": {},
+    "[": {},
+    "]": {},
+    "{": {},
+    "}": {},
+    ":": {},
+    "<": {},
+    ">": {},
+    ".": {},
+    ",": {},
+    ";": {},
+    "<=": {},
+    ">=": {},
+    "=>": {},
+    "===": {},
+    "!===": {}
+  }
+});
+
+test("Kitchen sink", t => {
+  const tokens = [
+    {
+      type: "number",
+      value: 4711,
+      line: 1,
+      pos: 0
+    },
+    {
+      type: "number",
+      value: 0.23,
+      line: 1,
+      pos: 5
+    },
+    {
+      type: "number",
+      value: 12345.0,
+      line: 1,
+      pos: 10
+    },
+    {
+      type: "number",
+      value: 12.4e20,
+      line: 1,
+      pos: 18
+    },
+    {
+      type: "number",
+      value: 0.4e7,
+      line: 1,
+      pos: 26
+    },
+    {
+      type: "string",
+      value: "str2",
+      line: 2,
+      pos: 1
+    },
+    {
+      type: "string",
+      value: "str3",
+      line: 2,
+      pos: 7
+    },
+    {
+      type: "string",
+      value: "\\\b\f\n\r\t\"'A",
+      line: 2
+    },
+    {
+      type: "string",
+      value: "str4",
+      line: 2
+    },
+    {
+      type: "string",
+      value: "str5",
+      line: 2
+    },
+    {
+      type: "identifier",
+      value: "name1",
+      line: 3
+    },
+    {
+      type: "identifier",
+      value: "name_2",
+      line: 3
+    },
+    {
+      type: "identifier",
+      value: "_name3",
+      line: 3
+    },
+    {
+      type: "identifier",
+      value: "n",
+      line: 4
+    },
+    {
+      type: "operator",
+      value: "+",
+      line: 5
+    },
+    {
+      type: "operator",
+      value: "-",
+      line: 6
+    },
+    {
+      type: "operator",
+      value: "*",
+      line: 7,
+      precedence: 42
+    },
+    {
+      type: "operator",
+      value: "/",
+      line: 8
+    },
+    {
+      type: "operator",
+      value: "(",
+      line: 9
+    },
+    {
+      type: "operator",
+      value: ")",
+      line: 9
+    },
+    {
+      type: "operator",
+      value: "{",
+      line: 10
+    },
+    {
+      type: "operator",
+      value: "}",
+      line: 10
+    },
+    {
+      type: "operator",
+      value: "[",
+      line: 11
+    },
+    {
+      type: "operator",
+      value: "]",
+      line: 11
+    },
+    {
+      type: "operator",
+      value: ":",
+      line: 12
+    },
+    {
+ type: "operator", + value: ",", + line: 12 + }, + { + type: "operator", + value: ";", + line: 12 + }, + { + type: "operator", + value: ".", + line: 12 + }, + { + type: "operator", + value: "<", + line: 13 + }, + { + type: "operator", + value: "===", + line: 13 + }, + { + type: "operator", + value: "!===", + line: 13 + // pos: 22 + }, + { + type: "operator", + value: ">", + line: 13 + }, + { + type: "operator", + value: "<=", + line: 14 + }, + { + type: "operator", + value: ">=", + line: 15 + }, + { + type: "operator", + value: "=", + line: 16, + precedence: 77 + }, + { + type: "number", + value: 2, + line: 17 + }, + { + type: "operator", + value: "+", + line: 17 + }, + { + type: "operator", + value: "(", + line: 17 + }, + { + type: "number", + value: 3, + line: 17 + }, + { + type: "operator", + value: "*", + line: 17 + }, + { + type: "number", + value: 17, + line: 17 + }, + { + type: "operator", + value: ")", + line: 17 + } + ]; + + const s = readFileSync(join(here, "..", "tests", "fixtures", "tokens1.txt"), { + encoding: "utf8" + }); + + let i = 0; + + for (const token of tokenizer.tokens(s)) { + const refToken = tokens[i]; + + t.is(token.type, refToken.type); + t.is(token.id, refToken.id); + t.is(token.lineNumber, refToken.line); + + if (refToken.pos !== undefined) { + t.is(token.positionInLine, refToken.pos); + } + if (refToken.precedence !== undefined) { + t.is(token.precedence, refToken.precedence); + } + i++; + } +}); diff --git a/tests/tokenizer-test.js b/tests/tokenizer-test.mjs similarity index 100% rename from tests/tokenizer-test.js rename to tests/tokenizer-test.mjs diff --git a/tests/whitespace-token-test.js b/tests/whitespace-token-test.mjs similarity index 100% rename from tests/whitespace-token-test.js rename to tests/whitespace-token-test.mjs
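With `main` now pointing at `src/parser.mjs`, downstream users consume the package as an ES module only. A minimal consumer sketch follows; the file name and invocation are illustrative, and Node releases current at the time of this change still required the `--experimental-modules` flag for `.mjs` files. The grammar mirrors the README example above:

```javascript
// calc.mjs — run with: node --experimental-modules calc.mjs
import { Parser, WhiteSpaceToken, NumberToken } from "pratt-parser";

const calculator = new Parser({
  tokens: [WhiteSpaceToken, NumberToken],
  infix: {
    "+": {
      precedence: 50,
      combine: (left, right) => ({ value: left.value + right.value })
    },
    "*": {
      precedence: 60,
      combine: (left, right) => ({ value: left.value * right.value })
    }
  }
});

// "*" binds tighter than "+" (60 > 50), so this parses as 1 + (2 * 3).
console.log(calculator.parse("1 + 2 * 3").value); // 7
```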