From bb58d3e549dc5a5e067895ec4a0b3257b434f225 Mon Sep 17 00:00:00 2001 From: Jun Yang Date: Wed, 16 Nov 2022 22:53:28 +0800 Subject: [PATCH] refactor: delay creation of `operatorsTrie` and hide this implementation BREAKING CHANGE: use `operators` instead of `operatorsTrie` as Tokenizer constructor argument, #500 --- rollup.config.mjs | 1 + src/liquid-options.ts | 9 +-- src/parser/parser.ts | 2 +- src/parser/tokenizer.ts | 9 ++- src/tags/assign.ts | 2 +- src/tags/capture.ts | 2 +- src/tags/case.ts | 2 +- src/tags/cycle.ts | 2 +- src/tags/decrement.ts | 2 +- src/tags/for.ts | 2 +- src/tags/include.ts | 2 +- src/tags/increment.ts | 2 +- src/tags/layout.ts | 2 +- src/tags/liquid.ts | 2 +- src/tags/render.ts | 2 +- src/tags/tablerow.ts | 2 +- src/template/value.ts | 2 +- src/tokens/liquid-tag-token.ts | 2 +- src/tokens/tag-token.ts | 2 +- test/unit/parser/tokenizer.ts | 126 ++++++++++++++++----------------- test/unit/render/expression.ts | 5 +- 21 files changed, 86 insertions(+), 96 deletions(-) diff --git a/rollup.config.mjs b/rollup.config.mjs index d8ff2d9d80..bb30ac2609 100644 --- a/rollup.config.mjs +++ b/rollup.config.mjs @@ -16,6 +16,7 @@ const treeshake = { propertyReadSideEffects: false } const tsconfig = (target) => ({ + check: true, tsconfigOverride: { include: [ 'src' ], exclude: [ 'test', 'benchmark' ], diff --git a/src/liquid-options.ts b/src/liquid-options.ts index bf91548752..88a16f639d 100644 --- a/src/liquid-options.ts +++ b/src/liquid-options.ts @@ -4,7 +4,6 @@ import { LRU } from './cache/lru' import { FS } from './fs/fs' import * as fs from './fs/node' import { defaultOperators, Operators } from './render/operator' -import { createTrie, Trie } from './util/operator-trie' import { filters } from './filters' import { assert } from './types' @@ -99,7 +98,6 @@ interface NormalizedOptions extends LiquidOptions { layouts?: string[]; cache?: LiquidCache; outputEscape?: OutputEscape; - operatorsTrie?: Trie; } export interface NormalizedFullOptions extends NormalizedOptions { @@ -130,7 +128,6 @@ export interface NormalizedFullOptions extends NormalizedOptions { globals: object; keepOutputType: boolean; operators: Operators; - operatorsTrie: Trie; } export const defaultOptions: NormalizedFullOptions = { @@ -160,14 +157,10 @@ export const defaultOptions: NormalizedFullOptions = { lenientIf: false, globals: {}, keepOutputType: false, - operators: defaultOperators, - operatorsTrie: createTrie(defaultOperators) + operators: defaultOperators } export function normalize (options: LiquidOptions): NormalizedFullOptions { - if (options.hasOwnProperty('operators')) { - (options as NormalizedOptions).operatorsTrie = createTrie(options.operators!) 
- } if (options.hasOwnProperty('root')) { if (!options.hasOwnProperty('partials')) options.partials = options.root if (!options.hasOwnProperty('layouts')) options.layouts = options.root diff --git a/src/parser/parser.ts b/src/parser/parser.ts index ca7c3d7cb7..3cae7a9c77 100644 --- a/src/parser/parser.ts +++ b/src/parser/parser.ts @@ -29,7 +29,7 @@ export default class Parser { this.loader = new Loader(this.liquid.options) } public parse (html: string, filepath?: string): Template[] { - const tokenizer = new Tokenizer(html, this.liquid.options.operatorsTrie, filepath) + const tokenizer = new Tokenizer(html, this.liquid.options.operators, filepath) const tokens = tokenizer.readTopLevelTokens(this.liquid.options) return this.parseTokens(tokens) } diff --git a/src/parser/tokenizer.ts b/src/parser/tokenizer.ts index fc472f5b8c..324c911b7e 100644 --- a/src/parser/tokenizer.ts +++ b/src/parser/tokenizer.ts @@ -22,21 +22,24 @@ import { TokenizationError } from '../util/error' import { NormalizedFullOptions, defaultOptions } from '../liquid-options' import { TYPES, QUOTE, BLANK, IDENTIFIER } from '../util/character' import { matchOperator } from './match-operator' -import { Trie } from '../util/operator-trie' +import { Trie, createTrie } from '../util/operator-trie' import { Expression } from '../render/expression' +import { Operators } from '../render/operator' import { LiquidTagToken } from '../tokens/liquid-tag-token' export class Tokenizer { p = 0 N: number private rawBeginAt = -1 + private opTrie: Trie constructor ( public input: string, - private trie: Trie = defaultOptions.operatorsTrie, + operators: Operators = defaultOptions.operators, public file: string = '' ) { this.N = input.length + this.opTrie = createTrie(operators) } readExpression () { @@ -62,7 +65,7 @@ export class Tokenizer { } readOperator (): OperatorToken | undefined { this.skipBlank() - const end = matchOperator(this.input, this.p, this.trie) + const end = matchOperator(this.input, this.p, this.opTrie) if (end === -1) return return new OperatorToken(this.input, this.p, (this.p = end), this.file) } diff --git a/src/tags/assign.ts b/src/tags/assign.ts index 82f08d766c..38ed5436fc 100644 --- a/src/tags/assign.ts +++ b/src/tags/assign.ts @@ -2,7 +2,7 @@ import { Value, Tokenizer, assert, TagImplOptions, TagToken, Context } from '../ export default { parse: function (token: TagToken) { - const tokenizer = new Tokenizer(token.args, this.liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(token.args, this.liquid.options.operators) this.key = tokenizer.readIdentifier().content tokenizer.skipBlank() assert(tokenizer.peek() === '=', () => `illegal token ${token.getText()}`) diff --git a/src/tags/capture.ts b/src/tags/capture.ts index 3dde40d376..82339da15e 100644 --- a/src/tags/capture.ts +++ b/src/tags/capture.ts @@ -3,7 +3,7 @@ import { evalQuotedToken } from '../render/expression' export default { parse: function (tagToken: TagToken, remainTokens: TopLevelToken[]) { - const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operators) this.variable = readVariableName(tokenizer) assert(this.variable, () => `${tagToken.args} not valid identifier`) diff --git a/src/tags/case.ts b/src/tags/case.ts index 742d011350..0dec1e3e9c 100644 --- a/src/tags/case.ts +++ b/src/tags/case.ts @@ -12,7 +12,7 @@ export default { .on('tag:when', (token: TagToken) => { p = [] - const tokenizer = new Tokenizer(token.args, 
this.liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(token.args, this.liquid.options.operators) while (!tokenizer.end()) { const value = tokenizer.readValue() diff --git a/src/tags/cycle.ts b/src/tags/cycle.ts index a542df1099..87a322941e 100644 --- a/src/tags/cycle.ts +++ b/src/tags/cycle.ts @@ -4,7 +4,7 @@ import { Tokenizer } from '../parser/tokenizer' export default { parse: function (tagToken: TagToken) { - const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operators) const group = tokenizer.readValue() tokenizer.skipBlank() diff --git a/src/tags/decrement.ts b/src/tags/decrement.ts index b8b3c49557..932196335c 100644 --- a/src/tags/decrement.ts +++ b/src/tags/decrement.ts @@ -3,7 +3,7 @@ import { isNumber, stringify } from '../util/underscore' export default { parse: function (token: TagToken) { - const tokenizer = new Tokenizer(token.args, this.liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(token.args, this.liquid.options.operators) this.variable = tokenizer.readIdentifier().content }, render: function (context: Context, emitter: Emitter) { diff --git a/src/tags/for.ts b/src/tags/for.ts index 2c47ddffcd..2d74268429 100644 --- a/src/tags/for.ts +++ b/src/tags/for.ts @@ -10,7 +10,7 @@ type valueof = T[keyof T] export default { type: 'block', parse: function (token: TagToken, remainTokens: TopLevelToken[]) { - const tokenizer = new Tokenizer(token.args, this.liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(token.args, this.liquid.options.operators) const variable = tokenizer.readIdentifier() const inStr = tokenizer.readIdentifier() diff --git a/src/tags/include.ts b/src/tags/include.ts index 87cb7b13c2..9cffe14a9d 100644 --- a/src/tags/include.ts +++ b/src/tags/include.ts @@ -7,7 +7,7 @@ export default { renderFilePath, parse: function (token: TagToken) { const args = token.args - const tokenizer = new Tokenizer(args, this.liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(args, this.liquid.options.operators) this['file'] = this.parseFilePath(tokenizer, this.liquid) this['currentFile'] = token.file diff --git a/src/tags/increment.ts b/src/tags/increment.ts index 5cde2dc34d..6b63999939 100644 --- a/src/tags/increment.ts +++ b/src/tags/increment.ts @@ -3,7 +3,7 @@ import { Tokenizer, Emitter, TagToken, Context, TagImplOptions } from '../types' export default { parse: function (token: TagToken) { - const tokenizer = new Tokenizer(token.args, this.liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(token.args, this.liquid.options.operators) this.variable = tokenizer.readIdentifier().content }, render: function (context: Context, emitter: Emitter) { diff --git a/src/tags/layout.ts b/src/tags/layout.ts index fc5d618fd0..e2e3230807 100644 --- a/src/tags/layout.ts +++ b/src/tags/layout.ts @@ -7,7 +7,7 @@ export default { parseFilePath, renderFilePath, parse: function (token: TagToken, remainTokens: TopLevelToken[]) { - const tokenizer = new Tokenizer(token.args, this.liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(token.args, this.liquid.options.operators) this['file'] = this.parseFilePath(tokenizer, this.liquid) this['currentFile'] = token.file this.hash = new Hash(tokenizer.remaining()) diff --git a/src/tags/liquid.ts b/src/tags/liquid.ts index 00be32fb28..cd33733840 100644 --- a/src/tags/liquid.ts +++ b/src/tags/liquid.ts @@ -4,7 +4,7 @@ import { Tokenizer } from '../parser/tokenizer' export default { 
parse: function (token: TagToken) { - const tokenizer = new Tokenizer(token.args, this.liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(token.args, this.liquid.options.operators) const tokens = tokenizer.readLiquidTagTokens(this.liquid.options) this.tpls = this.liquid.parser.parseTokens(tokens) }, diff --git a/src/tags/render.ts b/src/tags/render.ts index eb9033953c..62a6c83a03 100644 --- a/src/tags/render.ts +++ b/src/tags/render.ts @@ -10,7 +10,7 @@ export default { renderFilePath, parse: function (token: TagToken) { const args = token.args - const tokenizer = new Tokenizer(args, this.liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(args, this.liquid.options.operators) this['file'] = this.parseFilePath(tokenizer, this.liquid) this['currentFile'] = token.file while (!tokenizer.end()) { diff --git a/src/tags/tablerow.ts b/src/tags/tablerow.ts index 0fc17fa617..f4a25a1794 100644 --- a/src/tags/tablerow.ts +++ b/src/tags/tablerow.ts @@ -5,7 +5,7 @@ import { Tokenizer } from '../parser/tokenizer' export default { parse: function (tagToken: TagToken, remainTokens: TopLevelToken[]) { - const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operators) const variable = tokenizer.readIdentifier() tokenizer.skipBlank() diff --git a/src/template/value.ts b/src/template/value.ts index 0a1bae05ec..b02157adf4 100644 --- a/src/template/value.ts +++ b/src/template/value.ts @@ -12,7 +12,7 @@ export class Value { * @param str the value to be valuated, eg.: "foobar" | truncate: 3 */ public constructor (str: string, liquid: Liquid) { - const tokenizer = new Tokenizer(str, liquid.options.operatorsTrie) + const tokenizer = new Tokenizer(str, liquid.options.operators) this.initial = tokenizer.readExpression() this.filters = tokenizer.readFilters().map(({ name, args }) => new Filter(name, liquid.filters.get(name), args, liquid)) } diff --git a/src/tokens/liquid-tag-token.ts b/src/tokens/liquid-tag-token.ts index e8a0f6fa72..d7bc2cc509 100644 --- a/src/tokens/liquid-tag-token.ts +++ b/src/tokens/liquid-tag-token.ts @@ -22,7 +22,7 @@ export class LiquidTagToken extends DelimitedToken { this.name = '' this.args = '' } else { - const tokenizer = new Tokenizer(this.content, options.operatorsTrie) + const tokenizer = new Tokenizer(this.content, options.operators) this.name = tokenizer.readTagName() if (!this.name) throw new TokenizationError(`illegal liquid tag syntax`, this) diff --git a/src/tokens/tag-token.ts b/src/tokens/tag-token.ts index b8ebecce0c..746d69a7ca 100644 --- a/src/tokens/tag-token.ts +++ b/src/tokens/tag-token.ts @@ -18,7 +18,7 @@ export class TagToken extends DelimitedToken { const value = input.slice(begin + tagDelimiterLeft.length, end - tagDelimiterRight.length) super(TokenKind.Tag, value, input, begin, end, trimTagLeft, trimTagRight, file) - const tokenizer = new Tokenizer(this.content, options.operatorsTrie) + const tokenizer = new Tokenizer(this.content, options.operators) this.name = tokenizer.readTagName() if (!this.name) throw new TokenizationError(`illegal tag syntax`, this) diff --git a/test/unit/parser/tokenizer.ts b/test/unit/parser/tokenizer.ts index d5e5d6d0e8..996191be86 100644 --- a/test/unit/parser/tokenizer.ts +++ b/test/unit/parser/tokenizer.ts @@ -9,64 +9,60 @@ import { TagToken } from '../../../src/tokens/tag-token' import { QuotedToken } from '../../../src/tokens/quoted-token' import { OutputToken } from '../../../src/tokens/output-token' import { 
HTMLToken } from '../../../src/tokens/html-token' -import { createTrie } from '../../../src/util/operator-trie' -import { defaultOperators } from '../../../src/types' import { LiquidTagToken } from '../../../src/tokens/liquid-tag-token' describe('Tokenizer', function () { - const trie = createTrie(defaultOperators) - it('should read quoted', () => { expect(new Tokenizer('"foo" ff').readQuoted()!.getText()).to.equal('"foo"') - expect(new Tokenizer(' "foo"ff', trie).readQuoted()!.getText()).to.equal('"foo"') + expect(new Tokenizer(' "foo"ff').readQuoted()!.getText()).to.equal('"foo"') }) it('should read value', () => { - expect(new Tokenizer('a[ b][ "c d" ]', trie).readValueOrThrow().getText()).to.equal('a[ b][ "c d" ]') - expect(new Tokenizer('a.b[c[d.e]]', trie).readValueOrThrow().getText()).to.equal('a.b[c[d.e]]') + expect(new Tokenizer('a[ b][ "c d" ]').readValueOrThrow().getText()).to.equal('a[ b][ "c d" ]') + expect(new Tokenizer('a.b[c[d.e]]').readValueOrThrow().getText()).to.equal('a.b[c[d.e]]') }) it('should read identifier', () => { - expect(new Tokenizer('foo bar', trie).readIdentifier()).to.haveOwnProperty('content', 'foo') - expect(new Tokenizer('foo bar', trie).readWord()).to.haveOwnProperty('content', 'foo') + expect(new Tokenizer('foo bar').readIdentifier()).to.haveOwnProperty('content', 'foo') + expect(new Tokenizer('foo bar').readWord()).to.haveOwnProperty('content', 'foo') }) it('should read number value', () => { - const token: NumberToken = new Tokenizer('2.33.2', trie).readValueOrThrow() as any + const token: NumberToken = new Tokenizer('2.33.2').readValueOrThrow() as any expect(token).to.be.instanceOf(NumberToken) expect(token.whole.getText()).to.equal('2') expect(token.decimal!.getText()).to.equal('33') expect(token.getText()).to.equal('2.33') }) it('should read quoted value', () => { - const value = new Tokenizer('"foo"a', trie).readValue() + const value = new Tokenizer('"foo"a').readValue() expect(value).to.be.instanceOf(QuotedToken) expect(value!.getText()).to.equal('"foo"') }) it('should read property access value', () => { - expect(new Tokenizer('a[b]["c d"]', trie).readValueOrThrow().getText()).to.equal('a[b]["c d"]') + expect(new Tokenizer('a[b]["c d"]').readValueOrThrow().getText()).to.equal('a[b]["c d"]') }) it('should read quoted property access value', () => { - const value = new Tokenizer('["a prop"]', trie).readValue() + const value = new Tokenizer('["a prop"]').readValue() expect(value).to.be.instanceOf(PropertyAccessToken) expect((value as PropertyAccessToken).variable.getText()).to.equal('"a prop"') }) it('should throw for broken quoted property access', () => { - const tokenizer = new Tokenizer('[5]', trie) + const tokenizer = new Tokenizer('[5]') expect(() => tokenizer.readValueOrThrow()).to.throw() }) it('should throw for incomplete quoted property access', () => { - const tokenizer = new Tokenizer('["a prop"', trie) + const tokenizer = new Tokenizer('["a prop"') expect(() => tokenizer.readValueOrThrow()).to.throw() }) it('should read hash', () => { - const hash1 = new Tokenizer('foo: 3', trie).readHash() + const hash1 = new Tokenizer('foo: 3').readHash() expect(hash1!.name.content).to.equal('foo') expect(hash1!.value!.getText()).to.equal('3') - const hash2 = new Tokenizer(', foo: a[ "bar"]', trie).readHash() + const hash2 = new Tokenizer(', foo: a[ "bar"]').readHash() expect(hash2!.name.content).to.equal('foo') expect(hash2!.value!.getText()).to.equal('a[ "bar"]') }) it('should read multiple hashs', () => { - const hashes = new Tokenizer(', limit: 
3 reverse offset:off', trie).readHashes() + const hashes = new Tokenizer(', limit: 3 reverse offset:off').readHashes() expect(hashes).to.have.lengthOf(3) const [limit, reverse, offset] = hashes expect(limit.name.content).to.equal('limit') @@ -79,7 +75,7 @@ describe('Tokenizer', function () { expect(offset.value!.getText()).to.equal('off') }) it('should read hash value with property access', () => { - const hashes = new Tokenizer('cols: 2, rows: data["rows"]', trie).readHashes() + const hashes = new Tokenizer('cols: 2, rows: data["rows"]').readHashes() expect(hashes).to.have.lengthOf(2) const [cols, rols] = hashes @@ -92,7 +88,7 @@ describe('Tokenizer', function () { describe('#readTopLevelTokens()', () => { it('should read HTML token', function () { const html = '
<html><body><p>Lorem Ipsum</p></body></html>
' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(1) @@ -101,7 +97,7 @@ describe('Tokenizer', function () { }) it('should read tag token', function () { const html = '
<p>{% for p in a[1]%}</p>
' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(3) @@ -112,7 +108,7 @@ describe('Tokenizer', function () { }) it('should allow unclosed tag inside {% raw %}', function () { const html = '{%raw%} {%if%} {%else {%endraw%}' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(3) @@ -121,7 +117,7 @@ describe('Tokenizer', function () { }) it('should allow unclosed endraw tag inside {% raw %}', function () { const html = '{%raw%} {%endraw {%raw%} {%endraw%}' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(3) @@ -130,12 +126,12 @@ describe('Tokenizer', function () { }) it('should throw when {% raw %} not closed', function () { const html = '{%raw%} {%endraw {%raw%}' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) expect(() => tokenizer.readTopLevelTokens()).to.throw('raw "{%raw%} {%end..." not closed, line:1, col:8') }) it('should read output token', function () { const html = '
<p>{{foo | date: "%Y-%m-%d"}}</p>
' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(3) @@ -145,7 +141,7 @@ describe('Tokenizer', function () { }) it('should handle consecutive value and tags', function () { const html = '{{foo}}{{bar}}{%foo%}{%bar%}' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(4) @@ -167,7 +163,7 @@ describe('Tokenizer', function () { }) it('should keep white spaces and newlines', function () { const html = '{%foo%}\n{%bar %} \n {%alice%}' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(5) expect(tokens[1]).instanceOf(HTMLToken) @@ -177,7 +173,7 @@ describe('Tokenizer', function () { }) it('should handle multiple lines tag', function () { const html = '{%foo\na:a\nb:1.23\n%}' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(1) expect(tokens[0]).instanceOf(TagToken) @@ -186,7 +182,7 @@ describe('Tokenizer', function () { }) it('should handle multiple lines value', function () { const html = '{{foo\n|date:\n"%Y-%m-%d"\n}}' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(1) expect(tokens[0]).instanceOf(OutputToken) @@ -194,7 +190,7 @@ describe('Tokenizer', function () { }) it('should handle complex object property access', function () { const html = '{{ obj["my:property with anything"] }}' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(1) const output = tokens[0] as OutputToken @@ -203,18 +199,18 @@ describe('Tokenizer', function () { }) it('should throw if tag not closed', function () { const html = '{% assign foo = bar {{foo}}' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) expect(() => tokenizer.readTopLevelTokens()).to.throw(/tag "{% assign foo..." 
not closed/) }) it('should throw if output not closed', function () { - const tokenizer = new Tokenizer('{{name}', trie) + const tokenizer = new Tokenizer('{{name}') expect(() => tokenizer.readTopLevelTokens()).to.throw(/output "{{name}" not closed/) }) }) describe('#readTagToken()', () => { it('should skip quoted delimiters', function () { const html = '{% assign a = "%} {% }} {{" %}' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) const token = tokenizer.readTagToken() expect(token).instanceOf(TagToken) @@ -225,7 +221,7 @@ describe('Tokenizer', function () { describe('#readOutputToken()', () => { it('should skip quoted delimiters', function () { const html = '{{ "%} {%" | append: "}} {{" }}' - const tokenizer = new Tokenizer(html, trie) + const tokenizer = new Tokenizer(html) const token = tokenizer.readOutputToken() expect(token).instanceOf(OutputToken) @@ -234,7 +230,7 @@ describe('Tokenizer', function () { }) describe('#readRange()', () => { it('should read `(1..3)`', () => { - const range = new Tokenizer('(1..3)', trie).readRange() + const range = new Tokenizer('(1..3)').readRange() expect(range).to.be.instanceOf(RangeToken) expect(range!.getText()).to.deep.equal('(1..3)') const { lhs, rhs } = range! @@ -244,23 +240,23 @@ describe('Tokenizer', function () { expect(rhs.getText()).to.equal('3') }) it('should throw for `(..3)`', () => { - expect(() => new Tokenizer('(..3)', trie).readRange()).to.throw('unexpected token "..3)", value expected') + expect(() => new Tokenizer('(..3)').readRange()).to.throw('unexpected token "..3)", value expected') }) it('should read `(a.b..c["..d"])`', () => { - const range = new Tokenizer('(a.b..c["..d"])', trie).readRange() + const range = new Tokenizer('(a.b..c["..d"])').readRange() expect(range).to.be.instanceOf(RangeToken) expect(range!.getText()).to.deep.equal('(a.b..c["..d"])') }) }) describe('#readFilter()', () => { it('should read a simple filter', function () { - const tokenizer = new Tokenizer('| plus', trie) + const tokenizer = new Tokenizer('| plus') const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token).to.have.property('args').to.deep.equal([]) }) it('should read a filter with argument', function () { - const tokenizer = new Tokenizer(' | plus: 1', trie) + const tokenizer = new Tokenizer(' | plus: 1') const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token!.args).to.have.lengthOf(1) @@ -270,18 +266,18 @@ describe('Tokenizer', function () { expect(one.getText()).to.equal('1') }) it('should read a filter with colon but no argument', function () { - const tokenizer = new Tokenizer('| plus:', trie) + const tokenizer = new Tokenizer('| plus:') const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token).to.have.property('args').to.deep.equal([]) }) it('should read null if name not found', function () { - const tokenizer = new Tokenizer('|', trie) + const tokenizer = new Tokenizer('|') const token = tokenizer.readFilter() expect(token).to.be.null }) it('should read a filter with k/v argument', function () { - const tokenizer = new Tokenizer(' | plus: a:1', trie) + const tokenizer = new Tokenizer(' | plus: a:1') const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token!.args).to.have.lengthOf(1) @@ -292,7 +288,7 @@ describe('Tokenizer', function () { expect(v.getText()).to.equal('1') }) it('should read a filter with "arr[0]" argument', function () { - 
const tokenizer = new Tokenizer('| plus: arr[0]', trie) + const tokenizer = new Tokenizer('| plus: arr[0]') const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token!.args).to.have.lengthOf(1) @@ -305,7 +301,7 @@ describe('Tokenizer', function () { expect(pa.props[0].getText()).to.equal('0') }) it('should read a filter with obj.foo argument', function () { - const tokenizer = new Tokenizer('| plus: obj.foo', trie) + const tokenizer = new Tokenizer('| plus: obj.foo') const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token!.args).to.have.lengthOf(1) @@ -318,7 +314,7 @@ describe('Tokenizer', function () { expect(pa.props[0].getText()).to.equal('foo') }) it('should read a filter with obj["foo"] argument', function () { - const tokenizer = new Tokenizer('| plus: obj["good luck"]', trie) + const tokenizer = new Tokenizer('| plus: obj["good luck"]') const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token!.args).to.have.lengthOf(1) @@ -332,7 +328,7 @@ describe('Tokenizer', function () { }) describe('#readFilters()', () => { it('should read simple filters', function () { - const tokenizer = new Tokenizer('| plus: 3 | capitalize', trie) + const tokenizer = new Tokenizer('| plus: 3 | capitalize') const tokens = tokenizer.readFilters() expect(tokens).to.have.lengthOf(2) @@ -345,7 +341,7 @@ describe('Tokenizer', function () { expect(tokens[1].args).to.have.lengthOf(0) }) it('should read filters', function () { - const tokenizer = new Tokenizer('| plus: a:3 | capitalize | append: foo[a.b["c d"]]', trie) + const tokenizer = new Tokenizer('| plus: a:3 | capitalize | append: foo[a.b["c d"]]') const tokens = tokenizer.readFilters() expect(tokens).to.have.lengthOf(3) @@ -368,14 +364,14 @@ describe('Tokenizer', function () { }) describe('#readExpression()', () => { it('should read expression `a `', () => { - const exp = [...new Tokenizer('a ', trie).readExpressionTokens()] + const exp = [...new Tokenizer('a ').readExpressionTokens()] expect(exp).to.have.lengthOf(1) expect(exp[0]).to.be.instanceOf(PropertyAccessToken) expect(exp[0].getText()).to.deep.equal('a') }) it('should read expression `a[][b]`', () => { - const exp = [...new Tokenizer('a[][b]', trie).readExpressionTokens()] + const exp = [...new Tokenizer('a[][b]').readExpressionTokens()] expect(exp).to.have.lengthOf(1) const pa = exp[0] as PropertyAccessToken @@ -390,7 +386,7 @@ describe('Tokenizer', function () { expect(p2.getText()).to.equal('b') }) it('should read expression `a.`', () => { - const exp = [...new Tokenizer('a.', trie).readExpressionTokens()] + const exp = [...new Tokenizer('a.').readExpressionTokens()] expect(exp).to.have.lengthOf(1) const pa = exp[0] as PropertyAccessToken @@ -399,14 +395,14 @@ describe('Tokenizer', function () { expect(pa.props).to.have.lengthOf(0) }) it('should read expression `a ==`', () => { - const exp = [...new Tokenizer('a ==', trie).readExpressionTokens()] + const exp = [...new Tokenizer('a ==').readExpressionTokens()] expect(exp).to.have.lengthOf(1) expect(exp[0]).to.be.instanceOf(PropertyAccessToken) expect(exp[0].getText()).to.deep.equal('a') }) it('should read expression `a==b`', () => { - const exp = new Tokenizer('a==b', trie).readExpressionTokens() + const exp = new Tokenizer('a==b').readExpressionTokens() const [a, equals, b] = exp expect(a).to.be.instanceOf(PropertyAccessToken) @@ -419,11 +415,11 @@ describe('Tokenizer', function () { expect(b.getText()).to.deep.equal('b') }) 
it('should read expression `^`', () => { - const exp = new Tokenizer('^', trie).readExpressionTokens() + const exp = new Tokenizer('^').readExpressionTokens() expect([...exp]).to.deep.equal([]) }) it('should read expression `a == b`', () => { - const exp = new Tokenizer('a == b', trie).readExpressionTokens() + const exp = new Tokenizer('a == b').readExpressionTokens() const [a, equals, b] = exp expect(a).to.be.instanceOf(PropertyAccessToken) @@ -436,7 +432,7 @@ describe('Tokenizer', function () { expect(b.getText()).to.deep.equal('b') }) it('should read expression `(1..3) contains 3`', () => { - const exp = new Tokenizer('(1..3) contains 3', trie).readExpressionTokens() + const exp = new Tokenizer('(1..3) contains 3').readExpressionTokens() const [range, contains, rhs] = exp expect(range).to.be.instanceOf(RangeToken) @@ -449,7 +445,7 @@ describe('Tokenizer', function () { expect(rhs.getText()).to.deep.equal('3') }) it('should read expression `a[b] == c`', () => { - const exp = new Tokenizer('a[b] == c', trie).readExpressionTokens() + const exp = new Tokenizer('a[b] == c').readExpressionTokens() const [lhs, contains, rhs] = exp expect(lhs).to.be.instanceOf(PropertyAccessToken) @@ -462,7 +458,7 @@ describe('Tokenizer', function () { expect(rhs.getText()).to.deep.equal('c') }) it('should read expression `c[a["b"]] >= c`', () => { - const exp = new Tokenizer('c[a["b"]] >= c', trie).readExpressionTokens() + const exp = new Tokenizer('c[a["b"]] >= c').readExpressionTokens() const [lhs, op, rhs] = exp expect(lhs).to.be.instanceOf(PropertyAccessToken) @@ -475,7 +471,7 @@ describe('Tokenizer', function () { expect(rhs.getText()).to.deep.equal('c') }) it('should read expression `"][" == var`', () => { - const exp = new Tokenizer('"][" == var', trie).readExpressionTokens() + const exp = new Tokenizer('"][" == var').readExpressionTokens() const [lhs, equals, rhs] = exp expect(lhs).to.be.instanceOf(QuotedToken) @@ -488,7 +484,7 @@ describe('Tokenizer', function () { expect(rhs.getText()).to.deep.equal('var') }) it('should read expression `"\\\'" == "\\""`', () => { - const exp = new Tokenizer('"\\\'" == "\\""', trie).readExpressionTokens() + const exp = new Tokenizer('"\\\'" == "\\""').readExpressionTokens() const [lhs, equals, rhs] = exp expect(lhs).to.be.instanceOf(QuotedToken) @@ -503,7 +499,7 @@ describe('Tokenizer', function () { }) describe('#readLiquidTagTokens', () => { it('should read newline terminated tokens', () => { - const tokenizer = new Tokenizer('echo \'hello\'', trie) + const tokenizer = new Tokenizer('echo \'hello\'') const tokens = tokenizer.readLiquidTagTokens() expect(tokens.length).to.equal(1) const tag = tokens[0] @@ -516,18 +512,18 @@ describe('Tokenizer', function () { echo 'hello' decrement foo - `, trie) + `) const tokens = tokenizer.readLiquidTagTokens() expect(tokens.length).to.equal(2) }) it('should throw if line does not start with an identifier', () => { - const tokenizer = new Tokenizer('!', trie) + const tokenizer = new Tokenizer('!') expect(() => tokenizer.readLiquidTagTokens()).to.throw(/illegal liquid tag syntax/) }) }) describe('#read inline comment tags', () => { it('should allow hash characters in tag names', () => { - const tokenizer = new Tokenizer('{% # some comment %}', trie) + const tokenizer = new Tokenizer('{% # some comment %}') const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(1) const tag = tokens[0] as TagToken @@ -536,7 +532,7 @@ describe('Tokenizer', function () { expect(tag.args).to.equal('some comment') }) it('should 
handle leading whitespace', () => { - const tokenizer = new Tokenizer('{%\n # some comment %}', trie) + const tokenizer = new Tokenizer('{%\n # some comment %}') const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(1) const tag = tokens[0] as TagToken @@ -545,7 +541,7 @@ describe('Tokenizer', function () { expect(tag.args).to.equal('some comment') }) it('should handle no trailing whitespace', () => { - const tokenizer = new Tokenizer('{%\n #some comment %}', trie) + const tokenizer = new Tokenizer('{%\n #some comment %}') const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(1) const tag = tokens[0] as TagToken diff --git a/test/unit/render/expression.ts b/test/unit/render/expression.ts index 128d064a2d..1cabb8be46 100644 --- a/test/unit/render/expression.ts +++ b/test/unit/render/expression.ts @@ -3,13 +3,10 @@ import { expect } from 'chai' import { Drop } from '../../../src/drop/drop' import { Context } from '../../../src/context/context' import { toPromise, toValueSync } from '../../../src/util/async' -import { defaultOperators } from '../../../src/render/operator' -import { createTrie } from '../../../src/util/operator-trie' describe('Expression', function () { const ctx = new Context({}) - const trie = createTrie(defaultOperators) - const create = (str: string) => new Tokenizer(str, trie).readExpression() + const create = (str: string) => new Tokenizer(str).readExpression() it('should throw when context not defined', done => { toPromise(create('foo').evaluate(undefined!, false))
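
Reviewer note on the BREAKING CHANGE: only code that constructs a Tokenizer directly is affected. The constructor now takes the operators list and builds the trie internally, so the trie type disappears from the public surface. A minimal before/after sketch in TypeScript; the package-entry imports and the 'a == b' input are illustrative assumptions, not part of this patch:

import { Tokenizer, defaultOperators } from 'liquidjs'

// Before this patch, callers had to build and pass an operator trie:
//   new Tokenizer('a == b', createTrie(defaultOperators))
// After this patch, the operators are passed directly; the argument is
// optional and defaults to the built-in operators.
const tokenizer = new Tokenizer('a == b', defaultOperators)
const expression = tokenizer.readExpression()

One consequence of delaying trie creation: the trie is now built once per Tokenizer instance rather than once per Liquid instance, trading repeated construction work for the simpler, implementation-hiding public API.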