Merge ec8de0b into 82ba548
harttle committed Jun 3, 2023
2 parents 82ba548 + ec8de0b commit 7e3b827
Showing 41 changed files with 286 additions and 198 deletions.
1 change: 0 additions & 1 deletion .github/workflows/check.yml
@@ -3,7 +3,6 @@ on: [push, pull_request]
jobs:
check:
name: Check
runs-on: ubuntu-latest
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
1 change: 1 addition & 0 deletions package.json
@@ -24,6 +24,7 @@
"postversion": "npm run build:dist",
"build": "npm run build:dist && npm run build:docs",
"build:dist": "rollup -c rollup.config.mjs",
"build:cjs": "BUNDLES=cjs rollup -c rollup.config.mjs",
"build:docs": "bin/build-docs.sh"
},
"bin": {
8 changes: 2 additions & 6 deletions src/filters/array.ts
@@ -89,13 +89,9 @@ export function uniq<T> (arr: T[]): T[] {
})
}

export function sample<T> (v: T[] | string, count: number | undefined = undefined): T[] | string {
export function sample<T> (v: T[] | string, count: number | undefined = undefined): (T | string)[] {
v = toValue(v)
if (isNil(v)) return []
if (!isArray(v)) {
v = stringify(v)
return [...v].sort(() => Math.random()).slice(0, count).join('')
}

if (!isArray(v)) v = stringify(v)
return [...v].sort(() => Math.random()).slice(0, count)
}
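With this change, sample handles string input the same way as arrays: the value is spread into an array, shuffled, and sliced, and string input is no longer re-joined into a string. A minimal sketch of the resulting behavior, assuming the function is registered as the template filter sample (the registration itself is outside this diff):

import { Liquid } from 'liquidjs'

const engine = new Liquid()

// Array input: up to count elements in randomized order.
engine.parseAndRender('{{ arr | sample: 2 | join: "," }}', { arr: [1, 2, 3, 4] })
  .then(console.log) // e.g. "3,1"

// String input: now yields an array of characters rather than a joined string,
// so templates that want a string back need an explicit join.
engine.parseAndRender('{{ "hello" | sample: 3 | join: "" }}', {})
  .then(console.log) // e.g. "loh"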
2 changes: 2 additions & 0 deletions src/parser/parser.ts
@@ -5,6 +5,7 @@ import { TopLevelToken, OutputToken } from '../tokens'
import { Template, Output, HTML } from '../template'
import { LiquidCache } from '../cache'
import { FS, Loader, LookupType } from '../fs'
import { TokenizationError } from '../util/error'
import type { Liquid } from '../liquid'

export class Parser {
@@ -47,6 +48,7 @@ export class Parser {
}
return new HTML(token)
} catch (e) {
if (e instanceof TokenizationError) throw e
throw new ParseError(e as Error, token)
}
}
1 change: 1 addition & 0 deletions src/parser/token-kind.ts
@@ -11,5 +11,6 @@ export enum TokenKind {
Range = 512,
Quoted = 1024,
Operator = 2048,
FilteredValue = 4096,
Delimited = Tag | Output
}
4 changes: 2 additions & 2 deletions src/parser/tokenizer.spec.ts
@@ -118,7 +118,7 @@ describe('Tokenizer', function () {
it('should throw when {% raw %} not closed', function () {
const html = '{%raw%} {%endraw {%raw%}'
const tokenizer = new Tokenizer(html)
expect(() => tokenizer.readTopLevelTokens()).toThrow('raw "{%raw%} {%end..." not closed, line:1, col:8')
expect(() => tokenizer.readTopLevelTokens()).toThrow('raw "{%raw%} {%endraw {%raw%}" not closed, line:1, col:8')
})
it('should read output token', function () {
const html = '<p>{{foo | date: "%Y-%m-%d"}}</p>'
@@ -191,7 +191,7 @@ describe('Tokenizer', function () {
it('should throw if tag not closed', function () {
const html = '{% assign foo = bar {{foo}}'
const tokenizer = new Tokenizer(html)
expect(() => tokenizer.readTopLevelTokens()).toThrow(/tag "{% assign foo..." not closed/)
expect(() => tokenizer.readTopLevelTokens()).toThrow('tag "{% assign foo = bar {{foo}}" not closed, line:1, col:1')
})
it('should throw if output not closed', function () {
const tokenizer = new Tokenizer('{{name}')
72 changes: 47 additions & 25 deletions src/parser/tokenizer.ts
@@ -1,23 +1,25 @@
import { TagToken, HTMLToken, HashToken, QuotedToken, LiquidTagToken, OutputToken, ValueToken, Token, RangeToken, FilterToken, TopLevelToken, PropertyAccessToken, OperatorToken, LiteralToken, IdentifierToken, NumberToken } from '../tokens'
import { Trie, createTrie, ellipsis, literalValues, assert, TokenizationError, TYPES, QUOTE, BLANK, IDENTIFIER } from '../util'
import { FilteredValueToken, TagToken, HTMLToken, HashToken, QuotedToken, LiquidTagToken, OutputToken, ValueToken, Token, RangeToken, FilterToken, TopLevelToken, PropertyAccessToken, OperatorToken, LiteralToken, IdentifierToken, NumberToken } from '../tokens'
import { Trie, createTrie, ellipsis, literalValues, TokenizationError, TYPES, QUOTE, BLANK, IDENTIFIER } from '../util'
import { Operators, Expression } from '../render'
import { NormalizedFullOptions, defaultOptions } from '../liquid-options'
import { FilterArg } from './filter-arg'
import { matchOperator } from './match-operator'
import { whiteSpaceCtrl } from './whitespace-ctrl'

export class Tokenizer {
p = 0
p: number
N: number
private rawBeginAt = -1
private opTrie: Trie

constructor (
public input: string,
operators: Operators = defaultOptions.operators,
public file?: string
public file?: string,
private range?: [number, number]
) {
this.N = input.length
this.p = range ? range[0] : 0
this.N = range ? range[1] : input.length
this.opTrie = createTrie(operators)
}

@@ -46,6 +48,13 @@ export class Tokenizer {
if (end === -1) return
return new OperatorToken(this.input, this.p, (this.p = end), this.file)
}
readFilteredValue (): FilteredValueToken {
const begin = this.p
const initial = this.readExpression()
this.assert(initial.valid(), `invalid value expression: ${this.snapshot()}`)
const filters = this.readFilters()
return new FilteredValueToken(initial, filters, this.input, begin, this.p, this.file)
}
readFilters (): FilterToken[] {
const filters = []
while (true) {
@@ -57,11 +66,14 @@
readFilter (): FilterToken | null {
this.skipBlank()
if (this.end()) return null
assert(this.peek() === '|', () => `unexpected token at ${this.snapshot()}`)
this.assert(this.peek() === '|', `expected "|" before filter`)
this.p++
const begin = this.p
const name = this.readIdentifier()
if (!name.size()) return null
if (!name.size()) {
this.assert(this.end(), `expected filter name`)
return null
}
const args = []
this.skipBlank()
if (this.peek() === ':') {
@@ -70,8 +82,12 @@
const arg = this.readFilterArg()
arg && args.push(arg)
this.skipBlank()
assert(this.end() || this.peek() === ',' || this.peek() === '|', () => `unexpected character ${this.snapshot()}`)
this.assert(this.end() || this.peek() === ',' || this.peek() === '|', () => `unexpected character ${this.snapshot()}`)
} while (this.peek() === ',')
} else if (this.peek() === '|' || this.end()) {
// do nothing
} else {
throw this.error('expected ":" after filter name')
}
return new FilterToken(name.getText(), args, this.input, begin, this.p, this.file)
}
@@ -117,7 +133,7 @@ export class Tokenizer {
const { file, input } = this
const begin = this.p
if (this.readToDelimiter(options.tagDelimiterRight) === -1) {
throw this.mkError(`tag ${this.snapshot(begin)} not closed`, begin)
throw this.error(`tag ${this.snapshot(begin)} not closed`, begin)
}
const token = new TagToken(input, begin, this.p, options, file)
if (token.name === 'raw') this.rawBeginAt = begin
@@ -141,7 +157,7 @@
const { outputDelimiterRight } = options
const begin = this.p
if (this.readToDelimiter(outputDelimiterRight) === -1) {
throw this.mkError(`output ${this.snapshot(begin)} not closed`, begin)
throw this.error(`output ${this.snapshot(begin)} not closed`, begin)
}
return new OutputToken(input, begin, this.p, options, file)
}
@@ -170,32 +186,38 @@
this.p++
}
}
throw this.mkError(`raw ${this.snapshot(this.rawBeginAt)} not closed`, begin)
throw this.error(`raw ${this.snapshot(this.rawBeginAt)} not closed`, begin)
}

readLiquidTagTokens (options: NormalizedFullOptions = defaultOptions): LiquidTagToken[] {
const tokens: LiquidTagToken[] = []
while (this.p < this.N) {
const token = this.readLiquidTagToken(options)
if (token.name) tokens.push(token)
token && tokens.push(token)
}
return tokens
}

readLiquidTagToken (options: NormalizedFullOptions): LiquidTagToken {
const { file, input } = this
readLiquidTagToken (options: NormalizedFullOptions): LiquidTagToken | undefined {
this.skipBlank()
if (this.end()) return

const begin = this.p
let end = this.N
if (this.readToDelimiter('\n') !== -1) end = this.p
return new LiquidTagToken(input, begin, end, options, file)
this.readToDelimiter('\n')
const end = this.p
return new LiquidTagToken(this.input, begin, end, options, this.file)
}

error (msg: string, pos: number = this.p) {
return new TokenizationError(msg, new IdentifierToken(this.input, pos, this.N, this.file))
}

mkError (msg: string, begin: number) {
return new TokenizationError(msg, new IdentifierToken(this.input, begin, this.N, this.file))
assert (pred: unknown, msg: string | (() => string), pos?: number) {
if (!pred) throw this.error(typeof msg === 'function' ? msg() : msg, pos)
}

snapshot (begin: number = this.p) {
return JSON.stringify(ellipsis(this.input.slice(begin), 16))
return JSON.stringify(ellipsis(this.input.slice(begin, this.N), 32))
}

/**
@@ -208,7 +230,7 @@
readIdentifier (): IdentifierToken {
this.skipBlank()
const begin = this.p
while (this.peekType() & IDENTIFIER) ++this.p
while (!this.end() && this.peekType() & IDENTIFIER) ++this.p
return new IdentifierToken(this.input, begin, this.p, this.file)
}

@@ -246,7 +268,7 @@
}

remaining () {
return this.input.slice(this.p)
return this.input.slice(this.p, this.N)
}

advance (i = 1) {
@@ -319,7 +341,7 @@

readValueOrThrow (): ValueToken {
const value = this.readValue()
assert(value, () => `unexpected token ${this.snapshot()}, value expected`)
this.assert(value, () => `unexpected token ${this.snapshot()}, value expected`)
return value!
}

@@ -368,8 +390,8 @@
return TYPES[this.input.charCodeAt(this.p + n)]
}

peek (n = 0) {
return this.input[this.p + n]
peek (n = 0): string {
return this.p + n >= this.N ? '' : this.input[this.p + n]
}

skipBlank () {
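Taken together, the tokenizer changes above add an optional [begin, end) range so a Tokenizer can work on a slice of its input without copying it (error positions keep referring to the full template), plus readFilteredValue() for reading an expression followed by its filters, and instance-level error()/assert() helpers that throw TokenizationError. A hedged sketch of the new surface, assuming Tokenizer is exported from the package root the same way the tag sources import it:

import { Tokenizer } from 'liquidjs'

const input = '{% assign x = a.b | default: 1 %}'
// Tokenize only the value part after "=" by passing a range instead of slicing the string.
const begin = input.indexOf('=') + 1
const end = input.lastIndexOf('%}')
const tokenizer = new Tokenizer(input, undefined, 'template.liquid', [begin, end])

// Reads "<initial expression> | filter: args | ..." in one call; an invalid
// expression is rejected via the new assert(), which throws a TokenizationError.
const value = tokenizer.readFilteredValue()
// getText() is assumed to be inherited from the Token base class.
console.log(value.getText()) // the raw text of the expression and its filters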
3 changes: 3 additions & 0 deletions src/render/expression.ts
@@ -30,6 +30,9 @@
}
return operands[0]
}
public valid () {
return !!this.postfix.length
}
}

export function * evalToken (token: Token | undefined, ctx: Context, lenient = false): IterableIterator<unknown> {
16 changes: 9 additions & 7 deletions src/tags/assign.ts
@@ -1,16 +1,18 @@
import { Value, assert, Tokenizer, Liquid, TopLevelToken, TagToken, Context, Tag } from '..'
import { Value, Liquid, TopLevelToken, TagToken, Context, Tag } from '..'
export default class extends Tag {
private key: string
private value: Value

constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(token, remainTokens, liquid)
const tokenizer = new Tokenizer(token.args, liquid.options.operators)
this.key = tokenizer.readIdentifier().content
tokenizer.skipBlank()
assert(tokenizer.peek() === '=', () => `illegal token ${token.getText()}`)
tokenizer.advance()
this.value = new Value(tokenizer.remaining(), this.liquid)
this.key = this.tokenizer.readIdentifier().content
this.tokenizer.assert(this.key, 'expected variable name')

this.tokenizer.skipBlank()
this.tokenizer.assert(this.tokenizer.peek() === '=', 'expected "="')

this.tokenizer.advance()
this.value = new Value(this.tokenizer.readFilteredValue(), this.liquid)
}
* render (ctx: Context): Generator<unknown, void, unknown> {
ctx.bottom()[this.key] = yield this.value.value(ctx, this.liquid.options.lenientIf)
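assign now parses its right-hand side with readFilteredValue() on the tag token's own tokenizer, so a missing variable name or "=" fails early with a positioned message. Template-level behavior is unchanged; a usage sketch:

import { Liquid } from 'liquidjs'

const engine = new Liquid()
const tpl = '{% assign first_fruit = "apple,banana" | split: "," | first %}{{ first_fruit }}'
engine.parseAndRender(tpl).then(console.log) // "apple"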
20 changes: 9 additions & 11 deletions src/tags/capture.ts
@@ -1,4 +1,4 @@
import { Liquid, Tag, Tokenizer, assert, Template, Context, TagToken, TopLevelToken } from '..'
import { Liquid, Tag, Template, Context, TagToken, TopLevelToken } from '..'
import { evalQuotedToken } from '../render'
import { isTagToken } from '../util'

@@ -7,9 +7,7 @@ export default class extends Tag {
templates: Template[] = []
constructor (tagToken: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(tagToken, remainTokens, liquid)
const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operators)
this.variable = readVariableName(tokenizer)!
assert(this.variable, () => `${tagToken.args} not valid identifier`)
this.variable = this.readVariableName()

while (remainTokens.length) {
const token = remainTokens.shift()!
@@ -23,11 +21,11 @@
const html = yield r.renderTemplates(this.templates, ctx)
ctx.bottom()[this.variable] = html
}
}

function readVariableName (tokenizer: Tokenizer) {
const word = tokenizer.readIdentifier().content
if (word) return word
const quoted = tokenizer.readQuoted()
if (quoted) return evalQuotedToken(quoted)
private readVariableName () {
const word = this.token.tokenizer.readIdentifier().content
if (word) return word
const quoted = this.token.tokenizer.readQuoted()
if (quoted) return evalQuotedToken(quoted)
throw this.token.tokenizer.error('invalid capture name')
}
}
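readVariableName() is now a private method that reads from the tag token's tokenizer and raises through tokenizer.error() instead of a bare assert. Both identifier and quoted capture names keep working, as this sketch assumes:

import { Liquid } from 'liquidjs'

const engine = new Liquid()

// Identifier name
engine.parseAndRender('{% capture greeting %}Hello{% endcapture %}{{ greeting }}')
  .then(console.log) // "Hello"

// Quoted name resolves through evalQuotedToken to the same variable
engine.parseAndRender('{% capture "greeting" %}Hi{% endcapture %}{{ greeting }}')
  .then(console.log) // "Hi"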
11 changes: 5 additions & 6 deletions src/tags/case.ts
@@ -1,24 +1,23 @@
import { ValueToken, Liquid, Tokenizer, toValue, evalToken, Value, Emitter, TagToken, TopLevelToken, Context, Template, Tag, ParseStream } from '..'
import { ValueToken, Liquid, toValue, evalToken, Value, Emitter, TagToken, TopLevelToken, Context, Template, Tag, ParseStream } from '..'

export default class extends Tag {
value: Value
branches: { values: ValueToken[], templates: Template[] }[] = []
elseTemplates: Template[] = []
constructor (tagToken: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(tagToken, remainTokens, liquid)
this.value = new Value(tagToken.args, this.liquid)
this.value = new Value(this.tokenizer.readFilteredValue(), this.liquid)
this.elseTemplates = []

let p: Template[] = []
const stream: ParseStream = this.liquid.parser.parseStream(remainTokens)
.on('tag:when', (token: TagToken) => {
p = []

const tokenizer = new Tokenizer(token.args, this.liquid.options.operators)
const values: ValueToken[] = []
while (!tokenizer.end()) {
values.push(tokenizer.readValueOrThrow())
tokenizer.readTo(',')
while (!token.tokenizer.end()) {
values.push(token.tokenizer.readValueOrThrow())
token.tokenizer.readTo(',')
}
this.branches.push({
values,
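case builds its subject from readFilteredValue() and reads each when candidate from the tag token's tokenizer, so a case subject that carries filters goes through the same pre-tokenized path. A hedged template-level sketch:

import { Liquid } from 'liquidjs'

const engine = new Liquid()
const tpl = '{% case name | capitalize %}{% when "Alice" %}Hi Alice{% else %}Hello{% endcase %}'
engine.parseAndRender(tpl, { name: 'alice' }).then(console.log) // "Hi Alice"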
23 changes: 11 additions & 12 deletions src/tags/cycle.ts
@@ -1,27 +1,26 @@
import { Tokenizer, assert, TopLevelToken, Liquid, ValueToken, evalToken, Emitter, TagToken, Context, Tag } from '..'
import { TopLevelToken, Liquid, ValueToken, evalToken, Emitter, TagToken, Context, Tag } from '..'

export default class extends Tag {
private candidates: ValueToken[] = []
private group?: ValueToken
constructor (tagToken: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(tagToken, remainTokens, liquid)
const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operators)
const group = tokenizer.readValue()
tokenizer.skipBlank()
constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(token, remainTokens, liquid)
const group = this.tokenizer.readValue()
this.tokenizer.skipBlank()

if (group) {
if (tokenizer.peek() === ':') {
if (this.tokenizer.peek() === ':') {
this.group = group
tokenizer.advance()
this.tokenizer.advance()
} else this.candidates.push(group)
}

while (!tokenizer.end()) {
const value = tokenizer.readValue()
while (!this.tokenizer.end()) {
const value = this.tokenizer.readValue()
if (value) this.candidates.push(value)
tokenizer.readTo(',')
this.tokenizer.readTo(',')
}
assert(this.candidates.length, () => `empty candidates: ${tagToken.getText()}`)
this.tokenizer.assert(this.candidates.length, () => `empty candidates: "${token.getText()}"`)
}

* render (ctx: Context, emitter: Emitter): Generator<unknown, unknown, unknown> {
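cycle likewise reuses the tag token's tokenizer, and an empty candidate list is now reported with a positioned "empty candidates" error. A sketch of the unchanged template behavior, including the optional group prefix:

import { Liquid } from 'liquidjs'

const engine = new Liquid()

// Candidates advance once per loop iteration.
engine.parseAndRender("{% for i in (1..3) %}{% cycle 'odd', 'even' %} {% endfor %}")
  .then(console.log) // "odd even odd "

// A leading value followed by ":" names the cycle group instead of being a candidate.
engine.parseAndRender("{% for i in (1..2) %}{% cycle 'rows': 'a', 'b' %}{% endfor %}")
  .then(console.log) // "ab"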
