feat: inline comment tag #514

Merged: 5 commits merged on Jul 7, 2022

Changes from 3 commits
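
For orientation, here is a minimal usage sketch of the tag this PR introduces, mirroring the integration tests further down; the template string is illustrative only.

import { Liquid } from 'liquidjs'

const liquid = new Liquid()

// The body of an inline comment tag is discarded at render time,
// so only the surrounding text remains in the output.
liquid.parseAndRender('Hello{% # this text is ignored %} world')
  .then(html => console.log(html)) // "Hello world"
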
3 changes: 2 additions & 1 deletion src/builtin/tags/index.ts
@@ -18,10 +18,11 @@ import Break from './break'
import Continue from './continue'
import echo from './echo'
import liquid from './liquid'
import inlineComment from './inline-comment'
import { TagImplOptions } from '../../template/tag/tag-impl-options'

const tags: { [key: string]: TagImplOptions } = {
- assign, 'for': For, capture, 'case': Case, comment, include, render, decrement, increment, cycle, 'if': If, layout, block, raw, tablerow, unless, 'break': Break, 'continue': Continue, echo, liquid
+ assign, 'for': For, capture, 'case': Case, comment, include, render, decrement, increment, cycle, 'if': If, layout, block, raw, tablerow, unless, 'break': Break, 'continue': Continue, echo, liquid, '#': inlineComment
}

export default tags
11 changes: 11 additions & 0 deletions src/builtin/tags/inline-comment.ts
@@ -0,0 +1,11 @@
import { TagToken } from '../../tokens/tag-token'
import { TopLevelToken } from '../../tokens/toplevel-token'
import { TagImplOptions } from '../../template/tag/tag-impl-options'

export default {
  parse: function (tagToken: TagToken, remainTokens: TopLevelToken[]) {
    if (tagToken.args.search(/\n\s*[^#\s]/g) !== -1) {
      throw new Error('every line of an inline comment must start with a \'#\' character')
    }
  }
} as TagImplOptions
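
As a rough sketch of what the check above accepts and rejects (the argument strings below are made up, echoing the multi-line cases in the integration tests further down):

// A continuation line that does not begin with '#' makes the comment invalid.
const badLine = /\n\s*[^#\s]/g

// -1: every continuation line starts with '#', so parse() succeeds
console.log('# spread inline comments\n # over multiple lines'.search(badLine))

// >= 0: the second line lacks a leading '#', so parse() throws
console.log('# spread inline comments\n over multiple lines'.search(badLine))
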
14 changes: 12 additions & 2 deletions src/parser/tokenizer.ts
@@ -207,8 +207,7 @@ export class Tokenizer {
const begin = this.p
let end = this.N
if (this.readToDelimiter('\n') !== -1) end = this.p
- const token = new LiquidTagToken(input, begin, end, options, file)
- return token
+ return new LiquidTagToken(input, begin, end, options, file)
}

mkError (msg: string, begin: number) {
@@ -234,6 +233,17 @@
return new IdentifierToken(this.input, begin, this.p, this.file)
}

readTagName (): string {
this.skipBlank()
const begin = this.p
// Handle inline comment tags
if (this.input[this.p] === '#') {
return this.input.slice(begin, ++this.p)
}
while (this.peekType() & IDENTIFIER) ++this.p
return this.input.slice(begin, this.p)
}

readHashes (jekyllStyle?: boolean) {
const hashes = []
while (true) {
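
As a small sketch of the new method in isolation: the import path and the trie variable are assumptions here, matching how the unit tests at the bottom of this diff construct their Tokenizer.

import { Tokenizer } from '../../src/parser/tokenizer'

// `trie` is assumed to be the operators trie used by the existing tokenizer tests.
const tokenizer = new Tokenizer('# some comment', trie)
console.log(tokenizer.readTagName()) // '#': the single hash is consumed as the tag name
tokenizer.skipBlank()
// The remainder ('some comment') is what TagToken and LiquidTagToken expose as args.
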
2 changes: 1 addition & 1 deletion src/tokens/liquid-tag-token.ts
@@ -23,7 +23,7 @@ export class LiquidTagToken extends DelimitedToken {
this.args = ''
} else {
const tokenizer = new Tokenizer(this.content, options.operatorsTrie)
- this.name = tokenizer.readIdentifier().getText()
+ this.name = tokenizer.readTagName()
if (!this.name) throw new TokenizationError(`illegal liquid tag syntax`, this)

tokenizer.skipBlank()
2 changes: 1 addition & 1 deletion src/tokens/tag-token.ts
@@ -19,7 +19,7 @@ export class TagToken extends DelimitedToken {
super(TokenKind.Tag, value, input, begin, end, trimTagLeft, trimTagRight, file)

const tokenizer = new Tokenizer(this.content, options.operatorsTrie)
- this.name = tokenizer.readIdentifier().getText()
+ this.name = tokenizer.readTagName()
if (!this.name) throw new TokenizationError(`illegal tag syntax`, this)

tokenizer.skipBlank()
95 changes: 95 additions & 0 deletions test/integration/builtin/tags/inline-comment.ts
@@ -0,0 +1,95 @@
import { Liquid } from '../../../../src/liquid'
import { expect, use } from 'chai'
import * as chaiAsPromised from 'chai-as-promised'

use(chaiAsPromised)

describe('tags/inline-comment', function () {
const liquid = new Liquid()
it('should ignore plain string', async function () {
const src = 'My name is {% # super %} Shopify.'
const html = await liquid.parseAndRender(src)
return expect(html).to.equal('My name is Shopify.')
})
it('should ignore output tokens', async function () {
const src = '{% #\n{{ foo}} \n %}'
const html = await liquid.parseAndRender(src)
return expect(html).to.equal('')
})
it('should support whitespace control', async function () {
const src = '{%- # some comment \n -%}\nfoo'
const html = await liquid.parseAndRender(src)
return expect(html).to.equal('foo')
})
it('should handle hash without trailing whitespace', async function () {
const src = '{% #some comment %}'
const html = await liquid.parseAndRender(src)
return expect(html).to.equal('')
})
it('should handle hash without leading whitespace', async function () {
const src = '{%#some comment %}'
const html = await liquid.parseAndRender(src)
return expect(html).to.equal('')
})
it('should handle empty comment', async function () {
const src = '{%#%}'
const html = await liquid.parseAndRender(src)
return expect(html).to.equal('')
})
it('should support multiple lines', async function () {
const src = [
'{%-',
' # spread inline comments',
' # over multiple lines',
'-%}'
].join('\n')
const html = await liquid.parseAndRender(src)
return expect(html).to.equal('')
})
it('should enforce leading hashes', async function () {
const src = [
'{%-',
' # spread inline comments',
' over multiple lines',
'-%}'
].join('\n')
return expect(liquid.parseAndRender(src))
.to.be.rejectedWith(/every line of an inline comment must start with a '#' character/)
})
describe('sync support', function () {
it('should ignore plain string', function () {
const src = 'My name is {% # super %} Shopify.'
const html = liquid.parseAndRenderSync(src)
return expect(html).to.equal('My name is Shopify.')
})
})
describe('liquid tag', function () {
it('should treat lines starting with a hash as a comment', async function () {
const src = [
'{% liquid ',
' # first comment line',
' # second comment line',
'',
' # another comment line',
' echo \'Hello \'',
'',
' # more comments',
' echo \'goodbye\'',
'-%}'
].join('\n')
const html = await liquid.parseAndRender(src)
return expect(html).to.equal('Hello goodbye')
})
it('should handle lots of hashes', async function () {
const src = [
'{% liquid',
' ##########################',
' # spread inline comments #',
' ##########################',
'-%}'
].join('\n')
const html = await liquid.parseAndRender(src)
return expect(html).to.equal('')
})
})
})
29 changes: 29 additions & 0 deletions test/unit/parser/tokenizer.ts
@@ -525,4 +525,33 @@ describe('Tokenizer', function () {
expect(() => tokenizer.readLiquidTagTokens()).to.throw(/illegal liquid tag syntax/)
})
})
describe('#read inline comment tags', () => {
it('should allow hash characters in tag names', () => {
const tokenizer = new Tokenizer('{% # some comment %}', trie)
const tokens = tokenizer.readTopLevelTokens()
expect(tokens.length).to.equal(1)
const tag = tokens[0] as TagToken
expect(tag).instanceOf(TagToken)
expect(tag.name).to.equal('#')
expect(tag.args).to.equal('some comment')
})
it('should handle leading whitespace', () => {
const tokenizer = new Tokenizer('{%\n # some comment %}', trie)
const tokens = tokenizer.readTopLevelTokens()
expect(tokens.length).to.equal(1)
const tag = tokens[0] as TagToken
expect(tag).instanceOf(TagToken)
expect(tag.name).to.equal('#')
expect(tag.args).to.equal('some comment')
})
it('should handle no trailing whitespace', () => {
const tokenizer = new Tokenizer('{%\n #some comment %}', trie)
const tokens = tokenizer.readTopLevelTokens()
expect(tokens.length).to.equal(1)
const tag = tokens[0] as TagToken
expect(tag).instanceOf(TagToken)
expect(tag.name).to.equal('#')
expect(tag.args).to.equal('some comment')
})
})
})