Skip to content

Commit

Permalink
Merge 61aeafb into fd51d44
Browse files Browse the repository at this point in the history
  • Loading branch information
thysultan committed Mar 9, 2020
2 parents fd51d44 + 61aeafb commit 5b51636
Show file tree
Hide file tree
Showing 7 changed files with 98 additions and 67 deletions.
8 changes: 8 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,14 @@ const declaration = {
line: 1, column: 1
}

const comment = {
value: '/*@noflip*/',
type: 'comm',
props: '/',
children: '@noflip',
line: 1, column: 1
}

const ruleset = {
value: 'h1,h2',
type: 'rule',
Expand Down
1 change: 1 addition & 0 deletions src/Enum.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ export var MS = '-ms-'
export var MOZ = '-moz-'
export var WEBKIT = '-webkit-'

export var COMMENT = 'comm'
export var RULESET = 'rule'
export var DECLARATION = 'decl'

Expand Down
20 changes: 15 additions & 5 deletions src/Parser.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import {RULESET, DECLARATION} from './Enum.js'
import {COMMENT, RULESET, DECLARATION} from './Enum.js'
import {abs, trim, from, sizeof, strlen, substr, append, replace} from './Utility.js'
import {node, next, peek, caret, token, alloc, dealloc, comment, delimit, whitespace, identifier} from './Tokenizer.js'
import {node, char, next, peek, caret, token, alloc, dealloc, delimit, whitespace, identifier, commenter} from './Tokenizer.js'

/**
* @param {string} value
Expand All @@ -12,7 +12,7 @@ export function compile (value) {

/**
* @param {string} value
* @param {string[]} root
* @param {object} root
* @param {string[]} rule
* @param {string[]} rules
* @param {string[]} rulesets
Expand Down Expand Up @@ -48,7 +48,7 @@ export function parse (value, root, rule, rules, rulesets, points, declarations)
break
// /
case 47:
token(peek()) > 5 ? comment(next()) : characters += '/'
token(peek()) > 5 ? append(comment(commenter(next(), caret()), root), declarations) : characters += '/'
break
// {
case 123 * variable:
Expand Down Expand Up @@ -114,7 +114,7 @@ export function parse (value, root, rule, rules, rulesets, points, declarations)

/**
* @param {string} value
* @param {string[]} root
* @param {object} root
* @param {number} index
* @param {number} offset
* @param {string[]} rules
Expand All @@ -138,6 +138,16 @@ export function ruleset (value, root, index, offset, rules, points, type, props,
return node(value, root, offset === 0 ? RULESET : type, props, children, length)
}

/**
 * Build a comment AST node from the raw comment text produced by `commenter`.
 * `props` receives the character the tokenizer last consumed (via `char()`),
 * and `children` receives `substr(value, 2, -2)` — presumably the comment
 * body with the two-character delimiters stripped; confirm against Utility.js.
 *
 * @param {string} value - full comment text including its delimiters
 * @param {object} root - parent node this comment is attached to
 * @return {object} node with type COMMENT ('comm') and length 0
 */
export function comment (value, root) {
return node(value, root, COMMENT, from(char()), substr(value, 2, -2), 0)
}

/**
* @param {string} value
* @param {string[]} root
Expand Down
3 changes: 2 additions & 1 deletion src/Serializer.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import {IMPORT, RULESET, DECLARATION} from './Enum.js'
import {IMPORT, COMMENT, RULESET, DECLARATION} from './Enum.js'
import {sizeof} from './Utility.js'

/**
Expand Down Expand Up @@ -28,6 +28,7 @@ export function stringify (element, index, children, callback) {

switch (element.type) {
case IMPORT: case DECLARATION: return element.return = output || prefix + element.value
case COMMENT: return output
case RULESET: element.value = element.props.join(',')
}

Expand Down
107 changes: 56 additions & 51 deletions src/Tokenizer.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ export var characters = ''

/**
* @param {string} value
* @param {string} root
* @param {object} root
* @param {string} type
* @param {string[]} props
* @param {object[]} children
Expand All @@ -19,6 +19,13 @@ export function node (value, root, type, props, children, length) {
return {value: value, root: root, type: type, props: props, children: children, line: line, column: column, length: length, prefix: '', return: ''}
}

/**
 * Read-only accessor for the tokenizer's current character code, so other
 * modules can observe it without the module-level `character` being exported.
 *
 * @return {number} char code of the tokenizer's current character
 */
export function char () {
return character
}

/**
* @return {number}
*/
Expand Down Expand Up @@ -103,29 +110,49 @@ export function dealloc (value) {

/**
* @param {number} type
* @return {number}
* @return {string}
*/
export function comment (type) {
while (next())
switch (character + type) {
// / /n
case 47 + 10:
return 0
// * *
case 42 + 42:
if (peek() === 47)
return next()
}
/**
 * Consume input up to the matching closing delimiter and return the trimmed
 * slice. For '[' (91) the closer is ']' (91 + 2); for '(' (40) it is ')'
 * (40 + 1); any other opener (e.g. a quote) closes on the same char code.
 *
 * @param {number} type - char code of the opening delimiter
 * @return {string} trimmed contents from just before the opener to the closer
 */
export function delimit (type) {
return trim(slice(position - 1, delimiter(type === 91 ? type + 2 : type === 40 ? type + 1 : type)))
}

return 0
/**
 * Tokenize a string: allocate the tokenizer buffer for the input, run the
 * token loop over it, then release the buffer and return the tokens.
 *
 * @param {string} value - input string to split into tokens
 * @return {string[]} the collected tokens
 */
export function tokenize (value) {
var buffer = alloc(value)
var tokens = tokenizer(buffer)
return dealloc(tokens)
}

/**
* @param {number} type
* @return {string}
*/
export function delimit (type) {
return trim(slice(caret() - 1, delimiter(type === 91 ? type + 2 : type === 40 ? type + 1 : type)))
/**
 * Skip a run of whitespace (char codes < 33). The run collapses to a single
 * space, or to the empty string when either the preceding character's token
 * class (`type`) or the next character's token class is > 2 —
 * NOTE(review): the exact meaning of the classes lives in `token()`; verify there.
 *
 * @param {number} type - char code of the character preceding the run
 * @return {string} ' ' or ''
 */
export function whitespace (type) {
while (character = peek())
if (character < 33)
next()
else
break

return token(type) > 2 || token(character) > 2 ? '' : ' '
}

/**
 * Main token loop: consume characters until the input is exhausted, appending
 * one token per iteration to `children`:
 * - token classes 4 and 0: an identifier run starting at the previous char
 * - token class 2: a delimited span (see `delimit`)
 * - anything else: the single character converted to a string
 *
 * @param {string[]} children - accumulator for the produced tokens
 * @return {string[]} the same `children` array, filled
 */
export function tokenizer (children) {
while (next())
switch (token(character)) {
case 4: case 0: append(identifier(position - 1), children)
break
case 2: append(delimit(character), children)
break
default: append(from(character), children)
}

return children
}

/**
Expand All @@ -137,7 +164,7 @@ export function delimiter (type) {
switch (character) {
// ] ) " '
case type:
return caret()
return position
// " '
case 34: case 39:
return delimiter(type === 34 || type === 39 ? type : character)
Expand All @@ -152,21 +179,24 @@ export function delimiter (type) {
break
}

return caret()
return position
}

/**
* @param {number} type
* @return {string}
* @param {number} index
* @return {number}
*/
export function whitespace (type) {
while (character = peek())
if (character < 33)
next()
else
export function commenter (type, index) {
while (next())
// //
if (type + character === 47 + 10)
break
// /*
else if (type + character === 42 + 42 && peek() === 47)
break

return token(type) > 2 || token(character) > 2 ? '' : ' '
return '/*' + slice(index, position - 1) + '*' + from(type === 47 ? type : next())
}

/**
Expand All @@ -177,30 +207,5 @@ export function identifier (index) {
while (!token(peek()))
next()

return slice(index, caret())
}

/**
* @param {string} value
* @return {string[]}
*/
export function tokenize (value) {
return dealloc(tokenizer(alloc(value)))
}

/**
* @param {string[]} children
* @return {string[]}
*/
export function tokenizer (children) {
while (next())
switch (token(character)) {
case 4: case 0: append(identifier(caret() - 1), children)
break
case 2: append(delimit(character), children)
break
default: append(from(character), children)
}

return children
return slice(index, position)
}
6 changes: 6 additions & 0 deletions test/Middleware.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,4 +21,10 @@ describe('Middleware', () => {
`.user{width:0;}`, `p,a{width:1;}`, `h1.user{width:1;}`, `h1.user h2.user:last-child{width:2;}`, `@media{.user{width:1;}}`
].join(''))
})

// Comment nodes (type 'comm') now flow through middleware: the first
// middleware rewrites every 'comm' node to 'color:red;', so both the
// top-level /*@noflip*/ block comment and the //noflip line comment
// inside .user appear as 'color:red;' in the serialized output.
test('comments', () => {
expect(serialize(compile(`/*@noflip*/ .user{//noflip\n\n}`), middleware([value => value.type === 'comm' ? 'color:red;' : '', stringify]))).to.deep.equal([
`color:red;.user{color:red;}`
].join())
})
})
20 changes: 10 additions & 10 deletions test/Parser.js
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ describe('Parser', () => {
`.user{color:red;}`,
`.user button{color:blue;}.user button{color:red;}`,
`.user h1{color:red;color:red;}`
].join(''));
].join(''))
})

test('&', () => {
Expand Down Expand Up @@ -332,7 +332,7 @@ describe('Parser', () => {
).to.equal([
`.user span,.user h1{color:red;}`,
`.user h1,.user:after,.user:before{color:red;}`
].join(''))
].join(''))
})

test('[title="a,b"] and :matches(a,b)', () => {
Expand Down Expand Up @@ -575,27 +575,27 @@ describe('Parser', () => {
expect(stylis(`h1{color:red/**}`)).to.equal(`.user h1{color:red;}`)
})

test('comments(context character I)', () => {
test('context character I', () => {
expect(stylis(`.a{color:red;/* } */}`)).to.equal(`.user .a{color:red;}`)
})

test('comments(context character II)', () => {
test('context character II', () => {
expect(stylis(`.a{color:red;/*}*/}`)).to.equal(`.user .a{color:red;}`)
})

test('comments(context character III)', () => {
test('context character III', () => {
expect(stylis(`.a{color:red;/*{*/}`)).to.equal(`.user .a{color:red;}`)
})

test('comments(context character IV)', () => {
test('context character IV', () => {
expect(stylis(`.a{/**/color:red}`)).to.equal(`.user .a{color:red;}`)
})

test('comments(context character V)', () => {
test('context character V', () => {
expect(stylis(`.a{color:red;/*//color:blue;*/}`)).to.equal(`.user .a{color:red;}`)
})

test('comments(context character VI)', () => {
test('context character VI', () => {
expect(
stylis(
`background: url("img}.png");.a {background: url("img}.png");}`
Expand All @@ -606,7 +606,7 @@ describe('Parser', () => {
].join(''))
})

test('comments(context character VII)', () => {
test('context character VII', () => {
expect(
stylis(`background: url(img}.png);.a {background: url(img}.png);}`)
).to.equal([
Expand All @@ -615,7 +615,7 @@ describe('Parser', () => {
].join(''))
})

test('comments(context character VIII)', () => {
test('context character VIII', () => {
expect(
stylis(`background: url[img}.png];.a {background: url[img}.png];}`)
).to.equal([
Expand Down

0 comments on commit 5b51636

Please sign in to comment.