Commit 5963e7b

fix: recognise Nunjucks double curly variables within CSS rules
revelt committed Mar 17, 2021
1 parent 2f148b0 commit 5963e7b
Showing 8 changed files with 544 additions and 381 deletions.
2 changes: 1 addition & 1 deletion packages/codsen-tokenizer/coverage/coverage-summary.json
@@ -1 +1 @@
{"total":{"lines":{"total":995,"covered":932,"skipped":0,"pct":93.67},"statements":{"total":1024,"covered":959,"skipped":0,"pct":93.65},"functions":{"total":43,"covered":41,"skipped":0,"pct":95.35},"branches":{"total":1607,"covered":1481,"skipped":0,"pct":92.16}}}
{"total":{"lines":{"total":997,"covered":934,"skipped":0,"pct":93.68},"statements":{"total":1026,"covered":961,"skipped":0,"pct":93.66},"functions":{"total":43,"covered":41,"skipped":0,"pct":95.35},"branches":{"total":1624,"covered":1498,"skipped":0,"pct":92.24}}}
12 changes: 9 additions & 3 deletions packages/codsen-tokenizer/dist/codsen-tokenizer.cjs.js
@@ -769,7 +769,9 @@ function tokenizer(str, originalOpts) {
});
selectorChunkStartedAt = undefined;
token.selectorsEnd = _i;
} else if (str[_i] === "{" && token.openingCurlyAt && !token.closingCurlyAt) {
} else if (str[_i] === "{" && str[_i - 1] !== "{" &&
str[_i + 1] !== "{" &&
token.openingCurlyAt && !token.closingCurlyAt) {
for (var y = _i; y--;) {
if (!str[y].trim() || "{}\"';".includes(str[y])) {
if (property && property.start && !property.end) {
@@ -1061,7 +1063,8 @@ function tokenizer(str, originalOpts) {
var R2 = void 0;
if (!doNothing && (property.start || str[_i] === "!")) {
var idxRightIncl = stringLeftRight.right(str, _i - 1);
R1 = ";{}<>".includes(str[idxRightIncl]) ||
R1 = ";<>".includes(str[idxRightIncl]) ||
str[idxRightIncl] === "{" && str[_i - 1] !== "{" || str[idxRightIncl] === "}" && str[_i - 1] !== "}" ||
"'\"".includes(str[idxRightIncl]) && (
!layers ||
!layers.length ||
@@ -1256,6 +1259,9 @@ function tokenizer(str, originalOpts) {
property.valueStarts = _i;
}
}
if (!doNothing && str[_i] === "{" && str[_i + 1] === "{" && property && property.valueStarts && !property.valueEnds && str.indexOf("}}", _i) > 0) {
doNothing = str.indexOf("}}") + 2;
}
if (!doNothing && token.type === "rule" && str[_i] && str[_i].trim() && !"{}".includes(str[_i]) && !selectorChunkStartedAt && !token.openingCurlyAt) {
if (!",".includes(str[_i])) {
selectorChunkStartedAt = _i;
@@ -1566,7 +1572,7 @@ function tokenizer(str, originalOpts) {
attrib.attribNameStartsAt = _i;
}
if (!doNothing && token.type === "rule") {
if (str[_i] === "{" && !token.openingCurlyAt) {
if (str[_i] === "{" && str[_i + 1] !== "{" && str[_i - 1] !== "{" && !token.openingCurlyAt) {
token.openingCurlyAt = _i;
} else if (str[_i] === "}" && token.openingCurlyAt && !token.closingCurlyAt) {
token.closingCurlyAt = _i;
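The compiled change above does two things: a lone "{" only counts as a rule's opening curly when the characters on either side are not also "{", and a "{{" met while a property's value is being read makes the scanner jump past the matching "}}" by bumping the doNothing counter. Below is a minimal, self-contained sketch of that skip-ahead idea; the helper name skipNunjucksCurlies is hypothetical and this is not the library's code.

// Hypothetical illustration of the skip-ahead used above, not library code:
// a "{" immediately followed by another "{" is treated as the start of a
// Nunjucks variable, and scanning resumes only after the matching "}}".
function skipNunjucksCurlies(str) {
  const found = [];
  for (let i = 0; i < str.length; i++) {
    if (str[i] === "{" && str[i + 1] === "{") {
      const closingAt = str.indexOf("}}", i);
      if (closingAt > 0) {
        found.push(str.slice(i, closingAt + 2)); // the whole "{{ ... }}"
        i = closingAt + 1; // continue right after "}}"
        continue;
      }
    }
    // ...ordinary per-character tokenization would happen here...
  }
  return found;
}

console.log(skipNunjucksCurlies(".a { color: {{ brandColor }}; }"));
// => [ "{{ brandColor }}" ]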
14 changes: 11 additions & 3 deletions packages/codsen-tokenizer/dist/codsen-tokenizer.dev.umd.js
@@ -4220,7 +4220,9 @@ function tokenizer(str, originalOpts) {
});
selectorChunkStartedAt = undefined;
token.selectorsEnd = _i;
} else if (str[_i] === "{" && token.openingCurlyAt && !token.closingCurlyAt) {
} else if (str[_i] === "{" && str[_i - 1] !== "{" && // avoid Nunjucks variable as CSS rule's value
str[_i + 1] !== "{" && // avoid Nunjucks variable as CSS rule's value
token.openingCurlyAt && !token.closingCurlyAt) {
// we encounted an opening curly even though closing hasn't
// been met yet:
// <style>.a{float:left;x">.b{color: red}
@@ -4814,7 +4816,8 @@ function tokenizer(str, originalOpts) {

if (!doNothing && (property.start || str[_i] === "!")) {
var idxRightIncl = right(str, _i - 1);
R1 = ";{}<>".includes(str[idxRightIncl]) || // or it's a quote
R1 = ";<>".includes(str[idxRightIncl]) || // avoid Nunjucks ESP tags, {{ zzz }}
str[idxRightIncl] === "{" && str[_i - 1] !== "{" || str[idxRightIncl] === "}" && str[_i - 1] !== "}" || // or it's a quote
"'\"".includes(str[idxRightIncl]) && ( // but then it has to be a matching counterpart
// either there are no layers
!layers || // or there are but they're empty
@@ -5099,6 +5102,11 @@ function tokenizer(str, originalOpts) {
} else {
property.valueStarts = _i;
}
+ } // catch double opening curlies inside a css property
+
+
+ if (!doNothing && str[_i] === "{" && str[_i + 1] === "{" && property && property.valueStarts && !property.valueEnds && str.indexOf("}}", _i) > 0) {
+ doNothing = str.indexOf("}}") + 2;
} // catch the start of a css chunk
// -------------------------------------------------------------------------

@@ -5676,7 +5684,7 @@ function tokenizer(str, originalOpts) {


if (!doNothing && token.type === "rule") {
if (str[_i] === "{" && !token.openingCurlyAt) {
if (str[_i] === "{" && str[_i + 1] !== "{" && str[_i - 1] !== "{" && !token.openingCurlyAt) {
token.openingCurlyAt = _i;
} else if (str[_i] === "}" && token.openingCurlyAt && !token.closingCurlyAt) {
token.closingCurlyAt = _i;
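The reworked R1 condition in this bundle (same change, with the dev comments kept) stops a "{" or "}" from terminating the property being read when it belongs to a double-curly pair. A reduced sketch of that check follows; the name endsPropertyHere is hypothetical and the quote/layer handling from the real condition is omitted.

// Hypothetical reduction of the reworked R1 check: a "{" or "}" sitting to
// the right only ends the current property when it is not part of "{{" / "}}".
function endsPropertyHere(str, i, idxRight) {
  if (";<>".includes(str[idxRight])) {
    return true;
  }
  if (str[idxRight] === "{" && str[i - 1] !== "{") {
    return true;
  }
  if (str[idxRight] === "}" && str[i - 1] !== "}") {
    return true;
  }
  return false;
}

console.log(endsPropertyHere("color: {{ c }};", 8, 8));   // false - inside "{{", keep reading the value
console.log(endsPropertyHere("color: red;", 10, 10));     // true - ";" ends the property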
12 changes: 9 additions & 3 deletions packages/codsen-tokenizer/dist/codsen-tokenizer.esm.js
@@ -750,7 +750,9 @@ function tokenizer(str, originalOpts) {
});
selectorChunkStartedAt = undefined;
token.selectorsEnd = i;
} else if (str[i] === "{" && token.openingCurlyAt && !token.closingCurlyAt) {
} else if (str[i] === "{" && str[i - 1] !== "{" &&
str[i + 1] !== "{" &&
token.openingCurlyAt && !token.closingCurlyAt) {
for (let y = i; y--;) {
if (!str[y].trim() || `{}"';`.includes(str[y])) {
if (property && property.start && !property.end) {
@@ -1039,7 +1041,8 @@ function tokenizer(str, originalOpts) {
let R2;
if (!doNothing && (property.start || str[i] === "!")) {
const idxRightIncl = right(str, i - 1);
- R1 = `;{}<>`.includes(str[idxRightIncl]) ||
+ R1 = `;<>`.includes(str[idxRightIncl]) ||
+ str[idxRightIncl] === `{` && str[i - 1] !== `{` || str[idxRightIncl] === `}` && str[i - 1] !== `}` ||
`'"`.includes(str[idxRightIncl]) && (
!layers ||
!layers.length ||
@@ -1234,6 +1237,9 @@ function tokenizer(str, originalOpts) {
property.valueStarts = i;
}
}
if (!doNothing && str[i] === "{" && str[i + 1] === "{" && property && property.valueStarts && !property.valueEnds && str.indexOf("}}", i) > 0) {
doNothing = str.indexOf("}}") + 2;
}
if (!doNothing && token.type === "rule" && str[i] && str[i].trim() && !"{}".includes(str[i]) && !selectorChunkStartedAt && !token.openingCurlyAt) {
if (!",".includes(str[i])) {
selectorChunkStartedAt = i;
@@ -1542,7 +1548,7 @@ function tokenizer(str, originalOpts) {
attrib.attribNameStartsAt = i;
}
if (!doNothing && token.type === "rule") {
if (str[i] === "{" && !token.openingCurlyAt) {
if (str[i] === "{" && str[i + 1] !== "{" && str[i - 1] !== "{" && !token.openingCurlyAt) {
token.openingCurlyAt = i;
} else if (str[i] === "}" && token.openingCurlyAt && !token.closingCurlyAt) {
token.closingCurlyAt = i;
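For context, here is a hedged usage sketch of the case this commit addresses: a Nunjucks variable used as a CSS property value inside an inline style block. The tagCb callback option name is assumed from the package's public API, and the printed values are illustrative; the token fields themselves (type, openingCurlyAt, closingCurlyAt) appear in the diffs above.

import { tokenizer } from "codsen-tokenizer";

const source = "<style>.a { color: {{ brandColor }}; }</style>";

tokenizer(source, {
  // tagCb is assumed to be this package's per-token callback option
  tagCb: (token) => {
    if (token.type === "rule") {
      // With this fix, the "{{" of the Nunjucks variable is no longer
      // mistaken for the rule's opening curly: openingCurlyAt should point
      // at the brace after ".a" and closingCurlyAt at the final "}".
      console.log(token.openingCurlyAt, token.closingCurlyAt);
    }
  },
});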
2 changes: 1 addition & 1 deletion packages/codsen-tokenizer/dist/codsen-tokenizer.mjs

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion packages/codsen-tokenizer/dist/codsen-tokenizer.umd.js

Large diffs are not rendered by default.
