Commit

Extract unified plugins (#11632)
sosukesuzuki committed Oct 7, 2021
1 parent 5808270 commit 5909f5b
Showing 6 changed files with 133 additions and 111 deletions.
116 changes: 5 additions & 111 deletions src/language-markdown/parser-markdown.js
@@ -4,11 +4,14 @@ const remarkParse = require("remark-parse");
const unified = require("unified");
const remarkMath = require("remark-math");
const footnotes = require("remark-footnotes");
const parseFrontMatter = require("../utils/front-matter/parse.js");
const pragma = require("./pragma.js");
const { locStart, locEnd } = require("./loc.js");
const { mapAst, INLINE_NODE_WRAPPER_TYPES } = require("./utils.js");
const mdx = require("./mdx.js");
const htmlToJsx = require("./unified-plugins/html-to-jsx.js");
const frontMatter = require("./unified-plugins/front-matter.js");
const liquid = require("./unified-plugins/liquid.js");
const wikiLink = require("./unified-plugins/wiki-link.js");
const looseItems = require("./unified-plugins/loose-items.js");

/**
* based on [MDAST](https://github.com/syntax-tree/mdast) with following modifications:
@@ -47,115 +50,6 @@ function identity(x) {
return x;
}

function htmlToJsx() {
return (ast) =>
mapAst(ast, (node, _index, [parent]) => {
if (
node.type !== "html" ||
mdx.COMMENT_REGEX.test(node.value) ||
INLINE_NODE_WRAPPER_TYPES.includes(parent.type)
) {
return node;
}

return { ...node, type: "jsx" };
});
}

function frontMatter() {
const proto = this.Parser.prototype;
proto.blockMethods = ["frontMatter", ...proto.blockMethods];
proto.blockTokenizers.frontMatter = tokenizer;

function tokenizer(eat, value) {
const parsed = parseFrontMatter(value);

if (parsed.frontMatter) {
return eat(parsed.frontMatter.raw)(parsed.frontMatter);
}
}
tokenizer.onlyAtStart = true;
}

function liquid() {
const proto = this.Parser.prototype;
const methods = proto.inlineMethods;
methods.splice(methods.indexOf("text"), 0, "liquid");
proto.inlineTokenizers.liquid = tokenizer;

function tokenizer(eat, value) {
const match = value.match(/^({%.*?%}|{{.*?}})/s);

if (match) {
return eat(match[0])({
type: "liquidNode",
value: match[0],
});
}
}
tokenizer.locator = function (value, fromIndex) {
return value.indexOf("{", fromIndex);
};
}

function wikiLink() {
const entityType = "wikiLink";
const wikiLinkRegex = /^\[\[(?<linkContents>.+?)]]/s;
const proto = this.Parser.prototype;
const methods = proto.inlineMethods;
methods.splice(methods.indexOf("link"), 0, entityType);
proto.inlineTokenizers.wikiLink = tokenizer;

function tokenizer(eat, value) {
const match = wikiLinkRegex.exec(value);

if (match) {
const linkContents = match.groups.linkContents.trim();

return eat(match[0])({
type: entityType,
value: linkContents,
});
}
}

tokenizer.locator = function (value, fromIndex) {
return value.indexOf("[", fromIndex);
};
}

function looseItems() {
const proto = this.Parser.prototype;
const originalList = proto.blockTokenizers.list;

function fixListNodes(value, node, parent) {
if (node.type === "listItem") {
node.loose = node.spread || value.charAt(value.length - 1) === "\n";
if (node.loose) {
parent.loose = true;
}
}
return node;
}

proto.blockTokenizers.list = function list(realEat, value, silent) {
function eat(subvalue) {
const realAdd = realEat(subvalue);

function add(node, parent) {
return realAdd(fixListNodes(subvalue, node, parent), parent);
}
add.reset = function (node, parent) {
return realAdd.reset(fixListNodes(subvalue, node, parent), parent);
};

return add;
}
eat.now = realEat.now;
return originalList.call(this, eat, value, silent);
};
}

const baseParser = {
astFormat: "mdast",
hasPragma: pragma.hasPragma,
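
The collapsed remainder of parser-markdown.js is where the extracted plugins get wired into the unified pipeline. A rough sketch of that composition, assuming the same chaining the file used before the extraction (the plugin order and options here are an approximation, not a verbatim quote of the file):

// Sketch only: approximate wiring of the extracted plugins into unified.
const unified = require("unified");
const remarkParse = require("remark-parse");
const remarkMath = require("remark-math");
const footnotes = require("remark-footnotes");
const htmlToJsx = require("./unified-plugins/html-to-jsx.js");
const frontMatter = require("./unified-plugins/front-matter.js");
const liquid = require("./unified-plugins/liquid.js");
const wikiLink = require("./unified-plugins/wiki-link.js");
const looseItems = require("./unified-plugins/loose-items.js");

function createParse({ isMDX }) {
  return (text) => {
    const processor = unified()
      .use(remarkParse, { commonmark: true })
      .use(footnotes)
      .use(frontMatter)
      .use(remarkMath)
      .use(liquid)
      .use(isMDX ? htmlToJsx : identity) // identity: the no-op helper shown above
      .use(wikiLink)
      .use(looseItems);
    return processor.runSync(processor.parse(text));
  };
}
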
20 changes: 20 additions & 0 deletions src/language-markdown/unified-plugins/front-matter.js
@@ -0,0 +1,20 @@
"use strict";

const parseFrontMatter = require("../../utils/front-matter/parse.js");

function frontMatter() {
const proto = this.Parser.prototype;
proto.blockMethods = ["frontMatter", ...proto.blockMethods];
proto.blockTokenizers.frontMatter = tokenizer;

function tokenizer(eat, value) {
const parsed = parseFrontMatter(value);

if (parsed.frontMatter) {
return eat(parsed.frontMatter.raw)(parsed.frontMatter);
}
}
tokenizer.onlyAtStart = true;
}

module.exports = frontMatter;
20 changes: 20 additions & 0 deletions src/language-markdown/unified-plugins/html-to-jsx.js
@@ -0,0 +1,20 @@
"use strict";

const mdx = require("../mdx.js");
const { mapAst, INLINE_NODE_WRAPPER_TYPES } = require("../utils.js");

function htmlToJsx() {
return (ast) =>
mapAst(ast, (node, _index, [parent]) => {
if (
node.type !== "html" ||
mdx.COMMENT_REGEX.test(node.value) ||
INLINE_NODE_WRAPPER_TYPES.includes(parent.type)
) {
return node;
}
return { ...node, type: "jsx" };
});
}

module.exports = htmlToJsx;
24 changes: 24 additions & 0 deletions src/language-markdown/unified-plugins/liquid.js
@@ -0,0 +1,24 @@
"use strict";

function liquid() {
const proto = this.Parser.prototype;
const methods = proto.inlineMethods;
methods.splice(methods.indexOf("text"), 0, "liquid");
proto.inlineTokenizers.liquid = tokenizer;

function tokenizer(eat, value) {
const match = value.match(/^({%.*?%}|{{.*?}})/s);

if (match) {
return eat(match[0])({
type: "liquidNode",
value: match[0],
});
}
}
tokenizer.locator = function (value, fromIndex) {
return value.indexOf("{", fromIndex);
};
}

module.exports = liquid;
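
The locator lets remark jump to the next "{" before retrying this tokenizer, and the anchored regex then only accepts a tag or output expression starting at that exact position. An illustrative check of the same pattern (not part of the commit):

// Illustrative only: the pattern used by the liquid tokenizer above.
const LIQUID = /^({%.*?%}|{{.*?}})/s;
LIQUID.exec("{{ page.title }} rest");      // matches "{{ page.title }}"
LIQUID.exec("{% if user %}Hi{% endif %}"); // matches "{% if user %}" (lazy)
LIQUID.exec("plain text");                 // null – nothing eaten as liquidNode
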
35 changes: 35 additions & 0 deletions src/language-markdown/unified-plugins/loose-items.js
@@ -0,0 +1,35 @@
"use strict";

function looseItems() {
const proto = this.Parser.prototype;
const originalList = proto.blockTokenizers.list;

function fixListNodes(value, node, parent) {
if (node.type === "listItem") {
node.loose = node.spread || value.charAt(value.length - 1) === "\n";
if (node.loose) {
parent.loose = true;
}
}
return node;
}

proto.blockTokenizers.list = function list(realEat, value, silent) {
function eat(subvalue) {
const realAdd = realEat(subvalue);

function add(node, parent) {
return realAdd(fixListNodes(subvalue, node, parent), parent);
}
add.reset = function (node, parent) {
return realAdd.reset(fixListNodes(subvalue, node, parent), parent);
};

return add;
}
eat.now = realEat.now;
return originalList.call(this, eat, value, silent);
};
}

module.exports = looseItems;
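
This plugin wraps the block list tokenizer's eat function so every node added while parsing a list first passes through fixListNodes, which marks an item (and its parent list) as loose when the item is spread or its source chunk ends with a newline. A minimal sketch of that eat-wrapper pattern in isolation, with a hypothetical helper name (remark's real eat/add carry more state than this):

// Hypothetical standalone illustration of the wrapper pattern used above.
function wrapEat(realEat, transform) {
  function eat(subvalue) {
    const realAdd = realEat(subvalue);
    function add(node, parent) {
      return realAdd(transform(subvalue, node, parent), parent);
    }
    add.reset = (node, parent) =>
      realAdd.reset(transform(subvalue, node, parent), parent);
    return add;
  }
  eat.now = realEat.now; // tokenizers rely on eat.now() for position info
  return eat;
}
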
29 changes: 29 additions & 0 deletions src/language-markdown/unified-plugins/wiki-link.js
@@ -0,0 +1,29 @@
"use strict";

function wikiLink() {
const entityType = "wikiLink";
const wikiLinkRegex = /^\[\[(?<linkContents>.+?)]]/s;
const proto = this.Parser.prototype;
const methods = proto.inlineMethods;
methods.splice(methods.indexOf("link"), 0, entityType);
proto.inlineTokenizers.wikiLink = tokenizer;

function tokenizer(eat, value) {
const match = wikiLinkRegex.exec(value);

if (match) {
const linkContents = match.groups.linkContents.trim();

return eat(match[0])({
type: entityType,
value: linkContents,
});
}
}

tokenizer.locator = function (value, fromIndex) {
return value.indexOf("[", fromIndex);
};
}

module.exports = wikiLink;
