Skip to content

Commit

Permalink
feat(Wikilinks): First pass at supporting Wikilinks Syntax
Browse files Browse the repository at this point in the history
  • Loading branch information
andymac4182 committed Apr 11, 2023
1 parent 3943446 commit 57e9f65
Show file tree
Hide file tree
Showing 4 changed files with 241 additions and 15 deletions.
3 changes: 3 additions & 0 deletions src/MarkdownTransformer/index.ts
Expand Up @@ -8,6 +8,7 @@ import { MarkdownParser } from "prosemirror-markdown";
import { Schema, Node as PMNode } from "prosemirror-model";
import { markdownItMedia } from "./media";
import myTokenizer from "./callout";
import wikilinksPlugin from "./wikilinks";

function filterMdToPmSchemaMapping(schema: Schema, map: any) {
return Object.keys(map).reduce((newMap: any, key: string) => {
Expand Down Expand Up @@ -154,6 +155,8 @@ export class MarkdownTransformer implements Transformer<Markdown> {
tokenizer.use(myTokenizer);
}

tokenizer.use(wikilinksPlugin);

//if (schema.nodes.task) {
// tokenizer.use(taskLists);
//}
Expand Down
183 changes: 183 additions & 0 deletions src/MarkdownTransformer/wikilinks.ts
@@ -0,0 +1,183 @@
import MarkdownIt from "markdown-it";
import StateInline from "markdown-it/lib/rules_inline/state_inline";

/**
 * markdown-it inline rule that parses Obsidian-style wikilinks:
 *   [[Page]], [[Page#Header]], [[Page|Alias]], [[Page#Header|Alias]]
 *
 * Emits link_open / link_close tokens whose href is
 * `wikilinks:<page><#header>`. When an alias is present, only the alias
 * text is tokenized as the link body and the alias is also stored in the
 * "title" attribute; otherwise the full inner content becomes the body.
 *
 * @param state markdown-it inline parser state positioned at a candidate.
 * @returns true when a wikilink was consumed, false to let other rules run.
 */
export function wikilinks(state: StateInline): boolean {
  const max = state.posMax;

  // Must start with "[[".
  if (
    state.src.charCodeAt(state.pos) !== 0x5b ||
    state.src.charCodeAt(state.pos + 1) !== 0x5b /* [ */
  ) {
    return false;
  }

  const wikilinkStart = state.pos + 2;
  // Index of the FIRST "]" of the closing "]]", or -1 when unterminated.
  const wikiLinkEnd = findLinkEnd(state, state.pos);
  if (wikiLinkEnd < 0) {
    return false;
  }

  const { hashFragment, headerStart } = findLinkToHeader(
    state,
    state.pos,
    wikiLinkEnd
  );
  const { alias, aliasStart, aliasEnd } = findAlias(
    state,
    state.pos,
    wikiLinkEnd
  );

  // Page name ends at the "#" (header) or "|" (alias), whichever comes
  // first; aliasStart points just past the "|", hence the -1.
  const pageNameStart = wikilinkStart;
  const pageNameEnd = Math.min(
    wikiLinkEnd,
    headerStart > 0 ? headerStart : wikiLinkEnd,
    aliasStart > 0 ? aliasStart - 1 : wikiLinkEnd
  );
  const linkToPage = state.src.slice(pageNameStart, pageNameEnd);

  // Restrict tokenization to the visible link text: the alias when
  // present, otherwise the whole inner content of the wikilink.
  if (alias) {
    state.pos = aliasStart;
    state.posMax = aliasEnd;
  } else {
    state.pos = wikilinkStart;
    state.posMax = wikiLinkEnd;
  }

  const href = `wikilinks:${linkToPage}${hashFragment ?? ""}`;

  let token = state.push("link_open", "a", 1);
  token.attrs = [["href", href]];
  if (alias) {
    token.attrs.push(["title", alias]);
  }
  state.md.inline.tokenize(state);
  token = state.push("link_close", "a", -1);

  // Resume immediately after the closing "]]". wikiLinkEnd is the index of
  // the first "]", so the first character past the pair is wikiLinkEnd + 2.
  // (The previous "+ 3" swallowed one character of the following text.)
  state.pos = wikiLinkEnd + 2;
  state.posMax = max;
  return true;
}

/**
 * Scans forward from `start` (the first "[" of "[[") looking for the
 * closing "]]" pair, using skipToken so escaped brackets and nested inline
 * constructs are stepped over correctly.
 *
 * @returns the index of the FIRST "]" of the closing pair, or -1 when the
 *          wikilink is unterminated. The caller's position is restored.
 */
function findLinkEnd(state: StateInline, start: number) {
  const limit = state.posMax;
  const savedPos = state.pos;
  let closingIndex = -1;

  // Begin just past the opening "[[".
  state.pos = start + 2;

  while (state.pos < limit) {
    const prevIsBracket = state.src.charCodeAt(state.pos - 1) === 0x5d;
    if (prevIsBracket && state.src.charCodeAt(state.pos) === 0x5d) {
      // state.pos sits on the second "]"; report the first one.
      closingIndex = state.pos - 1;
      break;
    }
    state.md.inline.skipToken(state);
  }

  // Leave the parser position untouched for the caller.
  state.pos = savedPos;
  return closingIndex;
}

/**
 * Locates the optional "#header" section of a wikilink.
 *
 * Scans the inner content (from start + 2 up to `max`, the index of the
 * first "]" of the closing "]]"). The header begins at the last "#" seen
 * and ends at the alias separator "|" or at the end of the link.
 *
 * Fix over the first pass: the old loop only reported a header when a "|"
 * followed it — the "]" branch was unreachable because the scan stops at
 * pos < max and max IS the "]" — so `[[Page#Header]]` (no alias) lost its
 * fragment. When the scan exhausts the link, the header now runs to `max`.
 *
 * @returns hashFragment including the leading "#" (undefined when absent),
 *          plus the [headerStart, headerEnd) indices (-1 when absent).
 *          The caller's position is restored.
 */
function findLinkToHeader(
  state: StateInline,
  start: number,
  max: number
): { hashFragment?: string; headerStart: number; headerEnd: number } {
  let headerStart = -1;
  let headerEnd = -1;
  let hashFragment: string | undefined = undefined;
  const oldPos = state.pos;

  state.pos = start + 2;

  while (state.pos < max) {
    if (state.src.charCodeAt(state.pos) === 0x23 /* # */) {
      headerStart = state.pos;
    }

    if (
      headerStart >= 0 &&
      (state.src.charCodeAt(state.pos) === 0x5d /* ] (Link End) */ ||
        state.src.charCodeAt(state.pos) === 0x7c) /* | (Alias Start) */
    ) {
      break;
    }

    state.pos++;
  }

  if (headerStart >= 0) {
    // Either we broke on a delimiter (state.pos sits on it) or the loop
    // exhausted the link content (state.pos === max): both are the
    // exclusive end of the header.
    headerEnd = state.pos;
    hashFragment = state.src.slice(headerStart, headerEnd);
  }

  // restore old state
  state.pos = oldPos;

  return { hashFragment, headerStart, headerEnd };
}

/**
 * Locates the optional "|alias" section of a wikilink.
 *
 * Scans the inner content (from start + 2 through `max`, the index of the
 * first "]" of the closing "]]"). The alias starts just past the last "|"
 * seen and ends right before the closing bracket.
 *
 * @returns the alias text (undefined when absent) plus the
 *          [aliasStart, aliasEnd) indices (-1 when no "]" terminated the
 *          alias). The caller's position is restored.
 */
function findAlias(
  state: StateInline,
  start: number,
  max: number
): { alias: string | undefined; aliasStart: number; aliasEnd: number } {
  const savedPos = state.pos;
  let aliasStart = -1;
  let aliasEnd = -1;
  let alias: string | undefined = undefined;

  state.pos = start + 2;

  while (state.pos <= max) {
    if (state.src.charCodeAt(state.pos) === 0x7c /* | */) {
      aliasStart = state.pos + 1;
    }

    // Once a "|" has been seen, the alias ends just before the next "]".
    if (
      aliasStart >= 0 &&
      state.src.charCodeAt(state.pos + 1) === 0x5d /* ] (Link End) */
    ) {
      aliasEnd = state.pos + 1;
      alias = state.src.slice(aliasStart, aliasEnd);
      break;
    }

    state.pos++;
  }

  // Leave the parser position untouched for the caller.
  state.pos = savedPos;

  return { alias, aliasStart, aliasEnd };
}

/**
 * markdown-it plugin entry point: registers the `wikilinks` inline rule so
 * the tokenizer recognizes `[[...]]` syntax. Pushed to the end of the
 * inline ruler chain, so the built-in rules keep their priority.
 */
export default function wikilinksPlugin(md: MarkdownIt): void {
  md.inline.ruler.push("wikilinks", wikilinks);
}
42 changes: 42 additions & 0 deletions src/Publisher.ts
Expand Up @@ -119,6 +119,48 @@ export class Publisher {
);
const confluencePagesToPublish = flattenTree(confluencePageTree);

// Map each markdown file name to the Confluence page id it will publish to,
// so wikilink hrefs can be rewritten to real Confluence URLs below.
const fileToPageIdMap: Record<string, string> = {};

confluencePagesToPublish.forEach((node) => {
  // Every page must already have an id at this point (created earlier in
  // the publish pipeline); fail loudly rather than emit broken links.
  if (!node.file.pageId) {
    throw new Error("Missing Page ID");
  }

  fileToPageIdMap[node.file.fileName] = node.file.pageId;
});

// Second pass: rewrite every "wikilinks:" href in the ADF to a Confluence
// page URL, preserving any #fragment from the original wikilink.
confluencePagesToPublish.forEach((node) => {
  // NOTE(review): the `text` visitor parameter shadows the outer `node`
  // (the page being traversed) — harmless here but worth renaming.
  node.file.contents = traverse(node.file.contents, {
    // Only inspects the FIRST mark; assumes links carry a single mark —
    // TODO confirm against the ADF produced by MdToADF.
    text: (node, _parent) => {
      if (
        node.marks &&
        node.marks[0].type === "link" &&
        node.marks[0].attrs
      ) {
        if (
          typeof node.marks[0].attrs.href === "string" &&
          node.marks[0].attrs.href.startsWith("wikilink")
        ) {
          // "wikilinks:Page#Header" parses with pathname "Page" and
          // hash "#Header" under the WHATWG URL parser.
          const wikilinkUrl = new URL(
            node.marks[0].attrs.href
          );
          // Wikilink page names have no extension; lookup key adds ".md".
          const pagename = `${wikilinkUrl.pathname}.md`;

          // Undefined when the target page is not part of this publish —
          // the URL then contains "undefined"; TODO handle missing targets.
          const linkPageId = fileToPageIdMap[pagename];
          // TODO(review): space key "~557058..." is a hardcoded personal
          // space — should come from settings.
          const confluenceUrl = `${this.settings.confluenceBaseUrl}/wiki/spaces/~557058aea5688c52b94d15aabe96def1abc413/pages/${linkPageId}${wikilinkUrl.hash}`;
          // NOTE(review): debug logging left over from the first pass.
          console.log({
            pagename,
            fileToPageIdMap,
            wikiurl: confluenceUrl,
          });
          node.marks[0].attrs.href = confluenceUrl;
          return node;
        }
      }
    },
  }) as JSONDocNode;
});

const adrFileTasks = confluencePagesToPublish.map((file) => {
return this.publishFile(file);
});
Expand Down
28 changes: 13 additions & 15 deletions src/mdToADF.ts
Expand Up @@ -3,8 +3,6 @@ import {
JSONTransformer,
} from "@atlaskit/editor-json-transformer";
import { MarkdownTransformer } from "./MarkdownTransformer";
import { ConfluenceTransformer } from "@atlaskit/editor-confluence-transformer";
import { confluenceSchema as schema } from "@atlaskit/adf-schema/schema-confluence";
import { traverse } from "@atlaskit/adf-utils/traverse";
import { MarkdownFile } from "./adaptors/types";
import { AdfFile } from "./Publisher";
Expand All @@ -15,40 +13,40 @@ const frontmatterRegex = /^\s*?---\n([\s\S]*?)\n---/g;
export default class MdToADF {
transformer: MarkdownTransformer;
serializer: JSONTransformer;
confluenceSerializer: ConfluenceTransformer;
constructor() {
this.transformer = new MarkdownTransformer();
this.serializer = new JSONTransformer();
this.confluenceSerializer = new ConfluenceTransformer(schema);
}

private parse(markdown: string) {
const prosenodes = this.transformer.parse(markdown);
const adfNodes = this.serializer.encode(prosenodes);
const nodes = this.replaceLinkWithInlineSmartCard(adfNodes);
const nodes = this.processADF(adfNodes);
return nodes;
}

private replaceLinkWithInlineSmartCard(adf: JSONDocNode): JSONDocNode {
private processADF(adf: JSONDocNode): JSONDocNode {
const olivia = traverse(adf, {
text: (node, _parent) => {
if (
node.marks &&
node.marks[0].type === "link" &&
node.marks[0].attrs &&
node.marks[0].attrs.href === node.text
node.marks[0].attrs
) {
node.type = "inlineCard";
node.attrs = { url: node.marks[0].attrs.href };
delete node.marks;
delete node.text;
return node;
if (
!node.marks[0].attrs.title &&
node.marks[0].attrs.href === node.text
) {
node.type = "inlineCard";
node.attrs = { url: node.marks[0].attrs.href };
delete node.marks;
delete node.text;
return node;
}
}
},
});

console.log({ textingReplacement: JSON.stringify(olivia) });

if (!olivia) {
throw new Error("Failed to traverse");
}
Expand Down

0 comments on commit 57e9f65

Please sign in to comment.