Merge pull request #59 from obsidian-confluence/feat/wikilinks
andymac4182 committed Apr 11, 2023
2 parents 3943446 + 20656f4 commit 5e8a4a5
Showing 4 changed files with 258 additions and 19 deletions.
5 changes: 2 additions & 3 deletions src/MarkdownTransformer/index.ts
@@ -8,6 +8,7 @@ import { MarkdownParser } from "prosemirror-markdown";
import { Schema, Node as PMNode } from "prosemirror-model";
import { markdownItMedia } from "./media";
import myTokenizer from "./callout";
import wikilinksPlugin from "./wikilinks";

function filterMdToPmSchemaMapping(schema: Schema, map: any) {
return Object.keys(map).reduce((newMap: any, key: string) => {
@@ -154,9 +155,7 @@ export class MarkdownTransformer implements Transformer<Markdown> {
tokenizer.use(myTokenizer);
}

//if (schema.nodes.task) {
// tokenizer.use(taskLists);
//}
tokenizer.use(wikilinksPlugin);

(["nodes", "marks"] as (keyof SchemaMapping)[]).forEach((key) => {
for (const idx in pmSchemaToMdMapping[key]) {
180 changes: 180 additions & 0 deletions src/MarkdownTransformer/wikilinks.ts
@@ -0,0 +1,180 @@
import MarkdownIt from "markdown-it";
import StateInline from "markdown-it/lib/rules_inline/state_inline";

export function wikilinks(state: StateInline): boolean {
const max = state.posMax;

if (
state.src.charCodeAt(state.pos) !== 0x5b ||
state.src.charCodeAt(state.pos + 1) !== 0x5b /* [ */
) {
return false;
}

const wikilinkStart = state.pos + 2;
const wikiLinkEnd = findLinkEnd(state, state.pos);
if (wikiLinkEnd < 0) {
return false;
}

const { hashFragment, headerStart } = findLinkToHeader(
state,
state.pos,
wikiLinkEnd
);
const { alias, aliasStart, aliasEnd } = findAlias(
state,
state.pos,
wikiLinkEnd
);

const pageNameStart = wikilinkStart;
const pageNameEnd = Math.min(
wikiLinkEnd,
headerStart > 0 ? headerStart : wikiLinkEnd,
aliasStart > 0 ? aliasStart - 1 : wikiLinkEnd
);
const linkToPage = state.src.slice(pageNameStart, pageNameEnd);

if (alias) {
state.pos = aliasStart;
state.posMax = aliasEnd;
} else {
state.pos = wikilinkStart;
state.posMax = wikiLinkEnd;
}

const href = `wikilinks:${linkToPage}${hashFragment ?? ""}`;

let token = state.push("link_open", "a", 1);
token.attrs = [["href", href]];
state.md.inline.tokenize(state);
token = state.push("link_close", "a", -1);

state.pos = wikiLinkEnd + 3;
state.posMax = max;
return true;
}

function findLinkEnd(state: StateInline, start: number) {
let labelEnd = -1;
let found = false;

const max = state.posMax,
oldPos = state.pos;

state.pos = start + 2;

while (state.pos < max) {
if (
state.src.charCodeAt(state.pos - 1) === 0x5d &&
state.src.charCodeAt(state.pos) === 0x5d
) {
found = true;
break;
}

state.md.inline.skipToken(state);
}

if (found) {
labelEnd = state.pos - 1;
}

// restore old state
state.pos = oldPos;

return labelEnd;
}

function findLinkToHeader(
state: StateInline,
start: number,
max: number
): { hashFragment?: string; headerStart: number; headerEnd: number } {
let headerStart = -1,
headerEnd = -1,
found = false,
foundStart = false,
hashFragment = undefined;
const oldPos = state.pos;

state.pos = start + 2;

while (state.pos < max) {
if (state.src.charCodeAt(state.pos) === 0x23 /* # */) {
foundStart = true;
headerStart = state.pos;
}

if (
foundStart &&
(state.src.charCodeAt(state.pos) === 0x5d /* ] (Link End) */ ||
state.src.charCodeAt(state.pos) === 0x7c) /* | (Alias Start) */
) {
found = true;
break;
}

state.pos++;
}

if (found) {
headerEnd = state.pos;
hashFragment = state.src.slice(headerStart, headerEnd);
} else {
headerStart = -1;
headerEnd = -1;
}

// restore old state
state.pos = oldPos;

return { hashFragment, headerStart, headerEnd };
}

function findAlias(
state: StateInline,
start: number,
max: number
): { alias: string | undefined; aliasStart: number; aliasEnd: number } {
let aliasStart = -1,
aliasEnd = -1,
found = false,
foundStart = false,
alias = undefined;
const oldPos = state.pos;

state.pos = start + 2;

while (state.pos <= max) {
if (state.src.charCodeAt(state.pos) === 0x7c /* | (Alias Start) */) {
foundStart = true;
aliasStart = state.pos + 1;
}

if (
foundStart &&
state.src.charCodeAt(state.pos + 1) === 0x5d /* ] (Link End) */
) {
found = true;
break;
}

state.pos++;
}

if (found) {
aliasEnd = state.pos + 1;
alias = state.src.slice(aliasStart, aliasEnd);
}

// restore old state
state.pos = oldPos;

return { alias, aliasStart, aliasEnd };
}

export default function wikilinksPlugin(md: MarkdownIt): void {
md.inline.ruler.push("wikilinks", wikilinks);
}
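
For context, a minimal usage sketch of the plugin above, run through a bare markdown-it instance rather than the MarkdownTransformer; the page name, heading, and alias are made-up examples.

import MarkdownIt from "markdown-it";
import wikilinksPlugin from "./wikilinks";

const md = new MarkdownIt().use(wikilinksPlugin);

// The rule is pushed onto the inline ruler, so it only runs once the
// built-in link rule has failed to match the leading "[[".
// Expected output (roughly): <a href="wikilinks:Roadmap#Goals">Q3 goals</a>
console.log(md.renderInline("[[Roadmap#Goals|Q3 goals]]"));

The page name before the "#" becomes the wikilinks: href, the heading is carried along as a hash fragment, and the alias (if present) is what gets tokenized as the link text.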
67 changes: 66 additions & 1 deletion src/Publisher.ts
@@ -37,6 +37,7 @@ export interface AdfFile {
tags: string[];
pageId: string | undefined;
dontChangeParentPageId: boolean;
spaceKey?: string;
}

interface ConfluenceNode {
@@ -119,6 +120,56 @@ export class Publisher {
);
const confluencePagesToPublish = flattenTree(confluencePageTree);

const fileToPageIdMap: Record<string, AdfFile> = {};

confluencePagesToPublish.forEach((node) => {
if (!node.file.pageId) {
throw new Error("Missing Page ID");
}

fileToPageIdMap[node.file.fileName] = node.file;
});

confluencePagesToPublish.forEach((node) => {
node.file.contents = traverse(node.file.contents, {
text: (node, _parent) => {
if (
node.marks &&
node.marks[0].type === "link" &&
node.marks[0].attrs
) {
if (
typeof node.marks[0].attrs.href === "string" &&
node.marks[0].attrs.href.startsWith("wikilink")
) {
const wikilinkUrl = new URL(
node.marks[0].attrs.href
);
const pagename = `${wikilinkUrl.pathname}.md`;

const linkPage = fileToPageIdMap[pagename];
if (linkPage) {
const confluenceUrl = `${this.settings.confluenceBaseUrl}/wiki/spaces/${linkPage.spaceKey}/pages/${linkPage.pageId}${wikilinkUrl.hash}`;
node.marks[0].attrs.href = confluenceUrl;
if (node.text === wikilinkUrl.pathname) {
node.type = "inlineCard";
node.attrs = {
url: node.marks[0].attrs.href,
};
delete node.marks;
delete node.text;
return node;
}
} else {
node.marks.splice(0, 1); // drop the unresolved wikilink mark
}
return node;
}
}
},
}) as JSONDocNode;
});

const adrFileTasks = confluencePagesToPublish.map((file) => {
return this.publishFile(file);
});
@@ -168,10 +219,12 @@
topPageId
);
node.file.pageId = pageDetails.id;
node.file.spaceKey = pageDetails.spaceKey;
version = pageDetails.version;
existingAdf = pageDetails.existingAdf;
} else {
node.file.pageId = parentPageId;
node.file.spaceKey = spaceKey;
version = 0;
existingAdf = "";
}
@@ -212,14 +265,24 @@
const contentById =
await this.confluenceClient.content.getContentById({
id: file.pageId,
expand: ["version", "body.atlas_doc_format", "ancestors"],
expand: [
"version",
"body.atlas_doc_format",
"ancestors",
"space",
],
});

if (!contentById.space?.key) {
throw new Error("Missing Space Key");
}

return {
id: contentById.id,
title: file.pageTitle,
version: contentById?.version?.number ?? 1,
existingAdf: contentById?.body?.atlas_doc_format?.value,
spaceKey: contentById.space.key,
};
}

@@ -257,6 +320,7 @@
title: file.pageTitle,
version: currentPage?.version?.number ?? 1,
existingAdf: currentPage?.body?.atlas_doc_format?.value,
spaceKey,
};
} else {
console.log("Creating page");
@@ -287,6 +351,7 @@
title: file.pageTitle,
version: pageDetails?.version?.number ?? 1,
existingAdf: pageDetails?.body?.atlas_doc_format?.value,
spaceKey,
};
}
}
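
To make the new Publisher logic above concrete, here is a rough standalone sketch of the href rewrite, assuming fileToPageIdMap has already been populated from the publish tree; the page name, space key, page ID, and base URL below are invented.

// Hypothetical entry; in Publisher.ts the map is keyed by node.file.fileName.
const fileToPageIdMap: Record<string, { pageId: string; spaceKey: string }> = {
    "Roadmap.md": { pageId: "123456", spaceKey: "TEAM" },
};

function resolveWikilink(
    href: string,
    confluenceBaseUrl: string
): string | undefined {
    if (!href.startsWith("wikilinks:")) {
        return undefined;
    }
    // For an opaque URL like "wikilinks:Roadmap#Goals", pathname is the page
    // name and hash is the optional "#Goals" fragment.
    const wikilinkUrl = new URL(href);
    const linkPage = fileToPageIdMap[`${wikilinkUrl.pathname}.md`];
    if (!linkPage) {
        // Publisher strips the link mark instead, leaving plain text.
        return undefined;
    }
    return `${confluenceBaseUrl}/wiki/spaces/${linkPage.spaceKey}/pages/${linkPage.pageId}${wikilinkUrl.hash}`;
}

// resolveWikilink("wikilinks:Roadmap#Goals", "https://example.atlassian.net")
// → "https://example.atlassian.net/wiki/spaces/TEAM/pages/123456#Goals"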
25 changes: 10 additions & 15 deletions src/mdToADF.ts
@@ -3,8 +3,6 @@ import {
JSONTransformer,
} from "@atlaskit/editor-json-transformer";
import { MarkdownTransformer } from "./MarkdownTransformer";
import { ConfluenceTransformer } from "@atlaskit/editor-confluence-transformer";
import { confluenceSchema as schema } from "@atlaskit/adf-schema/schema-confluence";
import { traverse } from "@atlaskit/adf-utils/traverse";
import { MarkdownFile } from "./adaptors/types";
import { AdfFile } from "./Publisher";
@@ -15,40 +13,37 @@ const frontmatterRegex = /^\s*?---\n([\s\S]*?)\n---/g;
export default class MdToADF {
transformer: MarkdownTransformer;
serializer: JSONTransformer;
confluenceSerializer: ConfluenceTransformer;
constructor() {
this.transformer = new MarkdownTransformer();
this.serializer = new JSONTransformer();
this.confluenceSerializer = new ConfluenceTransformer(schema);
}

private parse(markdown: string) {
const prosenodes = this.transformer.parse(markdown);
const adfNodes = this.serializer.encode(prosenodes);
const nodes = this.replaceLinkWithInlineSmartCard(adfNodes);
const nodes = this.processADF(adfNodes);
return nodes;
}

private replaceLinkWithInlineSmartCard(adf: JSONDocNode): JSONDocNode {
private processADF(adf: JSONDocNode): JSONDocNode {
const olivia = traverse(adf, {
text: (node, _parent) => {
if (
node.marks &&
node.marks[0].type === "link" &&
node.marks[0].attrs &&
node.marks[0].attrs.href === node.text
node.marks[0].attrs
) {
node.type = "inlineCard";
node.attrs = { url: node.marks[0].attrs.href };
delete node.marks;
delete node.text;
return node;
if (node.marks[0].attrs.href === node.text) {
node.type = "inlineCard";
node.attrs = { url: node.marks[0].attrs.href };
delete node.marks;
delete node.text;
return node;
}
}
},
});

console.log({ textingReplacement: JSON.stringify(olivia) });

if (!olivia) {
throw new Error("Failed to traverse");
}
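
For illustration, a hedged before/after sketch of what the reworked processADF does with a text node whose link href equals its visible text (the URL is made up); wikilink-style links, whose "wikilinks:" href never equals the text, are now left as link marks for Publisher to resolve later.

// Before: a bare URL pasted into the note, where text === href.
const before = {
    type: "text",
    text: "https://example.atlassian.net/wiki/spaces/TEAM/pages/123456",
    marks: [
        {
            type: "link",
            attrs: { href: "https://example.atlassian.net/wiki/spaces/TEAM/pages/123456" },
        },
    ],
};

// After processADF: the node becomes an inline smart card.
const after = {
    type: "inlineCard",
    attrs: { url: "https://example.atlassian.net/wiki/spaces/TEAM/pages/123456" },
};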
