Skip to content

Commit

Permalink
feat: Move ImageUpload and MermaidRendering to plugins to allow for more plugins easily
Browse files Browse the repository at this point in the history
  • Loading branch information
andymac4182 committed May 11, 2023
1 parent 2a61181 commit cfae670
Show file tree
Hide file tree
Showing 12 changed files with 446 additions and 284 deletions.
11 changes: 4 additions & 7 deletions packages/cli/src/index.ts
Expand Up @@ -8,6 +8,7 @@ import {
AutoSettingsLoader,
FileSystemAdaptor,
Publisher,
MermaidRendererPlugin,
} from "@markdown-confluence/lib";
import { PuppeteerMermaidRenderer } from "@markdown-confluence/mermaid-puppeteer-renderer";
import { ConfluenceClient } from "confluence.js";
Expand All @@ -18,7 +19,6 @@ async function main() {
const settings = settingLoader.load();

const adaptor = new FileSystemAdaptor(settings); // Make sure this is identical as possible between Obsidian and CLI
const mermaidRenderer = new PuppeteerMermaidRenderer();
const confluenceClient = new ConfluenceClient({
host: settings.confluenceBaseUrl,
authentication: {
Expand All @@ -30,12 +30,9 @@ async function main() {
newErrorHandling: true,
});

const publisher = new Publisher(
adaptor,
settingLoader,
confluenceClient,
mermaidRenderer
);
const publisher = new Publisher(adaptor, settingLoader, confluenceClient, [
new MermaidRendererPlugin(new PuppeteerMermaidRenderer()),
]);

const publishFilter = "";
const results = await publisher.publish(publishFilter);
Expand Down
95 changes: 95 additions & 0 deletions packages/lib/src/ADFProcessingPlugins/ImageUploaderPlugin.ts
@@ -0,0 +1,95 @@
import { filter, traverse } from "@atlaskit/adf-utils/traverse";
import { UploadedImageData } from "../Attachments";
import { JSONDocNode } from "@atlaskit/editor-json-transformer";
import { ADFEntity } from "@atlaskit/adf-utils/dist/types/types";
import { p } from "@atlaskit/adf-utils/builders";
import { ADFProcessingPlugin, PublisherFunctions } from "./types";

/**
 * ADF processing plugin that uploads locally-referenced images ("file://"-style
 * media nodes) as Confluence attachments and rewrites the media nodes to point
 * at the uploaded attachments.
 *
 * Pipeline contract (see ADFProcessingPlugin):
 *  - extract: collect the distinct upload URLs from file-type media nodes
 *  - transform: upload each file, mapping URL -> upload result (null on failure)
 *  - load: rewrite media nodes with attachment details; replace any mediaSingle
 *    still pointing at a file:// url with a visible error paragraph
 */
export const ImageUploaderPlugin: ADFProcessingPlugin<
	string[],
	Record<string, UploadedImageData | null>
> = {
	extract(adf: JSONDocNode): string[] {
		const mediaNodes = filter(
			adf,
			(node) =>
				node.type === "media" && (node.attrs || {})["type"] === "file"
		);

		// Deduplicate, and drop nodes with no usable url attribute so the
		// returned array really is string[] (the original could leak undefined
		// into it when a media node had type "file" but no url).
		const imagesToUpload = new Set<string>();
		for (const node of mediaNodes) {
			const url = node?.attrs?.["url"];
			if (typeof url === "string") {
				imagesToUpload.add(url);
			}
		}

		return Array.from(imagesToUpload);
	},

	async transform(
		imagesToUpload: string[],
		supportFunctions: PublisherFunctions
	): Promise<Record<string, UploadedImageData | null>> {
		const imageMap: Record<string, UploadedImageData | null> = {};

		for (const imageUrl of imagesToUpload) {
			// Strip the scheme ("file://", etc.) to get the path to upload.
			const filename = imageUrl.split("://")[1];
			if (!filename) {
				continue;
			}
			// Sequential on purpose; direct assignment instead of the original
			// O(n²) spread-rebuild of the accumulator object.
			imageMap[imageUrl] = await supportFunctions.uploadFile(filename);
		}

		return imageMap;
	},

	load(
		adf: JSONDocNode,
		imageMap: Record<string, UploadedImageData | null>
	): JSONDocNode {
		let afterAdf = adf as ADFEntity;

		// Pass 1: rewrite file-type media nodes with their uploaded attachment
		// details and drop the now-redundant url attribute.
		afterAdf =
			traverse(afterAdf, {
				media: (node, _parent) => {
					if (node?.attrs?.["type"] !== "file") {
						return;
					}
					const mappedImage = imageMap[node.attrs["url"]];
					if (!mappedImage) {
						return;
					}
					node.attrs["collection"] = mappedImage.collection;
					node.attrs["id"] = mappedImage.id;
					node.attrs["width"] = mappedImage.width;
					node.attrs["height"] = mappedImage.height;
					delete node.attrs["url"];
					return node;
				},
			}) || afterAdf;

		// Pass 2: any mediaSingle still carrying a file:// url failed to
		// upload; surface that to the reader instead of publishing a broken
		// reference. (typeof guard also avoids calling startsWith on a
		// non-string attr, which the original's "as string" cast allowed.)
		afterAdf =
			traverse(afterAdf, {
				mediaSingle: (node, _parent) => {
					const url = node?.content?.at(0)?.attrs?.["url"];
					if (typeof url === "string" && url.startsWith("file://")) {
						return p("Invalid Image Path");
					}
					return;
				},
			}) || afterAdf;

		return afterAdf as JSONDocNode;
	},
};
133 changes: 133 additions & 0 deletions packages/lib/src/ADFProcessingPlugins/MermaidRendererPlugin.ts
@@ -0,0 +1,133 @@
import { filter, traverse } from "@atlaskit/adf-utils/traverse";
import { UploadedImageData } from "../Attachments";
import { JSONDocNode } from "@atlaskit/editor-json-transformer";
import { ADFProcessingPlugin, PublisherFunctions } from "./types";
import { ADFEntity } from "@atlaskit/adf-utils/types";
import SparkMD5 from "spark-md5";

/**
 * Derive a deterministic attachment filename for a mermaid chart.
 *
 * Missing chart text falls back to a placeholder diagram so every code block
 * still renders to something. The filename embeds an MD5 of the chart source,
 * so identical charts map to the same attachment.
 */
export function getMermaidFileName(mermaidContent: string | undefined) {
	const mermaidText = mermaidContent ?? "flowchart LR\nid1[Missing Chart]";
	const uploadFilename = `RenderedMermaidChart-${SparkMD5.hash(
		mermaidText
	)}.png`;
	return { uploadFilename, mermaidText };
}

/** A single mermaid chart queued for rendering. */
export interface ChartData {
	// Attachment filename the rendered image will be uploaded under
	// (derived from the chart source hash — see getMermaidFileName).
	name: string;
	// Raw mermaid source text for the chart.
	data: string;
}

/**
 * Renders mermaid chart sources to image buffers. The returned map is keyed
 * by chart name (transform() uploads each entry under its map key).
 */
export interface MermaidRenderer {
	captureMermaidCharts(charts: ChartData[]): Promise<Map<string, Buffer>>;
}

/**
 * ADF processing plugin that renders mermaid code blocks to images via the
 * injected renderer, uploads them as attachments, and swaps each code block
 * for a centered media node.
 */
export class MermaidRendererPlugin
	implements
		ADFProcessingPlugin<
			ChartData[],
			Record<string, UploadedImageData | null>
		>
{
	constructor(private mermaidRenderer: MermaidRenderer) {}

	/**
	 * Collect every mermaid code block as ChartData, deduplicated by upload
	 * filename (a hash of the chart source). The original used a Set of fresh
	 * object literals, which never deduplicates — objects compare by
	 * reference — so identical charts were rendered and uploaded repeatedly.
	 */
	extract(adf: JSONDocNode): ChartData[] {
		const mermaidNodes = filter(
			adf,
			(node) =>
				node.type === "codeBlock" &&
				(node.attrs || {})["language"] === "mermaid"
		);

		const chartsByName = new Map<string, ChartData>();
		for (const node of mermaidNodes) {
			const { uploadFilename, mermaidText } = getMermaidFileName(
				node?.content?.at(0)?.text
			);
			chartsByName.set(uploadFilename, {
				name: uploadFilename,
				data: mermaidText,
			});
		}

		return Array.from(chartsByName.values());
	}

	/**
	 * Render all charts in a single renderer call, then upload each resulting
	 * image buffer. Returns a map from upload filename to upload result
	 * (null when the upload failed).
	 */
	async transform(
		mermaidNodesToUpload: ChartData[],
		supportFunctions: PublisherFunctions
	): Promise<Record<string, UploadedImageData | null>> {
		const imageMap: Record<string, UploadedImageData | null> = {};
		if (mermaidNodesToUpload.length === 0) {
			return imageMap;
		}

		const mermaidChartsAsImages =
			await this.mermaidRenderer.captureMermaidCharts([
				...mermaidNodesToUpload,
			]);

		// Sequential uploads on purpose; direct assignment instead of the
		// original O(n²) spread-rebuild of the accumulator object.
		for (const [filename, imageBuffer] of mermaidChartsAsImages) {
			imageMap[filename] = await supportFunctions.uploadBuffer(
				filename,
				imageBuffer
			);
		}

		return imageMap;
	}

	/**
	 * Replace each mermaid code block whose chart uploaded successfully with
	 * a centered media node referencing the attachment. Blocks whose chart is
	 * missing from imageMap are left untouched.
	 */
	load(
		adf: JSONDocNode,
		imageMap: Record<string, UploadedImageData | null>
	): JSONDocNode {
		let afterAdf = adf as ADFEntity;

		afterAdf =
			traverse(afterAdf, {
				codeBlock: (node, _parent) => {
					if (node?.attrs?.["language"] !== "mermaid") {
						return;
					}
					const mermaidContent = node?.content?.at(0)?.text;
					if (!mermaidContent) {
						return;
					}
					// Re-derive the filename to look up this chart's upload.
					const { uploadFilename } =
						getMermaidFileName(mermaidContent);

					const mappedImage = imageMap[uploadFilename];
					if (!mappedImage) {
						return;
					}
					node.type = "mediaSingle";
					node.attrs["layout"] = "center";
					if (node.content) {
						node.content = [
							{
								type: "media",
								attrs: {
									type: "file",
									collection: mappedImage.collection,
									id: mappedImage.id,
									width: mappedImage.width,
									height: mappedImage.height,
								},
							},
						];
					}
					delete node.attrs["language"];
					return node;
				},
			}) || afterAdf;

		return afterAdf as JSONDocNode;
	}
}
6 changes: 6 additions & 0 deletions packages/lib/src/ADFProcessingPlugins/index.ts
@@ -0,0 +1,6 @@
import { ImageUploaderPlugin } from "./ImageUploaderPlugin";

export * from "./types";
export * from "./MermaidRendererPlugin";

// Plugins applied on every publish run, regardless of caller configuration.
// (MermaidRendererPlugin is not listed: it needs a renderer injected, so
// callers construct it themselves — see the CLI's Publisher setup.)
export const AlwaysADFProcessingPlugins = [ImageUploaderPlugin];
90 changes: 90 additions & 0 deletions packages/lib/src/ADFProcessingPlugins/types.ts
@@ -0,0 +1,90 @@
import {
CurrentAttachments,
UploadedImageData,
uploadBuffer,
uploadFile,
} from "../Attachments";
import { JSONDocNode } from "@atlaskit/editor-json-transformer";
import { LoaderAdaptor, RequiredConfluenceClient } from "../adaptors";

/**
 * Narrow upload surface handed to ADF processing plugins, pre-bound to the
 * page being published (see createPublisherFunctions).
 */
export interface PublisherFunctions {
	// Upload an in-memory buffer as a page attachment under the given name.
	uploadBuffer(
		uploadFilename: string,
		fileBuffer: Buffer
	): Promise<UploadedImageData | null>;
	// Upload a file (read via the adaptor) as a page attachment.
	uploadFile(fileNameToUpload: string): Promise<UploadedImageData | null>;
}

/**
 * Three-phase plugin for rewriting an ADF document during publishing:
 * extract items of interest from the document, transform them asynchronously
 * (e.g. uploads or rendering), then load the results back into the document.
 * Phases are driven by executeADFProcessingPipeline.
 *
 * @typeParam E - items extracted from the document
 * @typeParam T - result of transforming those items
 */
export interface ADFProcessingPlugin<E, T> {
	extract(adf: JSONDocNode, supportFunctions: PublisherFunctions): E;
	transform(items: E, supportFunctions: PublisherFunctions): Promise<T>;
	load(
		adf: JSONDocNode,
		transformedItems: T,
		supportFunctions: PublisherFunctions
	): JSONDocNode;
}

/**
 * Bind the generic upload helpers to one specific page, so plugins can upload
 * attachments without knowing about the Confluence client, adaptor, or page
 * identity.
 */
export function createPublisherFunctions(
	confluenceClient: RequiredConfluenceClient,
	adaptor: LoaderAdaptor,
	pageId: string,
	pageFilePath: string,
	currentAttachments: CurrentAttachments
): PublisherFunctions {
	// Each closure captures the page context; callers only supply the
	// file name (and, for buffers, the content).
	const boundUploadFile = (
		fileNameToUpload: string
	): Promise<UploadedImageData | null> =>
		uploadFile(
			confluenceClient,
			adaptor,
			pageId,
			pageFilePath,
			fileNameToUpload,
			currentAttachments
		);

	const boundUploadBuffer = (
		uploadFilename: string,
		fileBuffer: Buffer
	): Promise<UploadedImageData | null> =>
		uploadBuffer(
			confluenceClient,
			pageId,
			uploadFilename,
			fileBuffer,
			currentAttachments
		);

	return {
		uploadFile: boundUploadFile,
		uploadBuffer: boundUploadBuffer,
	};
}

/**
 * Run every plugin's extract → transform → load phases over an ADF document.
 *
 * Extraction is synchronous and runs per plugin (the original comment claimed
 * "in parallel", which was wrong — Array.map is sequential and sync).
 * Transforms, which may do I/O (uploads, rendering), run concurrently via
 * Promise.all. Loads then fold each plugin's results back into the document
 * in the order the plugins were supplied.
 *
 * @param plugins pipeline stages to apply
 * @param adf the document to process
 * @param supportFunctions page-bound upload helpers passed to every phase
 * @returns the document after all plugins have been applied
 */
export async function executeADFProcessingPipeline(
	plugins: ADFProcessingPlugin<unknown, unknown>[],
	adf: JSONDocNode,
	supportFunctions: PublisherFunctions
): Promise<JSONDocNode> {
	// Phase 1: synchronously extract each plugin's items of interest.
	const extractedData = plugins.map((plugin) =>
		plugin.extract(adf, supportFunctions)
	);

	// Phase 2: transform concurrently — transforms are independent of each
	// other; a single rejection rejects the whole pipeline.
	const transformedData = await Promise.all(
		plugins.map((plugin, index) =>
			plugin.transform(extractedData[index], supportFunctions)
		)
	);

	// Phase 3: apply each plugin's transformed data to the document, in
	// plugin order, threading the result through.
	return plugins.reduce(
		(accADF, plugin, index) =>
			plugin.load(accADF, transformedData[index], supportFunctions),
		adf
	);
}
14 changes: 10 additions & 4 deletions packages/lib/src/Attachments.ts
Expand Up @@ -13,6 +13,15 @@ export interface UploadedImageData {
status: ConfluenceImageStatus;
}

// Existing attachments on the target page, keyed by attachment filename.
// NOTE(review): presumably used by uploadFile/uploadBuffer to skip
// re-uploading unchanged content (filehash suggests content-based dedupe) —
// uploadFile's body is not visible here; confirm against Attachments.ts.
export type CurrentAttachments = Record<
	string,
	{
		filehash: string;
		attachmentId: string;
		collectionName: string;
	}
>;

export async function uploadBuffer(
confluenceClient: RequiredConfluenceClient,
pageId: string,
Expand Down Expand Up @@ -78,10 +87,7 @@ export async function uploadFile(
pageId: string,
pageFilePath: string,
fileNameToUpload: string,
currentAttachments: Record<
string,
{ filehash: string; attachmentId: string; collectionName: string }
>
currentAttachments: CurrentAttachments
): Promise<UploadedImageData | null> {
let fileNameForUpload = fileNameToUpload;
let testing = await adaptor.readBinary(fileNameForUpload, pageFilePath);
Expand Down

0 comments on commit cfae670

Please sign in to comment.