-
Notifications
You must be signed in to change notification settings - Fork 31
/
types.ts
90 lines (81 loc) · 2.23 KB
/
types.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import {
CurrentAttachments,
UploadedImageData,
uploadBuffer,
uploadFile,
} from "../Attachments";
import { JSONDocNode } from "@atlaskit/editor-json-transformer";
import { LoaderAdaptor, RequiredConfluenceClient } from "../adaptors";
/**
 * Narrow upload facade handed to ADF processing plugins so they can attach
 * content to the page being published without access to the full Confluence
 * client or loader adaptor.
 */
export interface PublisherFunctions {
	/**
	 * Uploads an in-memory buffer as a page attachment.
	 *
	 * @param uploadFilename - Attachment filename to store under.
	 * @param fileBuffer - Raw bytes to upload.
	 * @returns Upload metadata, or null — presumably when the upload is
	 *   skipped or fails; confirm against the Attachments implementation.
	 */
	uploadBuffer(
		uploadFilename: string,
		fileBuffer: Buffer
	): Promise<UploadedImageData | null>;
	/**
	 * Uploads a file (resolved via the loader adaptor) as a page attachment.
	 *
	 * @param fileNameToUpload - Name/path of the file to upload.
	 * @returns Upload metadata, or null (see uploadBuffer).
	 */
	uploadFile(fileNameToUpload: string): Promise<UploadedImageData | null>;
}
/**
 * Three-phase plugin contract for transforming an ADF (Atlassian Document
 * Format) document before publishing.
 *
 * @typeParam E - Items extracted from the document (phase 1 output).
 * @typeParam T - Result of asynchronously transforming those items
 *   (phase 2 output), consumed by the load phase.
 */
export interface ADFProcessingPlugin<E, T> {
	/** Synchronously pulls the data this plugin cares about out of the ADF. */
	extract(adf: JSONDocNode, supportFunctions: PublisherFunctions): E;
	/** Asynchronously processes the extracted items (e.g. performing uploads). */
	transform(items: E, supportFunctions: PublisherFunctions): Promise<T>;
	/** Synchronously applies the transformed items back onto the ADF, returning the updated document. */
	load(
		adf: JSONDocNode,
		transformedItems: T,
		supportFunctions: PublisherFunctions
	): JSONDocNode;
}
/**
 * Builds the PublisherFunctions facade for a single page, closing over the
 * Confluence client, loader adaptor, page identity, and the attachments
 * already known to exist on the page.
 *
 * @param confluenceClient - Client used to perform the uploads.
 * @param adaptor - Loader adaptor used to resolve file contents.
 * @param pageId - Confluence page the attachments belong to.
 * @param pageFilePath - Source file path of the page being published.
 * @param currentAttachments - Attachments already present on the page.
 * @returns An object exposing uploadFile/uploadBuffer bound to this page.
 */
export function createPublisherFunctions(
	confluenceClient: RequiredConfluenceClient,
	adaptor: LoaderAdaptor,
	pageId: string,
	pageFilePath: string,
	currentAttachments: CurrentAttachments
): PublisherFunctions {
	// Delegates to the Attachments helpers, forwarding the captured context.
	const boundUploadFile = async (
		fileNameToUpload: string
	): Promise<UploadedImageData | null> =>
		uploadFile(
			confluenceClient,
			adaptor,
			pageId,
			pageFilePath,
			fileNameToUpload,
			currentAttachments
		);

	const boundUploadBuffer = async (
		uploadFilename: string,
		fileBuffer: Buffer
	): Promise<UploadedImageData | null> =>
		uploadBuffer(
			confluenceClient,
			pageId,
			uploadFilename,
			fileBuffer,
			currentAttachments
		);

	return {
		uploadFile: boundUploadFile,
		uploadBuffer: boundUploadBuffer,
	};
}
/**
 * Runs every plugin's extract → transform → load pipeline over an ADF
 * document.
 *
 * Extraction is synchronous and runs plugin-by-plugin; the transform phase
 * runs all plugins concurrently via Promise.all; the load phase is applied
 * sequentially, each plugin receiving the document produced by the previous
 * plugin's load.
 *
 * @param plugins - Plugins to run, in order.
 * @param adf - The starting ADF document.
 * @param supportFunctions - Upload facade passed to every phase.
 * @returns The ADF document after all plugins have loaded their results.
 */
export async function executeADFProcessingPipeline(
	plugins: ADFProcessingPlugin<unknown, unknown>[],
	adf: JSONDocNode,
	supportFunctions: PublisherFunctions
): Promise<JSONDocNode> {
	// Phase 1: synchronous extraction, one result per plugin (index-aligned).
	const extracted = plugins.map((plugin) =>
		plugin.extract(adf, supportFunctions)
	);

	// Phase 2: all transforms in flight at once; order preserved by index.
	const transformed = await Promise.all(
		plugins.map((plugin, i) => plugin.transform(extracted[i], supportFunctions))
	);

	// Phase 3: thread the document through each plugin's load in order.
	let currentAdf = adf;
	plugins.forEach((plugin, i) => {
		currentAdf = plugin.load(currentAdf, transformed[i], supportFunctions);
	});
	return currentAdf;
}