// utils.node.ts
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// One mebibyte, in bytes.
const SIZE_ONE_MEGA = 1024 * 1024;
/**
 * Reads a readable stream into a buffer entirely. NodeJS only.
 *
 * @param stream - A Node.js Readable stream
 * @param maxSize - Maximum number of bytes to accept before rejecting
 * @returns The resultant buffer containing the entire stream contents.
 * @throws If the stream yields more than `maxSize` bytes.
 */
export async function streamToBuffer(
  stream: NodeJS.ReadableStream,
  maxSize: number
): Promise<Buffer> {
  return new Promise<Buffer>((resolve, reject) => {
    const chunks: Buffer[] = [];
    let total = 0; // Bytes received so far
    stream.on("readable", () => {
      // Per the Node.js stream docs, read() must be called in a loop until
      // it returns null — a single "readable" event may cover several
      // buffered chunks.
      let chunk;
      while ((chunk = stream.read())) {
        // read() yields strings when an encoding is set on the stream;
        // normalize to Buffer so length is counted in bytes.
        const part = Buffer.isBuffer(chunk) ? chunk : Buffer.from(String(chunk));
        total += part.length;
        if (total > maxSize) {
          reject(new Error(`Input stream exceeds maximum allowed size: ${maxSize}`));
          return;
        }
        chunks.push(part);
      }
    });
    // Concatenate once at the end: avoids the repeated alloc/copy of a
    // manually grown buffer, never over-allocates past the actual size,
    // and returns a buffer that does not alias a larger backing store.
    stream.on("end", () => resolve(Buffer.concat(chunks, total)));
    stream.on("error", reject);
  });
}
/**
 * Stub for the Blob-reading helper: Blob is a browser API and is not
 * supported by this Node.js build of the module.
 *
 * @param _data - Unused; presumably mirrors the signature of a browser
 *   counterpart (e.g. utils.browser.ts) — TODO confirm.
 * @throws Always throws synchronously (does not return a rejected promise).
 */
export function getFirstFourBytesFromBlob(_data: Blob): Promise<Uint8Array> {
  throw new Error("Blob is not supported in NodeJS environment");
}