-
Notifications
You must be signed in to change notification settings - Fork 571
/
reader_from_stream_reader_test.ts
114 lines (94 loc) · 3.19 KB
/
reader_from_stream_reader_test.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
import { assert, assertEquals } from "../testing/asserts.ts";
import { copy } from "./copy.ts";
import { readerFromStreamReader } from "./reader_from_stream_reader.ts";
import { Buffer } from "../io/buffer.ts";
/**
 * Builds a `Uint8Array` of length `bytes`, every element set to the char
 * code of `c`. `c` must be exactly one character (enforced via assertEquals,
 * so a bad call fails the test rather than silently misbehaving).
 */
function repeat(c: string, bytes: number): Uint8Array {
  assertEquals(c.length, 1);
  const buf = new Uint8Array(bytes).fill(c.charCodeAt(0));
  return buf;
}
Deno.test("[streams] readerFromStreamReader()", async function () {
  const chunks: string[] = ["hello", "deno", "land"];
  const expected = chunks.slice();
  const readChunks: Uint8Array[] = [];
  // Stream that emits each queued chunk once, then closes.
  const readableStream = new ReadableStream({
    pull(controller) {
      const encoder = new TextEncoder();
      const chunk = chunks.shift();
      if (!chunk) return controller.close();
      controller.enqueue(encoder.encode(chunk));
    },
  });
  const decoder = new TextDecoder();
  const reader = readerFromStreamReader(readableStream.getReader());
  let i = 0;
  while (true) {
    const b = new Uint8Array(1024);
    const n = await reader.read(b);
    if (n === null) break; // EOF
    readChunks.push(b.subarray(0, n));
    // Guard: we must never read more chunks than were enqueued.
    assert(i < expected.length);
    i++;
  }
  // assertEquals takes (actual, expected); the original call had the
  // arguments swapped, which yields misleading failure diffs.
  assertEquals(
    readChunks.map((chunk) => decoder.decode(chunk)),
    expected,
  );
});
Deno.test("[streams] readerFromStreamReader() big chunks", async function () {
  const bufSize = 1024;
  const chunkSize = 3 * bufSize;
  const writer = new Buffer();
  // A readable stream can enqueue chunks bigger than the copy bufSize.
  // The reader returned by readerFromStreamReader must buffer the
  // exceeding bytes and hand them out on subsequent reads.
  const chunks: string[] = [
    "a".repeat(chunkSize),
    "b".repeat(chunkSize),
    "c".repeat(chunkSize),
  ];
  const expected = chunks.slice();
  const readableStream = new ReadableStream({
    pull(controller) {
      const encoder = new TextEncoder();
      const chunk = chunks.shift();
      if (!chunk) return controller.close();
      controller.enqueue(encoder.encode(chunk));
    },
  });
  const reader = readerFromStreamReader(readableStream.getReader());
  const n = await copy(reader, writer, { bufSize });
  // Use the named constant in both assertions; the original recomputed
  // the same expression inline on one of them, which was inconsistent.
  const expectedWritten = chunkSize * expected.length;
  assertEquals(n, expectedWritten);
  assertEquals(writer.length, expectedWritten);
});
Deno.test("[streams] readerFromStreamReader() irregular chunks", async function () {
  const bufSize = 1024;
  const chunkSize = 3 * bufSize;
  const writer = new Buffer();
  // A readable stream can enqueue chunks bigger than the copy bufSize.
  // The reader returned by readerFromStreamReader must buffer the
  // exceeding bytes; chunk sizes here are deliberately not multiples
  // of bufSize to exercise carry-over across reads.
  const chunks: Uint8Array[] = [
    repeat("a", chunkSize),
    repeat("b", chunkSize + 253),
    repeat("c", chunkSize + 8),
  ];
  // Flatten the chunks into one contiguous expected byte sequence.
  const expected = new Uint8Array(
    chunks
      .slice()
      .map((chunk) => [...chunk])
      .flat(),
  );
  const readableStream = new ReadableStream({
    pull(controller) {
      const chunk = chunks.shift();
      if (!chunk) return controller.close();
      controller.enqueue(chunk);
    },
  });
  const reader = readerFromStreamReader(readableStream.getReader());
  const n = await copy(reader, writer, { bufSize });
  assertEquals(n, expected.length);
  // assertEquals takes (actual, expected); the original call had the
  // arguments swapped, which yields misleading failure diffs.
  assertEquals(writer.bytes(), expected);
});