diff --git a/io/mod.ts b/io/mod.ts
index c172819dfae3..393a7856f16a 100644
--- a/io/mod.ts
+++ b/io/mod.ts
@@ -24,6 +24,7 @@ export * from "./read_long.ts";
 export * from "./read_range.ts";
 export * from "./read_short.ts";
 export * from "./read_string_delim.ts";
+export * from "./reader_from_stream_reader.ts";
 export * from "./slice_long_to_bytes.ts";
 export * from "./string_reader.ts";
 export * from "./string_writer.ts";
diff --git a/io/reader_from_stream_reader.ts b/io/reader_from_stream_reader.ts
new file mode 100644
index 000000000000..4817650bae00
--- /dev/null
+++ b/io/reader_from_stream_reader.ts
@@ -0,0 +1,42 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// This module is browser compatible.
+
+import { Buffer } from "./buffer.ts";
+import { writeAll } from "./write_all.ts";
+import type { Reader } from "./types.ts";
+
+/**
+ * Create a {@linkcode Reader} from a {@linkcode ReadableStreamDefaultReader}.
+ *
+ * @example
+ * ```ts
+ * import { copy } from "https://deno.land/std@$STD_VERSION/io/copy.ts";
+ * import { readerFromStreamReader } from "https://deno.land/std@$STD_VERSION/io/reader_from_stream_reader.ts";
+ *
+ * const res = await fetch("https://deno.land");
+ * using file = await Deno.open("./deno.land.html", { create: true, write: true });
+ *
+ * const reader = readerFromStreamReader(res.body!.getReader());
+ * await copy(reader, file);
+ * ```
+ */
+export function readerFromStreamReader(
+  streamReader: ReadableStreamDefaultReader<Uint8Array>,
+): Reader {
+  const buffer = new Buffer();
+
+  return {
+    async read(p: Uint8Array): Promise<number | null> {
+      if (buffer.empty()) {
+        const res = await streamReader.read();
+        if (res.done) {
+          return null; // EOF
+        }
+
+        await writeAll(buffer, res.value);
+      }
+
+      return buffer.read(p);
+    },
+  };
+}
diff --git a/io/reader_from_stream_reader_test.ts b/io/reader_from_stream_reader_test.ts
new file mode 100644
index 000000000000..ceead382e1ad
--- /dev/null
+++ b/io/reader_from_stream_reader_test.ts
@@ -0,0 +1,94 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+import { assert, assertEquals } from "../assert/mod.ts";
+import { copy } from "./copy.ts";
+import { readerFromStreamReader } from "./reader_from_stream_reader.ts";
+import { Buffer } from "./buffer.ts";
+
+function repeat(c: string, bytes: number): Uint8Array {
+  assertEquals(c.length, 1);
+  const ui8 = new Uint8Array(bytes);
+  ui8.fill(c.charCodeAt(0));
+  return ui8;
+}
+
+Deno.test("[streams] readerFromStreamReader()", async function () {
+  const chunks: string[] = ["hello", "deno", "land"];
+  const expected = chunks.slice();
+  const readChunks: Uint8Array[] = [];
+  const readableStream = ReadableStream.from(chunks)
+    .pipeThrough(new TextEncoderStream());
+
+  const decoder = new TextDecoder();
+  const reader = readerFromStreamReader(readableStream.getReader());
+
+  let i = 0;
+
+  while (true) {
+    const b = new Uint8Array(1024);
+    const n = await reader.read(b);
+
+    if (n === null) break;
+
+    readChunks.push(b.subarray(0, n));
+    assert(i < expected.length);
+
+    i++;
+  }
+
+  assertEquals(
+    expected,
+    readChunks.map((chunk) => decoder.decode(chunk)),
+  );
+});
+
+Deno.test("[streams] readerFromStreamReader() big chunks", async function () {
+  const bufSize = 1024;
+  const chunkSize = 3 * bufSize;
+  const writer = new Buffer();
+
+  // A readable stream can enqueue chunks bigger than Copy bufSize
+  // Reader returned by toReader should enqueue exceeding bytes
+  const chunks: string[] = [
+    "a".repeat(chunkSize),
+    "b".repeat(chunkSize),
+    "c".repeat(chunkSize),
+  ];
+  const expected = chunks.slice();
+  const readableStream = ReadableStream.from(chunks)
+    .pipeThrough(new TextEncoderStream());
+
+  const reader = readerFromStreamReader(readableStream.getReader());
+  const n = await copy(reader, writer, { bufSize });
+
+  const expectedWritten = chunkSize * expected.length;
+  assertEquals(n, chunkSize * expected.length);
+  assertEquals(writer.length, expectedWritten);
+});
+
+Deno.test("[streams] readerFromStreamReader() irregular chunks", async function () {
+  const bufSize = 1024;
+  const chunkSize = 3 * bufSize;
+  const writer = new Buffer();
+
+  // A readable stream can enqueue chunks bigger than Copy bufSize
+  // Reader returned by toReader should enqueue exceeding bytes
+  const chunks: Uint8Array[] = [
+    repeat("a", chunkSize),
+    repeat("b", chunkSize + 253),
+    repeat("c", chunkSize + 8),
+  ];
+  const expected = new Uint8Array(
+    chunks
+      .slice()
+      .map((chunk) => [...chunk])
+      .flat(),
+  );
+  const readableStream = ReadableStream.from(chunks);
+
+  const reader = readerFromStreamReader(readableStream.getReader());
+
+  const n = await copy(reader, writer, { bufSize });
+  assertEquals(n, expected.length);
+  assertEquals(expected, writer.bytes());
+});
diff --git a/streams/reader_from_stream_reader.ts b/streams/reader_from_stream_reader.ts
index 54142cb6e9fc..28571361b5cb 100644
--- a/streams/reader_from_stream_reader.ts
+++ b/streams/reader_from_stream_reader.ts
@@ -1,8 +1,7 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 // This module is browser compatible.
 
-import { Buffer } from "../io/buffer.ts";
-import { writeAll } from "../io/write_all.ts";
+import { readerFromStreamReader as _readerFromStreamReader } from "../io/reader_from_stream_reader.ts";
 import type { Reader } from "../io/types.ts";
 
 /**
@@ -20,25 +19,10 @@ import type { Reader } from "../io/types.ts";
  * await copy(reader, file);
  * ```
  *
- * @deprecated (will be removed after 1.0.0) Use {@linkcode ReadableStreamDefaultReader} directly.
+ * @deprecated (will be removed in 1.0.0) Import from {@link https://deno.land/std/io/reader_from_stream_reader.ts} instead.
  */
 export function readerFromStreamReader(
   streamReader: ReadableStreamDefaultReader<Uint8Array>,
 ): Reader {
-  const buffer = new Buffer();
-
-  return {
-    async read(p: Uint8Array): Promise<number | null> {
-      if (buffer.empty()) {
-        const res = await streamReader.read();
-        if (res.done) {
-          return null; // EOF
-        }
-
-        await writeAll(buffer, res.value);
-      }
-
-      return buffer.read(p);
-    },
-  };
+  return _readerFromStreamReader(streamReader);
 }