diff --git a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js
index f63f82333116e..d59f298f99496 100644
--- a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js
+++ b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js
@@ -10,11 +10,11 @@
 
 'use strict';
 
+import fs from 'fs';
+import os from 'os';
+import path from 'path';
 import {patchSetImmediate} from '../../../../scripts/jest/patchSetImmediate';
 
-global.ReadableStream =
-  require('web-streams-polyfill/ponyfill/es6').ReadableStream;
-
 let clientExports;
 let webpackMap;
 let webpackModules;
@@ -1136,4 +1136,37 @@ describe('ReactFlightDOMNode', () => {
       'Switched to client rendering because the server rendering errored:\n\nssr-throw',
     );
   });
+
+  // This is a regression test for a specific issue where byte Web Streams are
+  // detaching ArrayBuffers, which caused downstream issues (e.g. "Cannot
+  // perform Construct on a detached ArrayBuffer") for chunks that are using
+  // Node's internal Buffer pool.
+  it('should not corrupt the Node.js Buffer pool by detaching ArrayBuffers when using Web Streams', async () => {
+    // Create a temp file smaller than 4KB to ensure it uses the Buffer pool.
+    const file = path.join(os.tmpdir(), 'test.bin');
+    fs.writeFileSync(file, Buffer.alloc(4095));
+    const fileChunk = fs.readFileSync(file);
+    fs.unlinkSync(file);
+
+    // Verify this chunk uses the Buffer pool (8192 bytes for files < 4KB).
+    expect(fileChunk.buffer.byteLength).toBe(8192);
+
+    const readable = await serverAct(() =>
+      ReactServerDOMServer.renderToReadableStream(fileChunk, webpackMap),
+    );
+
+    // Create a Web Streams WritableStream that tries to use Buffer operations.
+    const writable = new WritableStream({
+      write(chunk) {
+        // Only write one byte to ensure Node.js is not creating a new Buffer
+        // pool. Typically, library code (e.g. a compression middleware) would
+        // call Buffer.from(chunk) or similar, instead of allocating a new
+        // Buffer directly. With that, the test file could only be ~2600 bytes.
+        Buffer.allocUnsafe(1);
+      },
+    });
+
+    // Must not throw an error.
+    await readable.pipeTo(writable);
+  });
 });
diff --git a/packages/react-server/src/ReactServerStreamConfigEdge.js b/packages/react-server/src/ReactServerStreamConfigEdge.js
index dbe6d6f90cab1..90affdc6b8aca 100644
--- a/packages/react-server/src/ReactServerStreamConfigEdge.js
+++ b/packages/react-server/src/ReactServerStreamConfigEdge.js
@@ -37,7 +37,11 @@ export function flushBuffered(destination: Destination) {
   // transform streams. https://github.com/whatwg/streams/issues/960
 }
 
-const VIEW_SIZE = 2048;
+// Chunks larger than VIEW_SIZE are written directly, without copying into the
+// internal view buffer. This must be at least half of Node's internal Buffer
+// pool size (8192) to avoid corrupting the pool when using
+// renderToReadableStream, which uses a byte stream that detaches ArrayBuffers.
+const VIEW_SIZE = 4096;
 let currentView = null;
 let writtenBytes = 0;
 
@@ -147,14 +151,7 @@ export function typedArrayToBinaryChunk(
   // If we passed through this straight to enqueue we wouldn't have to convert it but since
   // we need to copy the buffer in that case, we need to convert it to copy it.
   // When we copy it into another array using set() it needs to be a Uint8Array.
-  const buffer = new Uint8Array(
-    content.buffer,
-    content.byteOffset,
-    content.byteLength,
-  );
-  // We clone large chunks so that we can transfer them when we write them.
-  // Others get copied into the target buffer.
-  return content.byteLength > VIEW_SIZE ? buffer.slice() : buffer;
+  return new Uint8Array(content.buffer, content.byteOffset, content.byteLength);
 }
 
 export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
diff --git a/packages/react-server/src/ReactServerStreamConfigNode.js b/packages/react-server/src/ReactServerStreamConfigNode.js
index 3fb698411721e..90609da2c45d6 100644
--- a/packages/react-server/src/ReactServerStreamConfigNode.js
+++ b/packages/react-server/src/ReactServerStreamConfigNode.js
@@ -38,7 +38,11 @@ export function flushBuffered(destination: Destination) {
   }
 }
 
-const VIEW_SIZE = 2048;
+// Chunks larger than VIEW_SIZE are written directly, without copying into the
+// internal view buffer. This must be at least half of Node's internal Buffer
+// pool size (8192) to avoid corrupting the pool when using
+// renderToReadableStream, which uses a byte stream that detaches ArrayBuffers.
+const VIEW_SIZE = 4096;
 let currentView = null;
 let writtenBytes = 0;
 let destinationHasCapacity = true;
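
Note (reviewer sketch, not part of the patch): the failure mode that the new regression test and the VIEW_SIZE comments describe can be reproduced outside React. The standalone script below is only an illustration under stated assumptions: the temp file name is made up, the 4095-byte size simply mirrors the test, and it assumes Node >= 18 so the web-standard ReadableStream is available as a global. It demonstrates the mechanism the comments name: enqueueing a pool-backed Buffer into a byte ReadableStream transfers (detaches) the shared pool's ArrayBuffer, after which a pooled allocation such as Buffer.allocUnsafe(1) can throw "Cannot perform Construct on a detached ArrayBuffer".

import fs from 'fs';
import os from 'os';
import path from 'path';

// Illustrative file name; a small synchronous read is backed by Node's shared
// 8192-byte Buffer pool (the regression test asserts the same thing).
const file = path.join(os.tmpdir(), 'pooled.bin');
fs.writeFileSync(file, Buffer.alloc(4095));
const pooled = fs.readFileSync(file);
fs.unlinkSync(file);

// Sanity check, same assertion as the test: the read is pool-backed.
if (pooled.buffer.byteLength !== 8192) {
  throw new Error('Expected a pool-backed Buffer');
}

new ReadableStream({
  type: 'bytes',
  start(controller) {
    // For byte streams, enqueue() transfers the chunk's underlying
    // ArrayBuffer, detaching it. Here that buffer is the shared pool.
    controller.enqueue(pooled);
    controller.close();
  },
});

// The pool's ArrayBuffer is now detached, so the next pooled allocation can
// fail, which is exactly what the test's write() handler checks for.
Buffer.allocUnsafe(1);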