From 01cad9eacac1e1559b06076239793304845b415c Mon Sep 17 00:00:00 2001 From: Janka Uryga Date: Fri, 19 Sep 2025 21:12:37 +0200 Subject: [PATCH 1/5] [Flight] Support Async Modules in Turbopack Server References (#34531) Seems like this was missed in https://github.com/facebook/react/pull/31313 --- .../ReactFlightClientConfigBundlerTurbopack.js | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/react-server-dom-turbopack/src/client/ReactFlightClientConfigBundlerTurbopack.js b/packages/react-server-dom-turbopack/src/client/ReactFlightClientConfigBundlerTurbopack.js index ba1c220fa4f32..f061fa9816588 100644 --- a/packages/react-server-dom-turbopack/src/client/ReactFlightClientConfigBundlerTurbopack.js +++ b/packages/react-server-dom-turbopack/src/client/ReactFlightClientConfigBundlerTurbopack.js @@ -132,7 +132,19 @@ export function resolveServerReference( ); } } - // TODO: This needs to return async: true if it's an async module. + if (resolvedModuleData.async) { + // If the module is marked as async in a Client Reference, we don't actually care. + // What matters is whether the consumer wants to unwrap it or not. + // For Server References, it is different because the consumer is completely internal + // to the bundler. So instead of passing it to each reference we can mark it in the + // manifest. 
+ return [ + resolvedModuleData.id, + resolvedModuleData.chunks, + name, + 1 /* async */, + ]; + } return [resolvedModuleData.id, resolvedModuleData.chunks, name]; } From 5e3cd53f20189001711f90d4db671f36d49a91f5 Mon Sep 17 00:00:00 2001 From: Jack Pope Date: Fri, 19 Sep 2025 15:49:08 -0400 Subject: [PATCH 2/5] Update MAINTAINERS (#34534) --- MAINTAINERS | 1 + 1 file changed, 1 insertion(+) diff --git a/MAINTAINERS b/MAINTAINERS index 5ad514035dd53..e48736bc1ca41 100644 --- a/MAINTAINERS +++ b/MAINTAINERS @@ -1,5 +1,6 @@ acdlite eps1lon +EugeneChoi4 gaearon gnoff unstubbable From d415fd3ed716f02f463232341ab21e909e0058ca Mon Sep 17 00:00:00 2001 From: Hendrik Liebau Date: Fri, 19 Sep 2025 23:38:11 +0200 Subject: [PATCH 3/5] [Flight] Handle Lazy in `renderDebugModel` (#34536) If we don't handle Lazy types specifically in `renderDebugModel`, all of their properties will be emitted using `renderDebugModel` as well. This also includes its `_debugInfo` property, if the Lazy comes from the Flight client. That array might contain objects that are deduped, and resolving those references in the client can cause runtime errors, e.g.: ``` TypeError: Cannot read properties of undefined (reading '$$typeof') ``` This happened specifically when an "RSC stream" debug info entry, coming from the Flight client through IO tracking, was emitted and its `debugTask` property was deduped, which couldn't be resolved in the client. To avoid actually initializing a lazy causing a side-effect, we make some assumptions about the structure of its payload, and only emit resolved or rejected values, otherwise we emit a halted chunk. 
--- .../react-server/src/ReactFlightServer.js | 64 +++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js index 31bea759a0a89..66203af1fefdb 100644 --- a/packages/react-server/src/ReactFlightServer.js +++ b/packages/react-server/src/ReactFlightServer.js @@ -4702,6 +4702,70 @@ function renderDebugModel( element._store.validated, ]; } + case REACT_LAZY_TYPE: { + // To avoid actually initializing a lazy causing a side-effect, we make + // some assumptions about the structure of the payload even though + // that's not really part of the contract. In practice, this is really + // just coming from React.lazy helper or Flight. + const lazy: LazyComponent = (value: any); + const payload = lazy._payload; + + if (payload !== null && typeof payload === 'object') { + // React.lazy constructor + switch (payload._status) { + case -1 /* Uninitialized */: + case 0 /* Pending */: + break; + case 1 /* Resolved */: { + const id = outlineDebugModel(request, counter, payload._result); + return serializeLazyID(id); + } + case 2 /* Rejected */: { + // We don't log these errors since they didn't actually throw into + // Flight. + const digest = ''; + const id = request.nextChunkId++; + emitErrorChunk(request, id, digest, payload._result, true, null); + return serializeLazyID(id); + } + } + + // React Flight + switch (payload.status) { + case 'pending': + case 'blocked': + case 'resolved_model': + // The value is an uninitialized model from the Flight client. + // It's not very useful to emit that. + break; + case 'resolved_module': + // The value is client reference metadata from the Flight client. + // It's likely for SSR, so we choose not to emit it. + break; + case 'fulfilled': { + const id = outlineDebugModel(request, counter, payload.value); + return serializeLazyID(id); + } + case 'rejected': { + // We don't log these errors since they didn't actually throw into + // Flight. 
+ const digest = ''; + const id = request.nextChunkId++; + emitErrorChunk(request, id, digest, payload.reason, true, null); + return serializeLazyID(id); + } + } + } + + // We couldn't emit a resolved or rejected value synchronously. For now, + // we emit this as a halted chunk. TODO: We could maybe also handle + // pending lazy debug models like we do in serializeDebugThenable, + // if/when we determine that it's worth the added complexity. + request.pendingDebugChunks++; + const id = request.nextChunkId++; + emitDebugHaltChunk(request, id); + return serializeLazyID(id); + } } // $FlowFixMe[method-unbinding] From 565eb7888efa5d8e70273d9b7a16374131348cbb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Markb=C3=A5ge?= Date: Fri, 19 Sep 2025 18:23:18 -0400 Subject: [PATCH 4/5] Unwrap a reference to a Lazy value (#34535) If we are referencing a lazy value that isn't explicitly lazy ($L...) it's because we added it around an element that was blocked to be able to defer things inside. However, once that is unblocked we can start unwrapping it and just use the inner element instead for any future reference. The race condition is still there since it's a race condition whether we added the wrapper in the first place. This just makes it consistent with unwrapping of the rest of the path. 
--- .../react-client/src/ReactFlightClient.js | 71 ++++++++++++++++++- 1 file changed, 69 insertions(+), 2 deletions(-) diff --git a/packages/react-client/src/ReactFlightClient.js b/packages/react-client/src/ReactFlightClient.js index 74a412d6f4c0a..6e96f2f0378ec 100644 --- a/packages/react-client/src/ReactFlightClient.js +++ b/packages/react-client/src/ReactFlightClient.js @@ -1337,7 +1337,11 @@ function fulfillReference( const {response, handler, parentObject, key, map, path} = reference; for (let i = 1; i < path.length; i++) { - while (value.$$typeof === REACT_LAZY_TYPE) { + while ( + typeof value === 'object' && + value !== null && + value.$$typeof === REACT_LAZY_TYPE + ) { // We never expect to see a Lazy node on this path because we encode those as // separate models. This must mean that we have inserted an extra lazy node // e.g. to replace a blocked element. We must instead look for it inside. @@ -1408,6 +1412,39 @@ function fulfillReference( } value = value[path[i]]; } + + while ( + typeof value === 'object' && + value !== null && + value.$$typeof === REACT_LAZY_TYPE + ) { + // If what we're referencing is a Lazy it must be because we inserted one as a virtual node + // while it was blocked by other data. If it's no longer blocked, we can unwrap it. + const referencedChunk: SomeChunk = value._payload; + if (referencedChunk === handler.chunk) { + // This is a reference to the thing we're currently blocking. We can peek + // inside of it to get the value. 
+ value = handler.value; + continue; + } else { + switch (referencedChunk.status) { + case RESOLVED_MODEL: + initializeModelChunk(referencedChunk); + break; + case RESOLVED_MODULE: + initializeModuleChunk(referencedChunk); + break; + } + switch (referencedChunk.status) { + case INITIALIZED: { + value = referencedChunk.value; + continue; + } + } + } + break; + } + const mappedValue = map(response, value, parentObject, key); parentObject[key] = mappedValue; @@ -1855,7 +1892,11 @@ function getOutlinedModel( case INITIALIZED: let value = chunk.value; for (let i = 1; i < path.length; i++) { - while (value.$$typeof === REACT_LAZY_TYPE) { + while ( + typeof value === 'object' && + value !== null && + value.$$typeof === REACT_LAZY_TYPE + ) { const referencedChunk: SomeChunk = value._payload; switch (referencedChunk.status) { case RESOLVED_MODEL: @@ -1924,6 +1965,32 @@ function getOutlinedModel( } value = value[path[i]]; } + + while ( + typeof value === 'object' && + value !== null && + value.$$typeof === REACT_LAZY_TYPE + ) { + // If what we're referencing is a Lazy it must be because we inserted one as a virtual node + // while it was blocked by other data. If it's no longer blocked, we can unwrap it. 
+ const referencedChunk: SomeChunk = value._payload; + switch (referencedChunk.status) { + case RESOLVED_MODEL: + initializeModelChunk(referencedChunk); + break; + case RESOLVED_MODULE: + initializeModuleChunk(referencedChunk); + break; + } + switch (referencedChunk.status) { + case INITIALIZED: { + value = referencedChunk.value; + continue; + } + } + break; + } + const chunkValue = map(response, value, parentObject, key); if ( parentObject[0] === REACT_ELEMENT_TYPE && From 115e3ec15fcbffae21eceb122cde92ea957743e7 Mon Sep 17 00:00:00 2001 From: Hendrik Liebau Date: Sat, 20 Sep 2025 08:09:44 +0200 Subject: [PATCH 5/5] [ci] Document that full git shas are required for manual prereleases (#34537) Triggering the "(Runtime) Publish Prereleases Manual" workflow with a short git sha doesn't work. It needs the full sha. We might be able to make it work with the short sha as well, but for now we can at least document the restriction. --- scripts/release/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/release/README.md b/scripts/release/README.md index 475714ed7d172..77042a5dac8b6 100644 --- a/scripts/release/README.md +++ b/scripts/release/README.md @@ -37,7 +37,7 @@ The high level process of creating releases is [documented below](#process). Ind If your code lands in the main branch, it will be automatically published to the prerelease channels within the next weekday. However, if you want to immediately publish a prerelease, you can trigger the job to run immediately via the GitHub UI: 1. Wait for the commit you want to release to finish its [(Runtime) Build and Test workflow](https://github.com/facebook/react/actions/workflows/runtime_build_and_test.yml), as the prerelease script needs to download the build from that workflow. -2. Copy the git sha of whichever commit you are trying to release +2. Copy the full git sha of whichever commit you are trying to release 3. 
Go to https://github.com/facebook/react/actions/workflows/runtime_prereleases_manual.yml 4. Paste the git sha into the "Run workflow" dropdown 5. Let the job finish and it will be released on npm