Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
78 changes: 52 additions & 26 deletions packages/react-client/src/ReactFlightClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -367,6 +367,7 @@ type Response = {
_debugRootStack?: null | Error, // DEV-only
_debugRootTask?: null | ConsoleTask, // DEV-only
_debugStartTime: number, // DEV-only
_debugIOStarted: boolean, // DEV-only
_debugFindSourceMapURL?: void | FindSourceMapURLCallback, // DEV-only
_debugChannel?: void | DebugChannel, // DEV-only
_blockedConsole?: null | SomeChunk<ConsoleEntry>, // DEV-only
Expand Down Expand Up @@ -500,7 +501,7 @@ function createErrorChunk<T>(
}

function moveDebugInfoFromChunkToInnerValue<T>(
chunk: InitializedChunk<T>,
chunk: InitializedChunk<T> | InitializedStreamChunk<any>,
value: T,
): void {
// Remove the debug info from the initialized chunk, and add it to the inner
Expand Down Expand Up @@ -1569,6 +1570,10 @@ function fulfillReference(
initializedChunk.reason = handler.reason; // Used by streaming chunks
if (resolveListeners !== null) {
wakeChunk(resolveListeners, handler.value, initializedChunk);
} else {
if (__DEV__) {
moveDebugInfoFromChunkToInnerValue(initializedChunk, handler.value);
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This PR surfaced another bug.

It turns out that resolveChunkDebugInfo adds a .then() listener which forces the wakeChunk pass to be taken. If that doesn't happen, then wakeChunk isn't called.

We forgot that we need to call moveDebugInfoFromChunkToInnerValue everywhere we initialize but don't call wakeChunk. We could potentially just move this out and call it next to each of these call sites instead of inside wakeChunk.

}
}
}
}
Expand Down Expand Up @@ -1818,6 +1823,10 @@ function loadServerReference<A: Iterable<any>, T>(
initializedChunk.value = handler.value;
if (resolveListeners !== null) {
wakeChunk(resolveListeners, handler.value, initializedChunk);
} else {
if (__DEV__) {
moveDebugInfoFromChunkToInnerValue(initializedChunk, handler.value);
}
}
}
}
Expand Down Expand Up @@ -2536,6 +2545,10 @@ function missingCall() {
);
}

// DEV-only. Bound to a Response and scheduled with setTimeout(..., 0) in
// ResponseInstance: everything processed before the first macrotask fires is
// treated as cached/buffered data, and only chunks arriving after this flag
// flips are attributed to live I/O on the RSC stream.
function markIOStarted(this: Response) {
  this._debugIOStarted = true;
}

function ResponseInstance(
this: $FlowFixMe,
bundlerConfig: ServerConsumerModuleMap,
Expand Down Expand Up @@ -2609,6 +2622,10 @@ function ResponseInstance(
// whereas if you use createFromReadableStream from the body of the fetch
// then the start time is when the headers resolved.
this._debugStartTime = performance.now();
this._debugIOStarted = false;
// We consider everything before the first setTimeout task to be cached data
// and is not considered I/O required to load the stream.
setTimeout(markIOStarted.bind(this), 0);
}
this._debugFindSourceMapURL = findSourceMapURL;
this._debugChannel = debugChannel;
Expand Down Expand Up @@ -2762,7 +2779,7 @@ function incrementChunkDebugInfo(
}
}

function addDebugInfo(chunk: SomeChunk<any>, debugInfo: ReactDebugInfo): void {
function addAsyncInfo(chunk: SomeChunk<any>, asyncInfo: ReactAsyncInfo): void {
const value = resolveLazy(chunk.value);
if (
typeof value === 'object' &&
Expand All @@ -2774,34 +2791,39 @@ function addDebugInfo(chunk: SomeChunk<any>, debugInfo: ReactDebugInfo): void {
) {
if (isArray(value._debugInfo)) {
// $FlowFixMe[method-unbinding]
value._debugInfo.push.apply(value._debugInfo, debugInfo);
value._debugInfo.push(asyncInfo);
} else {
Object.defineProperty((value: any), '_debugInfo', {
configurable: false,
enumerable: false,
writable: true,
value: debugInfo,
value: [asyncInfo],
});
}
} else {
// $FlowFixMe[method-unbinding]
chunk._debugInfo.push.apply(chunk._debugInfo, debugInfo);
chunk._debugInfo.push(asyncInfo);
}
}

function resolveChunkDebugInfo(
response: Response,
streamState: StreamState,
chunk: SomeChunk<any>,
): void {
if (__DEV__ && enableAsyncDebugInfo) {
// Add the currently resolving chunk's debug info representing the stream
// to the Promise that was waiting on the stream, or its underlying value.
const debugInfo: ReactDebugInfo = [{awaited: streamState._debugInfo}];
if (chunk.status === PENDING || chunk.status === BLOCKED) {
const boundAddDebugInfo = addDebugInfo.bind(null, chunk, debugInfo);
chunk.then(boundAddDebugInfo, boundAddDebugInfo);
} else {
addDebugInfo(chunk, debugInfo);
// Only include stream information after a macrotask. Any chunk processed
// before that is considered cached data.
if (response._debugIOStarted) {
// Add the currently resolving chunk's debug info representing the stream
// to the Promise that was waiting on the stream, or its underlying value.
const asyncInfo: ReactAsyncInfo = {awaited: streamState._debugInfo};
if (chunk.status === PENDING || chunk.status === BLOCKED) {
const boundAddAsyncInfo = addAsyncInfo.bind(null, chunk, asyncInfo);
chunk.then(boundAddAsyncInfo, boundAddAsyncInfo);
} else {
addAsyncInfo(chunk, asyncInfo);
}
}
}
}
Expand Down Expand Up @@ -2837,12 +2859,12 @@ function resolveModel(
model,
);
if (__DEV__) {
resolveChunkDebugInfo(streamState, newChunk);
resolveChunkDebugInfo(response, streamState, newChunk);
}
chunks.set(id, newChunk);
} else {
if (__DEV__) {
resolveChunkDebugInfo(streamState, chunk);
resolveChunkDebugInfo(response, streamState, chunk);
}
resolveModelChunk(response, chunk, model);
}
Expand All @@ -2869,7 +2891,7 @@ function resolveText(
}
const newChunk = createInitializedTextChunk(response, text);
if (__DEV__) {
resolveChunkDebugInfo(streamState, newChunk);
resolveChunkDebugInfo(response, streamState, newChunk);
}
chunks.set(id, newChunk);
}
Expand All @@ -2895,7 +2917,7 @@ function resolveBuffer(
}
const newChunk = createInitializedBufferChunk(response, buffer);
if (__DEV__) {
resolveChunkDebugInfo(streamState, newChunk);
resolveChunkDebugInfo(response, streamState, newChunk);
}
chunks.set(id, newChunk);
}
Expand Down Expand Up @@ -2942,7 +2964,7 @@ function resolveModule(
blockedChunk.status = BLOCKED;
}
if (__DEV__) {
resolveChunkDebugInfo(streamState, blockedChunk);
resolveChunkDebugInfo(response, streamState, blockedChunk);
}
promise.then(
() => resolveModuleChunk(response, blockedChunk, clientReference),
Expand All @@ -2952,12 +2974,12 @@ function resolveModule(
if (!chunk) {
const newChunk = createResolvedModuleChunk(response, clientReference);
if (__DEV__) {
resolveChunkDebugInfo(streamState, newChunk);
resolveChunkDebugInfo(response, streamState, newChunk);
}
chunks.set(id, newChunk);
} else {
if (__DEV__) {
resolveChunkDebugInfo(streamState, chunk);
resolveChunkDebugInfo(response, streamState, chunk);
}
// This can't actually happen because we don't have any forward
// references to modules.
Expand All @@ -2978,13 +3000,13 @@ function resolveStream<T: ReadableStream | $AsyncIterable<any, any, void>>(
if (!chunk) {
const newChunk = createInitializedStreamChunk(response, stream, controller);
if (__DEV__) {
resolveChunkDebugInfo(streamState, newChunk);
resolveChunkDebugInfo(response, streamState, newChunk);
}
chunks.set(id, newChunk);
return;
}
if (__DEV__) {
resolveChunkDebugInfo(streamState, chunk);
resolveChunkDebugInfo(response, streamState, chunk);
}
if (chunk.status !== PENDING) {
// We already resolved. We didn't expect to see this.
Expand Down Expand Up @@ -3034,6 +3056,10 @@ function resolveStream<T: ReadableStream | $AsyncIterable<any, any, void>>(
resolvedChunk.reason = controller;
if (resolveListeners !== null) {
wakeChunk(resolveListeners, chunk.value, (chunk: any));
} else {
if (__DEV__) {
moveDebugInfoFromChunkToInnerValue(resolvedChunk, stream);
}
}
}

Expand Down Expand Up @@ -3433,12 +3459,12 @@ function resolvePostponeDev(
postponeInstance,
);
if (__DEV__) {
resolveChunkDebugInfo(streamState, newChunk);
resolveChunkDebugInfo(response, streamState, newChunk);
}
chunks.set(id, newChunk);
} else {
if (__DEV__) {
resolveChunkDebugInfo(streamState, chunk);
resolveChunkDebugInfo(response, streamState, chunk);
}
triggerErrorOnChunk(response, chunk, postponeInstance);
}
Expand Down Expand Up @@ -3467,12 +3493,12 @@ function resolveErrorModel(
errorWithDigest,
);
if (__DEV__) {
resolveChunkDebugInfo(streamState, newChunk);
resolveChunkDebugInfo(response, streamState, newChunk);
}
chunks.set(id, newChunk);
} else {
if (__DEV__) {
resolveChunkDebugInfo(streamState, chunk);
resolveChunkDebugInfo(response, streamState, chunk);
}
triggerErrorOnChunk(response, chunk, errorWithDigest);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3035,18 +3035,6 @@ describe('ReactFlightDOMBrowser', () => {
{
"time": 0,
},
{
"awaited": {
"byteSize": 0,
"end": 0,
"name": "RSC stream",
"owner": null,
"start": 0,
"value": {
"value": "stream",
},
},
},
]
`);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1248,11 +1248,6 @@ describe('ReactFlightDOMEdge', () => {
owner: greetInfo,
}),
{time: 14},
expect.objectContaining({
awaited: expect.objectContaining({
name: 'RSC stream',
}),
}),
]);
}
// The owner that created the span was the outer server component.
Expand Down
20 changes: 8 additions & 12 deletions packages/react-server/src/ReactFlightServer.js
Original file line number Diff line number Diff line change
Expand Up @@ -2325,9 +2325,15 @@ function visitAsyncNode(
return null;
}
visited.add(node);
if (node.end >= 0 && node.end <= request.timeOrigin) {
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Another drive-by fix somewhat related. Most of the time this is covered by the PROMISE_NODE path but it really applies to all of these so we can just apply it universally.

// This was already resolved when we started this render. It must have been either something
// that's part of a start up sequence or externally cached data. We exclude that information.
// The technique for debugging the effects of uncached data on the render is to simply uncache it.
return null;
}
let previousIONode = null;
// First visit anything that blocked this sequence to start in the first place.
if (node.previous !== null && node.end > request.timeOrigin) {
if (node.previous !== null) {
previousIONode = visitAsyncNode(
request,
task,
Expand All @@ -2349,12 +2355,6 @@ function visitAsyncNode(
return previousIONode;
}
case PROMISE_NODE: {
if (node.end <= request.timeOrigin) {
// This was already resolved when we started this render. It must have been either something
// that's part of a start up sequence or externally cached data. We exclude that information.
// The technique for debugging the effects of uncached data on the render is to simply uncache it.
return previousIONode;
}
const awaited = node.awaited;
let match: void | null | PromiseNode | IONode = previousIONode;
const promise = node.promise.deref();
Expand Down Expand Up @@ -2437,11 +2437,7 @@ function visitAsyncNode(
} else if (ioNode !== null) {
const startTime: number = node.start;
const endTime: number = node.end;
if (endTime <= request.timeOrigin) {
// This was already resolved when we started this render. It must have been either something
// that's part of a start up sequence or externally cached data. We exclude that information.
return null;
} else if (startTime < cutOff) {
if (startTime < cutOff) {
// We started awaiting this node before we started rendering this sequence.
// This means that this particular await was never part of the current sequence.
// If we have another await higher up in the chain it might have a more actionable stack
Expand Down
Loading
Loading