[Flight] Lazy load objects from the debug channel (#33728)

When a debug channel is available, we now allow objects to be lazily
requested through the debug channel; only then will the server send
them.
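
Concretely, the debug channel carries short string commands keyed by the
chunk id. Based on the prefixes visible in the diffs below ('Q:' to query
an object, 'R:' to release it), a minimal client-side sketch might look
like this (the helper name is hypothetical; only _debugChannel and _chunks
appear in the actual diff):

  // Sketch: ask the server to stream a deferred object, once per id.
  // Refs are hex-encoded chunk ids; 'Q:<id>' queries, 'R:<id>' releases.
  function queryDeferredObject(response, ref) {
    const id = parseInt(ref, 16);
    if (response._debugChannel && !response._chunks.has(id)) {
      response._debugChannel('Q:' + ref); // server emits the chunk on the stream
    }
  }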

The client eagerly asks for the next level of objects once it parses its
payload. That way those objects have likely loaded by the time you
actually expand that deep, e.g. in the console REPL. This is needed
because the console REPL is synchronous when you ask it to invoke
getters.
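
As a sketch of that eager step (hypothetical helper; in the diff below the
real work happens inside chunk initialization):

  // Sketch: after one level is parsed, immediately query every reference
  // the next level contains, so that a later synchronous getter likely
  // finds it already loaded.
  function prefetchNextLevel(response, refs) {
    for (const ref of refs) {
      queryDeferredObject(response, ref); // no-op if already requested
    }
  }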

Each level is lazily parsed, which means that we don't parse the next
level even though we eagerly loaded it. We parse it once the getter is
invoked (in Chrome DevTools you have to click a little `(...)` to invoke
the getter). When the getter is invoked, the chunk is initialized and
parsed, which in turn causes the next level to be asked for through the
debug channel. That ensures that if you expand one more level you can do
so synchronously.
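
From a console session this plays out roughly like so (illustrative;
`logged` stands in for whatever object was logged):

  logged[3].bar;      // invoking the getter initializes and parses level 2,
                      // which eagerly queries level 3 over the debug channel...
  logged[3].bar.baz;  // ...so expanding one level further resolves synchronously.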

Currently debug chunks are eagerly parsed, which means that things like
Server Component props that are lazy can end up being asked for
immediately. I'm working toward making the debug chunks lazy as well.
Sebastian Markbåge, 2025-07-08. Commit bbea677b77, parent f1ecf82bfb.
3 changed files with 115 additions and 4 deletions


@@ -120,10 +120,69 @@ async function ServerComponent({noCache}) {
   return await fetchThirdParty(noCache);
 }
 
+let veryDeepObject = [
+  {
+    bar: {
+      baz: {
+        a: {},
+      },
+    },
+  },
+  {
+    bar: {
+      baz: {
+        a: {},
+      },
+    },
+  },
+  {
+    bar: {
+      baz: {
+        a: {},
+      },
+    },
+  },
+  {
+    bar: {
+      baz: {
+        a: {
+          b: {
+            c: {
+              d: {
+                e: {
+                  f: {
+                    g: {
+                      h: {
+                        i: {
+                          j: {
+                            k: {
+                              l: {
+                                m: {
+                                  yay: 'You reached the end',
+                                },
+                              },
+                            },
+                          },
+                        },
+                      },
+                    },
+                  },
+                },
+              },
+            },
+          },
+        },
+      },
+    },
+  },
+];
+
 export default async function App({prerender, noCache}) {
   const res = await fetch('http://localhost:3001/todos');
   const todos = await res.json();
+  console.log('Expand me:', veryDeepObject);
   const dedupedChild = <ServerComponent noCache={noCache} />;
   const message = getServerState();
   return (


@@ -1774,6 +1774,40 @@ function applyConstructor(
   return undefined;
 }
 
+function defineLazyGetter<T>(
+  response: Response,
+  chunk: SomeChunk<T>,
+  parentObject: Object,
+  key: string,
+): any {
+  // We don't immediately initialize it even if it's resolved.
+  // Instead, we wait for the getter to get accessed.
+  Object.defineProperty(parentObject, key, {
+    get: function () {
+      if (chunk.status === RESOLVED_MODEL) {
+        // If it was now resolved, then we initialize it. This may then discover
+        // a new set of lazy references that are then asked for eagerly in case
+        // we get that deep.
+        initializeModelChunk(chunk);
+      }
+      switch (chunk.status) {
+        case INITIALIZED: {
+          return chunk.value;
+        }
+        case ERRORED:
+          throw chunk.reason;
+      }
+      // Otherwise, we didn't have enough time to load the object before it was
+      // accessed or the connection closed. So we just log that it was omitted.
+      // TODO: We should ideally throw here to indicate a difference.
+      return OMITTED_PROP_ERROR;
+    },
+    enumerable: true,
+    configurable: false,
+  });
+  return null;
+}
+
 function extractIterator(response: Response, model: Array<any>): Iterator<any> {
   // $FlowFixMe[incompatible-use]: This uses raw Symbols because we're extracting from a native array.
   return model[Symbol.iterator]();
@@ -2014,8 +2048,19 @@ function parseModelString(
     if (value.length > 2) {
       const debugChannel = response._debugChannel;
       if (debugChannel) {
-        const ref = value.slice(2);
-        debugChannel('R:' + ref); // Release this reference immediately
+        const ref = value.slice(2); // We assume this doesn't have a path, just an id.
+        const id = parseInt(ref, 16);
+        if (!response._chunks.has(id)) {
+          // We haven't seen this id before. Query the server to start sending it.
+          debugChannel('Q:' + ref);
+        }
+        // Start waiting. This now creates a pending chunk if it doesn't already exist.
+        const chunk = getChunk(response, id);
+        if (chunk.status === INITIALIZED) {
+          // We already loaded this before. We can just use the real value.
+          return chunk.value;
+        }
+        return defineLazyGetter(response, chunk, parentObject, key);
       }
     }


@@ -4820,10 +4820,15 @@ function emitConsoleChunk(
   const payload = [methodName, stackTrace, owner, env];
   // $FlowFixMe[method-unbinding]
   payload.push.apply(payload, args);
-  let json = serializeDebugModel(request, 500, payload);
+  const objectLimit = request.deferredDebugObjects === null ? 500 : 10;
+  let json = serializeDebugModel(
+    request,
+    objectLimit + stackTrace.length,
+    payload,
+  );
   if (json[0] !== '[') {
     // This looks like an error. Try a simpler object.
-    json = serializeDebugModel(request, 500, [
+    json = serializeDebugModel(request, 10 + stackTrace.length, [
       methodName,
       stackTrace,
       owner,
@@ -5760,6 +5765,8 @@ export function resolveDebugMessage(request: Request, message: string): void {
     if (retainedValue !== undefined) {
       // If we still have this object, and haven't emitted it before, emit it on the stream.
       const counter = {objectLimit: 10};
+      deferredDebugObjects.retained.delete(id);
+      deferredDebugObjects.existing.delete(retainedValue);
       emitOutlinedDebugModelChunk(request, id, counter, retainedValue);
       enqueueFlush(request);
     }
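
For context, the server side dispatches on the same message prefixes. A
simplified sketch of what resolveDebugMessage does for a query, assuming
only what the hunk above shows (the function name handleDebugMessage and
the 'Q:'-prefix parsing are illustrative):

  // Sketch: handle one debug-channel message on the server.
  function handleDebugMessage(request, message) {
    const command = message.slice(0, 2); // 'Q:' or 'R:'
    const id = parseInt(message.slice(2), 16);
    const deferredDebugObjects = request.deferredDebugObjects;
    if (command === 'Q:' && deferredDebugObjects !== null) {
      const retainedValue = deferredDebugObjects.retained.get(id);
      if (retainedValue !== undefined) {
        // Stop retaining the object and emit it now that the client asked.
        deferredDebugObjects.retained.delete(id);
        deferredDebugObjects.existing.delete(retainedValue);
        emitOutlinedDebugModelChunk(request, id, {objectLimit: 10}, retainedValue);
        enqueueFlush(request);
      }
    }
  }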