[Flight] Don't hang forever when prerendering a rejected promise (#32953)

Hendrik Liebau 2025-04-23 11:02:43 +02:00 committed by GitHub
parent 3ef31d196a
commit 914319ae59
3 changed files with 299 additions and 17 deletions
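What changed: onAllReady was previously invoked only from performWork (after the work loop had finished the last abortable task) and from abort. Completion paths that run outside the work loop, such as the root promise rejecting or a readable stream or async iterable finishing or erroring, never triggered it, so unstable_prerender could hang forever waiting for the prelude. This commit adds a callOnAllReadyIfReady helper that fires onAllReady once both abortableTasks and abortListeners are empty, and calls it from every path that retires a task or listener: errored and completed tasks, finished streams and async iterables, and the abort paths. The edge prerender entry point also drops the prelude stream's start callback, so work is no longer kicked off from the stream itself.

A minimal sketch of the scenario the new tests cover, assuming an experimental build that exposes unstable_prerender from the react-server-dom-webpack/static.edge entry point (the empty manifest and the error-collecting callback are illustrative placeholders, not part of this commit):

  import {unstable_prerender} from 'react-server-dom-webpack/static.edge';

  async function repro() {
    const errors = [];
    // The root model rejects before any task completes. Before this commit the
    // returned promise never settled; now it resolves once onError has fired,
    // and the prelude stream carries an error chunk for the client to rethrow.
    const {prelude} = await unstable_prerender(
      Promise.reject(new Error('Bam!')),
      {}, // client manifest, left empty for illustration
      {
        onError(err) {
          errors.push(err);
        },
      },
    );
    console.log(errors.length, prelude instanceof ReadableStream); // 1 true
  }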


@@ -1354,4 +1354,285 @@ describe('ReactFlightDOMEdge', () => {
    expect(error).not.toBe(null);
    expect(error.message).toBe('Connection closed.');
  });

  // @gate experimental
  it('should be able to handle a rejected promise in unstable_prerender', async () => {
    const expectedError = new Error('Bam!');
    const errors = [];
    const {prelude} = await ReactServerDOMStaticServer.unstable_prerender(
      Promise.reject(expectedError),
      webpackMap,
      {
        onError(err) {
          errors.push(err);
        },
      },
    );
    expect(errors).toEqual([expectedError]);
    const response = ReactServerDOMClient.createFromReadableStream(prelude, {
      serverConsumerManifest: {
        moduleMap: {},
        moduleLoading: {},
      },
    });
    let error = null;
    try {
      await response;
    } catch (x) {
      error = x;
    }
    const expectedMessage = __DEV__
      ? expectedError.message
      : 'An error occurred in the Server Components render. The specific message is omitted in production builds to avoid leaking sensitive details. A digest property is included on this error instance which may provide additional details about the nature of the error.';
    expect(error).not.toBe(null);
    expect(error.message).toBe(expectedMessage);
  });

  // @gate experimental
  it('should be able to handle an erroring async iterable in unstable_prerender', async () => {
    const expectedError = new Error('Bam!');
    const errors = [];
    const {prelude} = await ReactServerDOMStaticServer.unstable_prerender(
      {
        async *[Symbol.asyncIterator]() {
          await serverAct(() => {
            throw expectedError;
          });
        },
      },
      webpackMap,
      {
        onError(err) {
          errors.push(err);
        },
      },
    );
    expect(errors).toEqual([expectedError]);
    const response = ReactServerDOMClient.createFromReadableStream(prelude, {
      serverConsumerManifest: {
        moduleMap: {},
        moduleLoading: {},
      },
    });
    let error = null;
    try {
      const result = await response;
      const iterator = result[Symbol.asyncIterator]();
      await iterator.next();
    } catch (x) {
      error = x;
    }
    const expectedMessage = __DEV__
      ? expectedError.message
      : 'An error occurred in the Server Components render. The specific message is omitted in production builds to avoid leaking sensitive details. A digest property is included on this error instance which may provide additional details about the nature of the error.';
    expect(error).not.toBe(null);
    expect(error.message).toBe(expectedMessage);
  });

  // @gate experimental
  it('should be able to handle an erroring readable stream in unstable_prerender', async () => {
    const expectedError = new Error('Bam!');
    const errors = [];
    const {prelude} = await ReactServerDOMStaticServer.unstable_prerender(
      new ReadableStream({
        async start(controller) {
          await serverAct(() => {
            setTimeout(() => {
              controller.error(expectedError);
            });
          });
        },
      }),
      webpackMap,
      {
        onError(err) {
          errors.push(err);
        },
      },
    );
    expect(errors).toEqual([expectedError]);
    const response = ReactServerDOMClient.createFromReadableStream(prelude, {
      serverConsumerManifest: {
        moduleMap: {},
        moduleLoading: {},
      },
    });
    let error = null;
    try {
      const stream = await response;
      await stream.getReader().read();
    } catch (x) {
      error = x;
    }
    const expectedMessage = __DEV__
      ? expectedError.message
      : 'An error occurred in the Server Components render. The specific message is omitted in production builds to avoid leaking sensitive details. A digest property is included on this error instance which may provide additional details about the nature of the error.';
    expect(error).not.toBe(null);
    expect(error.message).toBe(expectedMessage);
  });

  // @gate experimental
  it('can prerender an async iterable', async () => {
    const errors = [];
    const {prelude} = await ReactServerDOMStaticServer.unstable_prerender(
      {
        async *[Symbol.asyncIterator]() {
          yield 'hello';
          yield ' ';
          yield 'world';
        },
      },
      webpackMap,
      {
        onError(err) {
          errors.push(err);
        },
      },
    );
    expect(errors).toEqual([]);
    const response = ReactServerDOMClient.createFromReadableStream(prelude, {
      serverConsumerManifest: {
        moduleMap: {},
        moduleLoading: {},
      },
    });
    let text = '';
    const result = await response;
    const iterator = result[Symbol.asyncIterator]();
    while (true) {
      const {done, value} = await iterator.next();
      if (done) {
        break;
      }
      text += value;
    }
    expect(text).toBe('hello world');
  });

  // @gate experimental
  it('can prerender a readable stream', async () => {
    const errors = [];
    const {prelude} = await ReactServerDOMStaticServer.unstable_prerender(
      new ReadableStream({
        start(controller) {
          controller.enqueue('hello world');
          controller.close();
        },
      }),
      webpackMap,
      {
        onError(err) {
          errors.push(err);
        },
      },
    );
    expect(errors).toEqual([]);
    const response = ReactServerDOMClient.createFromReadableStream(prelude, {
      serverConsumerManifest: {
        moduleMap: {},
        moduleLoading: {},
      },
    });
    const stream = await response;
    const result = await readResult(stream);
    expect(result).toBe('hello world');
  });

  // @gate experimental
  it('does not return a prerender prelude early when an error is emitted and there are still pending tasks', async () => {
    let rejectPromise;
    const rejectingPromise = new Promise(
      (resolve, reject) => (rejectPromise = reject),
    );
    const expectedError = new Error('Boom!');
    const errors = [];
    const {prelude} = await ReactServerDOMStaticServer.unstable_prerender(
      [
        rejectingPromise,
        {
          async *[Symbol.asyncIterator]() {
            yield 'hello';
            yield ' ';
            await serverAct(() => {
              rejectPromise(expectedError);
            });
            yield 'world';
          },
        },
      ],
      webpackMap,
      {
        onError(err) {
          errors.push(err);
        },
      },
    );
    expect(errors).toEqual([expectedError]);
    const response = ReactServerDOMClient.createFromReadableStream(prelude, {
      serverConsumerManifest: {
        moduleMap: {},
        moduleLoading: {},
      },
    });
    let text = '';
    const [promise, iterable] = await response;
    const iterator = iterable[Symbol.asyncIterator]();
    while (true) {
      const {done, value} = await iterator.next();
      if (done) {
        break;
      }
      text += value;
    }
    expect(text).toBe('hello world');
    let error = null;
    try {
      await promise;
    } catch (x) {
      error = x;
    }
    const expectedMessage = __DEV__
      ? expectedError.message
      : 'An error occurred in the Server Components render. The specific message is omitted in production builds to avoid leaking sensitive details. A digest property is included on this error instance which may provide additional details about the nature of the error.';
    expect(error).not.toBe(null);
    expect(error.message).toBe(expectedMessage);
  });
});


@@ -121,9 +121,6 @@ function prerender(
const stream = new ReadableStream(
{
type: 'bytes',
start: (controller): ?Promise<void> => {
startWork(request);
},
pull: (controller): ?Promise<void> => {
startFlowing(request, controller);
},


@@ -768,10 +768,11 @@ function serializeReadableStream(
}
if (entry.done) {
request.abortListeners.delete(abortStream);
const endStreamRow = streamTask.id.toString(16) + ':C\n';
request.completedRegularChunks.push(stringToChunk(endStreamRow));
enqueueFlush(request);
request.abortListeners.delete(abortStream);
callOnAllReadyIfReady(request);
aborted = true;
} else {
try {
@@ -869,7 +870,6 @@ function serializeAsyncIterable(
}
if (entry.done) {
request.abortListeners.delete(abortIterable);
let endStreamRow;
if (entry.value === undefined) {
endStreamRow = streamTask.id.toString(16) + ':C\n';
@@ -890,6 +890,8 @@ function serializeAsyncIterable(
}
request.completedRegularChunks.push(stringToChunk(endStreamRow));
enqueueFlush(request);
request.abortListeners.delete(abortIterable);
callOnAllReadyIfReady(request);
aborted = true;
} else {
try {
@@ -3901,7 +3903,6 @@ function erroredTask(request: Request, task: Task, error: mixed): void {
emitTimingChunk(request, task.id, performance.now());
}
}
request.abortableTasks.delete(task);
task.status = ERRORED;
if (
enablePostpone &&
@@ -3916,6 +3917,8 @@ function erroredTask(request: Request, task: Task, error: mixed): void {
const digest = logRecoverableError(request, error, task);
emitErrorChunk(request, task.id, digest, error);
}
request.abortableTasks.delete(task);
callOnAllReadyIfReady(request);
}
const emptyRoot = {};
@@ -3995,8 +3998,9 @@ function retryTask(request: Request, task: Task): void {
emitModelChunk(request, task.id, json);
}
request.abortableTasks.delete(task);
task.status = COMPLETED;
request.abortableTasks.delete(task);
callOnAllReadyIfReady(request);
} catch (thrownValue) {
if (request.status === ABORTING) {
request.abortableTasks.delete(task);
@@ -4067,7 +4071,6 @@ function performWork(request: Request): void {
currentRequest = request;
prepareToUseHooksForRequest(request);
const hadAbortableTasks = request.abortableTasks.size > 0;
try {
const pingedTasks = request.pingedTasks;
request.pingedTasks = [];
@@ -4078,13 +4081,6 @@
if (request.destination !== null) {
flushCompletedChunks(request, request.destination);
}
if (hadAbortableTasks && request.abortableTasks.size === 0) {
// We can ping after completing but if this happens there already
// wouldn't be any abortable tasks. So we only call allReady after
// the work which actually completed the last pending task
const onAllReady = request.onAllReady;
onAllReady();
}
} catch (error) {
logRecoverableError(request, error, null);
fatalError(request, error);
@@ -4246,6 +4242,12 @@ function enqueueFlush(request: Request): void {
}
}
function callOnAllReadyIfReady(request: Request): void {
if (request.abortableTasks.size === 0 && request.abortListeners.size === 0) {
request.onAllReady();
}
}
export function startFlowing(request: Request, destination: Destination): void {
if (request.status === CLOSING) {
request.status = CLOSED;
@@ -4285,6 +4287,7 @@ export function abort(request: Request, reason: mixed): void {
// and leave the reference unfulfilled.
abortableTasks.forEach(task => haltTask(task, request));
abortableTasks.clear();
callOnAllReadyIfReady(request);
} else if (
enablePostpone &&
typeof reason === 'object' &&
@@ -4301,6 +4304,7 @@
emitPostponeChunk(request, errorId, postponeInstance);
abortableTasks.forEach(task => abortTask(task, request, errorId));
abortableTasks.clear();
callOnAllReadyIfReady(request);
} else {
const error =
reason === undefined
@@ -4323,9 +4327,8 @@ export function abort(request: Request, reason: mixed): void {
emitErrorChunk(request, errorId, digest, error);
abortableTasks.forEach(task => abortTask(task, request, errorId));
abortableTasks.clear();
callOnAllReadyIfReady(request);
}
const onAllReady = request.onAllReady;
onAllReady();
}
const abortListeners = request.abortListeners;
if (abortListeners.size > 0) {
@@ -4356,6 +4359,7 @@ export function abort(request: Request, reason: mixed): void {
}
abortListeners.forEach(callback => callback(error));
abortListeners.clear();
callOnAllReadyIfReady(request);
}
if (request.destination !== null) {
flushCompletedChunks(request, request.destination);
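
For completeness, a sketch of how a consumer observes the error that the fixed prerender now serializes into the prelude, mirroring the new tests (assumes the react-server-dom-webpack/client.edge entry point; the empty serverConsumerManifest fields are placeholders):

  import {createFromReadableStream} from 'react-server-dom-webpack/client.edge';

  async function consume(prelude) {
    const response = createFromReadableStream(prelude, {
      serverConsumerManifest: {moduleMap: {}, moduleLoading: {}},
    });
    try {
      await response; // rethrows the error chunk serialized into the prelude
    } catch (err) {
      // In DEV builds the original message is preserved; in production builds
      // it is replaced by a generic message plus a digest property.
      console.error(err.message);
    }
  }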