[Fizz] handle errors in onHeaders (#27712)

`onHeaders` can throw; however, for now we can assume that headers are
optimistic values, since the only things we produce for them are preload
links. This is a pragmatic decision: React could conceivably emit
headers in the future that are not optimistic and thus not optional,
but it is hard to imagine what such headers might be in practice.
If we need to make this behavior fatal in the future, it would be
a breaking change.

This commit adds error logging when `onHeaders` throws and ensures the
request can continue to render successfully.
Josh Story 2023-11-15 12:53:38 -08:00 committed by GitHub
parent aec521a96d
commit ee68446ff1
4 changed files with 70 additions and 9 deletions
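
For context, here is a minimal sketch of how this behavior surfaces to callers of `renderToPipeableStream`. The `onHeaders`, `onError`, and `onShellReady` options are real React APIs (the first two appear in the tests below), and the `{Link: ...}` / `{}` shape of the headers argument is inferred from the diff; the surrounding Node HTTP server, the `App` component, and the port are illustrative only, and `onHeaders` may be available only in experimental builds at the time of this commit.

import * as http from 'http';
import * as React from 'react';
import {renderToPipeableStream} from 'react-dom/server';

function App() {
  return <div>hello</div>;
}

http
  .createServer((request, response) => {
    const {pipe} = renderToPipeableStream(<App />, {
      onHeaders(headers) {
        // `headers` is an object such as {Link: '...'} containing preload or
        // preconnect links, or {} when React has nothing to provide. Setting
        // a header after the response has started will throw in Node.
        if (headers.Link) {
          response.setHeader('Link', headers.Link);
        }
      },
      onError(error) {
        // After this commit, an exception thrown by onHeaders above is
        // reported here and the render continues instead of failing.
        console.error(error);
      },
      onShellReady() {
        response.setHeader('Content-Type', 'text/html');
        pipe(response);
      },
    });
  })
  .listen(3000);

Concretely, the diff wraps the existing `emitEarlyPreloads` call in a new `safelyEmitEarlyPreloads` helper that catches the error and reports it through `logRecoverableError`, so a throwing `onHeaders` only costs the request its early preload headers rather than the whole response.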

View File

@@ -6131,6 +6131,10 @@ export function emitEarlyPreloads(
  if (onHeaders) {
    const headers = renderState.headers;
    if (headers) {
      // Even if onHeaders throws we don't want to call this again so
      // we drop the headers state from this point onwards.
      renderState.headers = null;
      let linkHeader = headers.preconnects;
      if (headers.fontPreloads) {
        if (linkHeader) {
@@ -6205,7 +6209,6 @@ export function emitEarlyPreloads(
        // it React will not provide any headers
        onHeaders({});
      }
      renderState.headers = null;
      return;
    }
  }

View File

@@ -3841,6 +3841,30 @@ describe('ReactDOMFizzServer', () => {
    });
  });
  it('logs an error if onHeaders throws but continues the render', async () => {
    const errors = [];
    function onError(error) {
      errors.push(error.message);
    }
    function onHeaders(x) {
      throw new Error('bad onHeaders');
    }
    let pipe;
    await act(() => {
      ({pipe} = renderToPipeableStream(<div>hello</div>, {onHeaders, onError}));
    });
    expect(errors).toEqual(['bad onHeaders']);
    await act(() => {
      pipe(writable);
    });
    expect(getVisibleChildren(container)).toEqual(<div>hello</div>);
  });
  describe('error escaping', () => {
    it('escapes error hash, message, and component stack values in directly flushed errors (html escaping)', async () => {
      window.__outlet = {};

View File

@@ -1420,6 +1420,28 @@ describe('ReactDOMFizzStaticBrowser', () => {
    );
  });
  // @gate experimental
  it('logs an error if onHeaders throws but continues the prerender', async () => {
    const errors = [];
    function onError(error) {
      errors.push(error.message);
    }
    function onHeaders(x) {
      throw new Error('bad onHeaders');
    }
    const prerendered = await ReactDOMFizzStatic.prerender(<div>hello</div>, {
      onHeaders,
      onError,
    });
    expect(prerendered.postponed).toBe(null);
    expect(errors).toEqual(['bad onHeaders']);
    await readIntoContainer(prerendered.prelude);
    expect(getVisibleChildren(container)).toEqual(<div>hello</div>);
  });
  // @gate enablePostpone
  it('does not bootstrap again in a resume if it bootstraps', async () => {
    let prerendering = true;

View File

@@ -3220,6 +3220,22 @@ function abortTask(task: Task, request: Request, error: mixed): void {
  }
}
function safelyEmitEarlyPreloads(
  request: Request,
  shellComplete: boolean,
): void {
  try {
    emitEarlyPreloads(
      request.renderState,
      request.resumableState,
      shellComplete,
    );
  } catch (error) {
    // We assume preloads are optimistic and thus non-fatal if errored.
    logRecoverableError(request, error);
  }
}
// I extracted this function out because we want to ensure we consistently emit preloads before
// transitioning to the next request stage and this transition can happen in multiple places in this
// implementation.
@@ -3232,11 +3248,7 @@ function completeShell(request: Request) {
    // we should only be calling completeShell when the shell is complete so we
    // just use a literal here
    const shellComplete = true;
    emitEarlyPreloads(
      request.renderState,
      request.resumableState,
      shellComplete,
    );
    safelyEmitEarlyPreloads(request, shellComplete);
  }
  // We have completed the shell so the shell can't error anymore.
  request.onShellError = noop;
@@ -3259,7 +3271,7 @@ function completeAll(request: Request) {
      : // Prerender Request, we use the state of the root segment
        request.completedRootSegment === null ||
        request.completedRootSegment.status !== POSTPONED;
  emitEarlyPreloads(request.renderState, request.resumableState, shellComplete);
  safelyEmitEarlyPreloads(request, shellComplete);
  const onAllReady = request.onAllReady;
  onAllReady();
}
@@ -4124,7 +4136,7 @@ export function startWork(request: Request): void {
function enqueueEarlyPreloadsAfterInitialWork(request: Request) {
  const shellComplete = request.pendingRootTasks === 0;
  emitEarlyPreloads(request.renderState, request.resumableState, shellComplete);
  safelyEmitEarlyPreloads(request, shellComplete);
}
function enqueueFlush(request: Request): void {
@@ -4168,7 +4180,7 @@ export function prepareForStartFlowingIfBeforeAllReady(request: Request) {
      request.completedRootSegment === null
        ? request.pendingRootTasks === 0
        : request.completedRootSegment.status !== POSTPONED;
  emitEarlyPreloads(request.renderState, request.resumableState, shellComplete);
  safelyEmitEarlyPreloads(request, shellComplete);
}
export function startFlowing(request: Request, destination: Destination): void {