[Flight] model halting as never delivered chunks (#30740)
stacked on: #30731

We've refined the model for halting a prerender. When you abort during a prerender, we now simply omit the rows that would complete the Flight render. This is analogous to prerendering in Fizz, where you must resume the prerender for errors to actually propagate into the postponed holes. We don't have a resume for Flight yet, and it's not entirely clear how that will work; the key insight, however, is that deciding whether the never-resolving rows are an error should be made on the consuming side rather than by the producer.

This PR also reintroduces logging of the abort error/postpone when prerendering, which gives you some indication that something wasn't finished when the prerender was aborted.
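As a rough illustration of the consumer-visible behavior (a minimal sketch, not code from this PR; it assumes the static prerender entrypoint shown in the diff below, and `App`, `webpackMap`, and `errors` are hypothetical placeholders):

  const controller = new AbortController();
  const errors = [];
  const pending = prerenderToNodeStream(<App />, webpackMap, {
    signal: controller.signal,
    onError(err) {
      // The abort reason is still reported here, so you can tell the
      // prerender did not finish.
      errors.push(err);
    },
  });
  controller.abort('boom'); // halts the prerender; no error rows are written
  const {prelude} = await pending;
  // When the prelude is consumed, the rows for unfinished tasks are simply
  // never delivered, so their chunks never resolve. The consumer decides
  // whether to treat that as an error (e.g. by timing out) or to wait for a
  // future resume.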
parent 0fa9476b9b
commit a960b92cb9
packages/react-client/src/ReactFlightClient.js (22 changed lines)
@@ -46,7 +46,6 @@ import {
  enableRefAsProp,
  enableFlightReadableStream,
  enableOwnerStacks,
  enableHalt,
} from 'shared/ReactFeatureFlags';

import {

@@ -1997,20 +1996,6 @@ function resolvePostponeDev(
  }
}

function resolveBlocked(response: Response, id: number): void {
  const chunks = response._chunks;
  const chunk = chunks.get(id);
  if (!chunk) {
    chunks.set(id, createBlockedChunk(response));
  } else if (chunk.status === PENDING) {
    // This chunk as contructed via other means but it is actually a blocked chunk
    // so we update it here. We check the status because it might have been aborted
    // before we attempted to resolve it.
    const blockedChunk: BlockedChunk<mixed> = (chunk: any);
    blockedChunk.status = BLOCKED;
  }
}

function resolveHint<Code: HintCode>(
  response: Response,
  code: Code,

@@ -2637,13 +2622,6 @@ function processFullStringRow(
      }
    }
    // Fallthrough
    case 35 /* "#" */: {
      if (enableHalt) {
        resolveBlocked(response, id);
        return;
      }
    }
    // Fallthrough
    default: /* """ "{" "[" "t" "f" "n" "0" - "9" */ {
      // We assume anything else is JSON.
      resolveModel(response, id, row);
@@ -20,15 +20,13 @@ import type {Thenable} from 'shared/ReactTypes';

import {Readable} from 'stream';

import {enableHalt} from 'shared/ReactFeatureFlags';

import {
  createRequest,
  createPrerenderRequest,
  startWork,
  startFlowing,
  stopFlowing,
  abort,
  halt,
} from 'react-server/src/ReactFlightServer';

import {

@@ -175,35 +173,27 @@ function prerenderToNodeStream(
    resolve({prelude: readable});
  }

  const request = createRequest(
  const request = createPrerenderRequest(
    model,
    moduleBasePath,
    onAllReady,
    onFatalError,
    options ? options.onError : undefined,
    options ? options.identifierPrefix : undefined,
    options ? options.onPostpone : undefined,
    options ? options.temporaryReferences : undefined,
    __DEV__ && options ? options.environmentName : undefined,
    __DEV__ && options ? options.filterStackFrame : undefined,
    onAllReady,
    onFatalError,
  );
  if (options && options.signal) {
    const signal = options.signal;
    if (signal.aborted) {
      const reason = (signal: any).reason;
      if (enableHalt) {
        halt(request, reason);
      } else {
        abort(request, reason);
      }
      abort(request, reason);
    } else {
      const listener = () => {
        const reason = (signal: any).reason;
        if (enableHalt) {
          halt(request, reason);
        } else {
          abort(request, reason);
        }
        abort(request, reason);
        signal.removeEventListener('abort', listener);
      };
      signal.addEventListener('abort', listener);
@@ -12,15 +12,13 @@ import type {Thenable} from 'shared/ReactTypes';
import type {ClientManifest} from './ReactFlightServerConfigTurbopackBundler';
import type {ServerManifest} from 'react-client/src/ReactFlightClientConfig';

import {enableHalt} from 'shared/ReactFeatureFlags';

import {
  createRequest,
  createPrerenderRequest,
  startWork,
  startFlowing,
  stopFlowing,
  abort,
  halt,
} from 'react-server/src/ReactFlightServer';

import {

@@ -134,35 +132,27 @@ function prerender(
    );
    resolve({prelude: stream});
  }
  const request = createRequest(
  const request = createPrerenderRequest(
    model,
    turbopackMap,
    onAllReady,
    onFatalError,
    options ? options.onError : undefined,
    options ? options.identifierPrefix : undefined,
    options ? options.onPostpone : undefined,
    options ? options.temporaryReferences : undefined,
    __DEV__ && options ? options.environmentName : undefined,
    __DEV__ && options ? options.filterStackFrame : undefined,
    onAllReady,
    onFatalError,
  );
  if (options && options.signal) {
    const signal = options.signal;
    if (signal.aborted) {
      const reason = (signal: any).reason;
      if (enableHalt) {
        halt(request, reason);
      } else {
        abort(request, reason);
      }
      abort(request, reason);
    } else {
      const listener = () => {
        const reason = (signal: any).reason;
        if (enableHalt) {
          halt(request, reason);
        } else {
          abort(request, reason);
        }
        abort(request, reason);
        signal.removeEventListener('abort', listener);
      };
      signal.addEventListener('abort', listener);
@@ -20,15 +20,13 @@ import type {Thenable} from 'shared/ReactTypes';

import {Readable} from 'stream';

import {enableHalt} from 'shared/ReactFeatureFlags';

import {
  createRequest,
  createPrerenderRequest,
  startWork,
  startFlowing,
  stopFlowing,
  abort,
  halt,
} from 'react-server/src/ReactFlightServer';

import {

@@ -177,35 +175,27 @@ function prerenderToNodeStream(
    resolve({prelude: readable});
  }

  const request = createRequest(
  const request = createPrerenderRequest(
    model,
    turbopackMap,
    onAllReady,
    onFatalError,
    options ? options.onError : undefined,
    options ? options.identifierPrefix : undefined,
    options ? options.onPostpone : undefined,
    options ? options.temporaryReferences : undefined,
    __DEV__ && options ? options.environmentName : undefined,
    __DEV__ && options ? options.filterStackFrame : undefined,
    onAllReady,
    onFatalError,
  );
  if (options && options.signal) {
    const signal = options.signal;
    if (signal.aborted) {
      const reason = (signal: any).reason;
      if (enableHalt) {
        halt(request, reason);
      } else {
        abort(request, reason);
      }
      abort(request, reason);
    } else {
      const listener = () => {
        const reason = (signal: any).reason;
        if (enableHalt) {
          halt(request, reason);
        } else {
          abort(request, reason);
        }
        abort(request, reason);
        signal.removeEventListener('abort', listener);
      };
      signal.addEventListener('abort', listener);
@@ -2724,7 +2724,7 @@ describe('ReactFlightDOM', () => {
  });

  // @gate enableHalt
  it('serializes unfinished tasks with infinite promises when aborting a prerender', async () => {
  it('does not propagate abort reasons errors when aborting a prerender', async () => {
    let resolveGreeting;
    const greetingPromise = new Promise(resolve => {
      resolveGreeting = resolve;

@@ -2746,6 +2746,7 @@ describe('ReactFlightDOM', () => {
    }

    const controller = new AbortController();
    const errors = [];
    const {pendingResult} = await serverAct(async () => {
      // destructure trick to avoid the act scope from awaiting the returned value
      return {

@@ -2754,15 +2755,20 @@ describe('ReactFlightDOM', () => {
          webpackMap,
          {
            signal: controller.signal,
            onError(err) {
              errors.push(err);
            },
          },
        ),
      };
    });

    controller.abort();
    controller.abort('boom');
    resolveGreeting();
    const {prelude} = await pendingResult;

    expect(errors).toEqual(['boom']);

    const preludeWeb = Readable.toWeb(prelude);
    const response = ReactServerDOMClient.createFromReadableStream(preludeWeb);

@@ -2772,7 +2778,7 @@ describe('ReactFlightDOM', () => {
      return use(response);
    }

    const errors = [];
    errors.length = 0;
    let abortFizz;
    await serverAct(async () => {
      const {pipe, abort} = ReactDOMFizzServer.renderToPipeableStream(

@@ -2788,10 +2794,10 @@ describe('ReactFlightDOM', () => {
    });

    await serverAct(() => {
      abortFizz('boom');
      abortFizz('bam');
    });

    expect(errors).toEqual(['boom']);
    expect(errors).toEqual(['bam']);

    const container = document.createElement('div');
    await readInto(container, fizzReadable);

@@ -2861,7 +2867,7 @@ describe('ReactFlightDOM', () => {
  it('will halt unfinished chunks inside Suspense when aborting a prerender', async () => {
    const controller = new AbortController();
    function ComponentThatAborts() {
      controller.abort();
      controller.abort('boom');
      return null;
    }

@@ -2912,11 +2918,8 @@ describe('ReactFlightDOM', () => {
      };
    });

    controller.abort();

    const {prelude} = await pendingResult;
    expect(errors).toEqual([]);

    expect(errors).toEqual(['boom']);
    const response = ReactServerDOMClient.createFromReadableStream(
      Readable.toWeb(prelude),
    );

@@ -2926,6 +2929,7 @@ describe('ReactFlightDOM', () => {
    function ClientApp() {
      return use(response);
    }
    errors.length = 0;
    let abortFizz;
    await serverAct(async () => {
      const {pipe, abort} = ReactDOMFizzServer.renderToPipeableStream(
@@ -2402,7 +2402,7 @@ describe('ReactFlightDOMBrowser', () => {
  });

  // @gate enableHalt
  it('serializes unfinished tasks with infinite promises when aborting a prerender', async () => {
  it('does not propagate abort reasons errors when aborting a prerender', async () => {
    let resolveGreeting;
    const greetingPromise = new Promise(resolve => {
      resolveGreeting = resolve;

@@ -2424,6 +2424,7 @@ describe('ReactFlightDOMBrowser', () => {
    }

    const controller = new AbortController();
    const errors = [];
    const {pendingResult} = await serverAct(async () => {
      // destructure trick to avoid the act scope from awaiting the returned value
      return {

@@ -2432,14 +2433,18 @@ describe('ReactFlightDOMBrowser', () => {
          webpackMap,
          {
            signal: controller.signal,
            onError(err) {
              errors.push(err);
            },
          },
        ),
      };
    });

    controller.abort();
    controller.abort('boom');
    resolveGreeting();
    const {prelude} = await pendingResult;
    expect(errors).toEqual(['boom']);

    function ClientRoot({response}) {
      return use(response);
@@ -1103,7 +1103,7 @@ describe('ReactFlightDOMEdge', () => {
  });

  // @gate enableHalt
  it('serializes unfinished tasks with infinite promises when aborting a prerender', async () => {
  it('does not propagate abort reasons errors when aborting a prerender', async () => {
    let resolveGreeting;
    const greetingPromise = new Promise(resolve => {
      resolveGreeting = resolve;

@@ -1125,6 +1125,7 @@ describe('ReactFlightDOMEdge', () => {
    }

    const controller = new AbortController();
    const errors = [];
    const {pendingResult} = await serverAct(async () => {
      // destructure trick to avoid the act scope from awaiting the returned value
      return {

@@ -1133,15 +1134,20 @@ describe('ReactFlightDOMEdge', () => {
          webpackMap,
          {
            signal: controller.signal,
            onError(err) {
              errors.push(err);
            },
          },
        ),
      };
    });

    controller.abort();
    controller.abort('boom');
    resolveGreeting();
    const {prelude} = await pendingResult;

    expect(errors).toEqual(['boom']);

    function ClientRoot({response}) {
      return use(response);
    }

@@ -1153,7 +1159,7 @@ describe('ReactFlightDOMEdge', () => {
      },
    });
    const fizzController = new AbortController();
    const errors = [];
    errors.length = 0;
    const ssrStream = await serverAct(() =>
      ReactDOMServer.renderToReadableStream(
        React.createElement(ClientRoot, {response}),

@@ -1165,8 +1171,8 @@ describe('ReactFlightDOMEdge', () => {
        },
      ),
    );
    fizzController.abort('boom');
    expect(errors).toEqual(['boom']);
    fizzController.abort('bam');
    expect(errors).toEqual(['bam']);
    // Should still match the result when parsed
    const result = await readResult(ssrStream);
    const div = document.createElement('div');
@@ -443,7 +443,7 @@ describe('ReactFlightDOMNode', () => {
  });

  // @gate enableHalt
  it('serializes unfinished tasks with infinite promises when aborting a prerender', async () => {
  it('does not propagate abort reasons errors when aborting a prerender', async () => {
    let resolveGreeting;
    const greetingPromise = new Promise(resolve => {
      resolveGreeting = resolve;

@@ -465,6 +465,7 @@ describe('ReactFlightDOMNode', () => {
    }

    const controller = new AbortController();
    const errors = [];
    const {pendingResult} = await serverAct(async () => {
      // destructure trick to avoid the act scope from awaiting the returned value
      return {

@@ -473,14 +474,18 @@ describe('ReactFlightDOMNode', () => {
          webpackMap,
          {
            signal: controller.signal,
            onError(err) {
              errors.push(err);
            },
          },
        ),
      };
    });

    controller.abort();
    controller.abort('boom');
    resolveGreeting();
    const {prelude} = await pendingResult;
    expect(errors).toEqual(['boom']);

    function ClientRoot({response}) {
      return use(response);

@@ -492,7 +497,7 @@ describe('ReactFlightDOMNode', () => {
        moduleLoading: null,
      },
    });
    const errors = [];
    errors.length = 0;
    const ssrStream = await serverAct(() =>
      ReactDOMServer.renderToPipeableStream(
        React.createElement(ClientRoot, {response}),

@@ -503,8 +508,8 @@ describe('ReactFlightDOMNode', () => {
        },
      ),
    );
    ssrStream.abort('boom');
    expect(errors).toEqual(['boom']);
    ssrStream.abort('bam');
    expect(errors).toEqual(['bam']);
    // Should still match the result when parsed
    const result = await readResult(ssrStream);
    const div = document.createElement('div');
@@ -12,15 +12,13 @@ import type {Thenable} from 'shared/ReactTypes';
import type {ClientManifest} from './ReactFlightServerConfigWebpackBundler';
import type {ServerManifest} from 'react-client/src/ReactFlightClientConfig';

import {enableHalt} from 'shared/ReactFeatureFlags';

import {
  createRequest,
  createPrerenderRequest,
  startWork,
  startFlowing,
  stopFlowing,
  abort,
  halt,
} from 'react-server/src/ReactFlightServer';

import {

@@ -134,35 +132,27 @@ function prerender(
    );
    resolve({prelude: stream});
  }
  const request = createRequest(
  const request = createPrerenderRequest(
    model,
    webpackMap,
    onAllReady,
    onFatalError,
    options ? options.onError : undefined,
    options ? options.identifierPrefix : undefined,
    options ? options.onPostpone : undefined,
    options ? options.temporaryReferences : undefined,
    __DEV__ && options ? options.environmentName : undefined,
    __DEV__ && options ? options.filterStackFrame : undefined,
    onAllReady,
    onFatalError,
  );
  if (options && options.signal) {
    const signal = options.signal;
    if (signal.aborted) {
      const reason = (signal: any).reason;
      if (enableHalt) {
        halt(request, reason);
      } else {
        abort(request, reason);
      }
      abort(request, reason);
    } else {
      const listener = () => {
        const reason = (signal: any).reason;
        if (enableHalt) {
          halt(request, reason);
        } else {
          abort(request, reason);
        }
        abort(request, reason);
        signal.removeEventListener('abort', listener);
      };
      signal.addEventListener('abort', listener);
@@ -20,15 +20,13 @@ import type {Thenable} from 'shared/ReactTypes';

import {Readable} from 'stream';

import {enableHalt} from 'shared/ReactFeatureFlags';

import {
  createRequest,
  createPrerenderRequest,
  startWork,
  startFlowing,
  stopFlowing,
  abort,
  halt,
} from 'react-server/src/ReactFlightServer';

import {

@@ -177,35 +175,27 @@ function prerenderToNodeStream(
    resolve({prelude: readable});
  }

  const request = createRequest(
  const request = createPrerenderRequest(
    model,
    webpackMap,
    onAllReady,
    onFatalError,
    options ? options.onError : undefined,
    options ? options.identifierPrefix : undefined,
    options ? options.onPostpone : undefined,
    options ? options.temporaryReferences : undefined,
    __DEV__ && options ? options.environmentName : undefined,
    __DEV__ && options ? options.filterStackFrame : undefined,
    onAllReady,
    onFatalError,
  );
  if (options && options.signal) {
    const signal = options.signal;
    if (signal.aborted) {
      const reason = (signal: any).reason;
      if (enableHalt) {
        halt(request, reason);
      } else {
        abort(request, reason);
      }
      abort(request, reason);
    } else {
      const listener = () => {
        const reason = (signal: any).reason;
        if (enableHalt) {
          halt(request, reason);
        } else {
          abort(request, reason);
        }
        abort(request, reason);
        signal.removeEventListener('abort', listener);
      };
      signal.addEventListener('abort', listener);
packages/react-server/src/ReactFlightServer.js (296 changed lines)
@@ -353,7 +353,8 @@ type Task = {
interface Reference {}

export type Request = {
  status: 0 | 1 | 2 | 3,
  status: 10 | 11 | 12 | 13,
  type: 20 | 21,
  flushScheduled: boolean,
  fatalError: mixed,
  destination: null | Destination,

@@ -425,13 +426,17 @@ function defaultPostponeHandler(reason: string) {
  // Noop
}

const OPEN = 0;
const ABORTING = 1;
const CLOSING = 2;
const CLOSED = 3;
const OPEN = 10;
const ABORTING = 11;
const CLOSING = 12;
const CLOSED = 13;

const RENDER = 20;
const PRERENDER = 21;

function RequestInstance(
  this: $FlowFixMe,
  type: 20 | 21,
  model: ReactClientValue,
  bundlerConfig: ClientManifest,
  onError: void | ((error: mixed) => ?string),

@@ -440,8 +445,8 @@ function RequestInstance(
  temporaryReferences: void | TemporaryReferenceSet,
  environmentName: void | string | (() => string), // DEV-only
  filterStackFrame: void | ((url: string, functionName: string) => boolean), // DEV-only
  onAllReady: void | (() => void),
  onFatalError: void | ((error: mixed) => void),
  onAllReady: () => void,
  onFatalError: (error: mixed) => void,
) {
  if (
    ReactSharedInternals.A !== null &&

@@ -466,6 +471,7 @@ function RequestInstance(
    TaintRegistryPendingRequests.add(cleanupQueue);
  }
  const hints = createHints();
  this.type = type;
  this.status = OPEN;
  this.flushScheduled = false;
  this.fatalError = null;

@@ -493,8 +499,8 @@ function RequestInstance(
  this.onError = onError === undefined ? defaultErrorHandler : onError;
  this.onPostpone =
    onPostpone === undefined ? defaultPostponeHandler : onPostpone;
  this.onAllReady = onAllReady === undefined ? noop : onAllReady;
  this.onFatalError = onFatalError === undefined ? noop : onFatalError;
  this.onAllReady = onAllReady;
  this.onFatalError = onFatalError;

  if (__DEV__) {
    this.environmentName =

@@ -522,7 +528,7 @@ function RequestInstance(
  pingedTasks.push(rootTask);
}

function noop(): void {}
function noop() {}

export function createRequest(
  model: ReactClientValue,

@@ -533,11 +539,38 @@ export function createRequest(
  temporaryReferences: void | TemporaryReferenceSet,
  environmentName: void | string | (() => string), // DEV-only
  filterStackFrame: void | ((url: string, functionName: string) => boolean), // DEV-only
  onAllReady: void | (() => void),
  onFatalError: void | (() => void),
): Request {
  // $FlowFixMe[invalid-constructor]: the shapes are exact here but Flow doesn't like constructors
  return new RequestInstance(
    RENDER,
    model,
    bundlerConfig,
    onError,
    identifierPrefix,
    onPostpone,
    temporaryReferences,
    environmentName,
    filterStackFrame,
    noop,
    noop,
  );
}

export function createPrerenderRequest(
  model: ReactClientValue,
  bundlerConfig: ClientManifest,
  onAllReady: () => void,
  onFatalError: () => void,
  onError: void | ((error: mixed) => ?string),
  identifierPrefix?: string,
  onPostpone: void | ((reason: string) => void),
  temporaryReferences: void | TemporaryReferenceSet,
  environmentName: void | string | (() => string), // DEV-only
  filterStackFrame: void | ((url: string, functionName: string) => boolean), // DEV-only
): Request {
  // $FlowFixMe[invalid-constructor]: the shapes are exact here but Flow doesn't like constructors
  return new RequestInstance(
    PRERENDER,
    model,
    bundlerConfig,
    onError,

@@ -616,13 +649,9 @@ function serializeThenable(
  // We can no longer accept any resolved values
  request.abortableTasks.delete(newTask);
  newTask.status = ABORTED;
  if (enableHalt && request.fatalError === haltSymbol) {
    emitBlockedChunk(request, newTask.id);
  } else {
    const errorId: number = (request.fatalError: any);
    const model = stringify(serializeByValueID(errorId));
    emitModelChunk(request, newTask.id, model);
  }
  const errorId: number = (request.fatalError: any);
  const model = stringify(serializeByValueID(errorId));
  emitModelChunk(request, newTask.id, model);
  return newTask.id;
}
if (typeof thenable.status === 'string') {

@@ -732,7 +761,7 @@ function serializeReadableStream(
}

if (entry.done) {
  request.abortListeners.delete(error);
  request.abortListeners.delete(abortStream);
  const endStreamRow = streamTask.id.toString(16) + ':C\n';
  request.completedRegularChunks.push(stringToChunk(endStreamRow));
  enqueueFlush(request);

@@ -754,34 +783,49 @@ function serializeReadableStream(
    return;
  }
  aborted = true;
  request.abortListeners.delete(error);
  request.abortListeners.delete(abortStream);
  const digest = logRecoverableError(request, reason, streamTask);
  emitErrorChunk(request, streamTask.id, digest, reason);
  enqueueFlush(request);

  let cancelWith: mixed;
  if (enableHalt && request.fatalError === haltSymbol) {
    cancelWith = reason;
  } else if (
  // $FlowFixMe should be able to pass mixed
  reader.cancel(reason).then(error, error);
}
function abortStream(reason: mixed) {
  if (aborted) {
    return;
  }
  aborted = true;
  request.abortListeners.delete(abortStream);
  if (
    enablePostpone &&
    typeof reason === 'object' &&
    reason !== null &&
    (reason: any).$$typeof === REACT_POSTPONE_TYPE
  ) {
    cancelWith = reason;
    const postponeInstance: Postpone = (reason: any);
    logPostpone(request, postponeInstance.message, streamTask);
    emitPostponeChunk(request, streamTask.id, postponeInstance);
    enqueueFlush(request);
    if (enableHalt && request.type === PRERENDER) {
      request.pendingChunks--;
    } else {
      emitPostponeChunk(request, streamTask.id, postponeInstance);
      enqueueFlush(request);
    }
  } else {
    cancelWith = reason;
    const digest = logRecoverableError(request, reason, streamTask);
    emitErrorChunk(request, streamTask.id, digest, reason);
    enqueueFlush(request);
    if (enableHalt && request.type === PRERENDER) {
      request.pendingChunks--;
    } else {
      emitErrorChunk(request, streamTask.id, digest, reason);
      enqueueFlush(request);
    }
  }

  // $FlowFixMe should be able to pass mixed
  reader.cancel(cancelWith).then(error, error);
  reader.cancel(reason).then(error, error);
}

request.abortListeners.add(error);
request.abortListeners.add(abortStream);
reader.read().then(progress, error);
return serializeByValueID(streamTask.id);
}

@@ -837,7 +881,7 @@ function serializeAsyncIterable(
}

if (entry.done) {
  request.abortListeners.delete(error);
  request.abortListeners.delete(abortIterable);
  let endStreamRow;
  if (entry.value === undefined) {
    endStreamRow = streamTask.id.toString(16) + ':C\n';

@@ -881,34 +925,52 @@ function serializeAsyncIterable(
    return;
  }
  aborted = true;
  request.abortListeners.delete(error);
  let throwWith: mixed;
  if (enableHalt && request.fatalError === haltSymbol) {
    throwWith = reason;
  } else if (
  request.abortListeners.delete(abortIterable);
  const digest = logRecoverableError(request, reason, streamTask);
  emitErrorChunk(request, streamTask.id, digest, reason);
  enqueueFlush(request);
  if (typeof (iterator: any).throw === 'function') {
    // The iterator protocol doesn't necessarily include this but a generator do.
    // $FlowFixMe should be able to pass mixed
    iterator.throw(reason).then(error, error);
  }
}
function abortIterable(reason: mixed) {
  if (aborted) {
    return;
  }
  aborted = true;
  request.abortListeners.delete(abortIterable);
  if (
    enablePostpone &&
    typeof reason === 'object' &&
    reason !== null &&
    (reason: any).$$typeof === REACT_POSTPONE_TYPE
  ) {
    throwWith = reason;
    const postponeInstance: Postpone = (reason: any);
    logPostpone(request, postponeInstance.message, streamTask);
    emitPostponeChunk(request, streamTask.id, postponeInstance);
    enqueueFlush(request);
    if (enableHalt && request.type === PRERENDER) {
      request.pendingChunks--;
    } else {
      emitPostponeChunk(request, streamTask.id, postponeInstance);
      enqueueFlush(request);
    }
  } else {
    throwWith = reason;
    const digest = logRecoverableError(request, reason, streamTask);
    emitErrorChunk(request, streamTask.id, digest, reason);
    enqueueFlush(request);
    if (enableHalt && request.type === PRERENDER) {
      request.pendingChunks--;
    } else {
      emitErrorChunk(request, streamTask.id, digest, reason);
      enqueueFlush(request);
    }
  }
  if (typeof (iterator: any).throw === 'function') {
    // The iterator protocol doesn't necessarily include this but a generator do.
    // $FlowFixMe should be able to pass mixed
    iterator.throw(throwWith).then(error, error);
    iterator.throw(reason).then(error, error);
  }
}
request.abortListeners.add(error);
request.abortListeners.add(abortIterable);
if (__DEV__) {
  callIteratorInDEV(iterator, progress, error);
} else {

@@ -2101,7 +2163,7 @@ function serializeBlob(request: Request, blob: Blob): string {
  return;
}
if (entry.done) {
  request.abortListeners.delete(error);
  request.abortListeners.delete(abortBlob);
  aborted = true;
  pingTask(request, newTask);
  return;

@@ -2111,28 +2173,52 @@ function serializeBlob(request: Request, blob: Blob): string {
  // $FlowFixMe[incompatible-call]
  return reader.read().then(progress).catch(error);
}

function error(reason: mixed) {
  if (aborted) {
    return;
  }
  aborted = true;
  request.abortListeners.delete(error);
  let cancelWith: mixed;
  if (enableHalt && request.fatalError === haltSymbol) {
    cancelWith = reason;
  request.abortListeners.delete(abortBlob);
  const digest = logRecoverableError(request, reason, newTask);
  emitErrorChunk(request, newTask.id, digest, reason);
  enqueueFlush(request);
  // $FlowFixMe should be able to pass mixed
  reader.cancel(reason).then(error, error);
}
function abortBlob(reason: mixed) {
  if (aborted) {
    return;
  }
  aborted = true;
  request.abortListeners.delete(abortBlob);
  if (
    enablePostpone &&
    typeof reason === 'object' &&
    reason !== null &&
    (reason: any).$$typeof === REACT_POSTPONE_TYPE
  ) {
    const postponeInstance: Postpone = (reason: any);
    logPostpone(request, postponeInstance.message, newTask);
    if (enableHalt && request.type === PRERENDER) {
      request.pendingChunks--;
    } else {
      emitPostponeChunk(request, newTask.id, postponeInstance);
      enqueueFlush(request);
    }
  } else {
    cancelWith = reason;
    const digest = logRecoverableError(request, reason, newTask);
    emitErrorChunk(request, newTask.id, digest, reason);
    request.abortableTasks.delete(newTask);
    enqueueFlush(request);
    if (enableHalt && request.type === PRERENDER) {
      request.pendingChunks--;
    } else {
      emitErrorChunk(request, newTask.id, digest, reason);
      enqueueFlush(request);
    }
  }
  // $FlowFixMe should be able to pass mixed
  reader.cancel(cancelWith).then(error, error);
  reader.cancel(reason).then(error, error);
}

request.abortListeners.add(error);
request.abortListeners.add(abortBlob);

// $FlowFixMe[incompatible-call]
reader.read().then(progress).catch(error);

@@ -3001,12 +3087,6 @@ function emitPostponeChunk(
  request.completedErrorChunks.push(processedChunk);
}

function emitBlockedChunk(request: Request, id: number): void {
  const row = serializeRowHeader('#', id) + '\n';
  const processedChunk = stringToChunk(row);
  request.completedErrorChunks.push(processedChunk);
}

function emitErrorChunk(
  request: Request,
  id: number,

@@ -3755,13 +3835,9 @@ function retryTask(request: Request, task: Task): void {
  if (request.status === ABORTING) {
    request.abortableTasks.delete(task);
    task.status = ABORTED;
    if (enableHalt && request.fatalError === haltSymbol) {
      emitBlockedChunk(request, task.id);
    } else {
      const errorId: number = (request.fatalError: any);
      const model = stringify(serializeByValueID(errorId));
      emitModelChunk(request, task.id, model);
    }
    const errorId: number = (request.fatalError: any);
    const model = stringify(serializeByValueID(errorId));
    emitModelChunk(request, task.id, model);
    return;
  }
  // Something suspended again, let's pick it back up later.

@@ -3783,13 +3859,9 @@ function retryTask(request: Request, task: Task): void {
  if (request.status === ABORTING) {
    request.abortableTasks.delete(task);
    task.status = ABORTED;
    if (enableHalt && request.fatalError === haltSymbol) {
      emitBlockedChunk(request, task.id);
    } else {
      const errorId: number = (request.fatalError: any);
      const model = stringify(serializeByValueID(errorId));
      emitModelChunk(request, task.id, model);
    }
    const errorId: number = (request.fatalError: any);
    const model = stringify(serializeByValueID(errorId));
    emitModelChunk(request, task.id, model);
    return;
  }

@@ -3844,7 +3916,8 @@ function performWork(request: Request): void {
      // We can ping after completing but if this happens there already
      // wouldn't be any abortable tasks. So we only call allReady after
      // the work which actually completed the last pending task
      allReady(request);
      const onAllReady = request.onAllReady;
      onAllReady();
    }
  } catch (error) {
    logRecoverableError(request, error, null);

@@ -4007,17 +4080,17 @@ export function stopFlowing(request: Request): void {
  request.destination = null;
}

// This is called to early terminate a request. It creates an error at all pending tasks.
export function abort(request: Request, reason: mixed): void {
  try {
    if (request.status === OPEN) {
      request.status = ABORTING;
    }
    const abortableTasks = request.abortableTasks;
    // We have tasks to abort. We'll emit one error row and then emit a reference
    // to that row from every row that's still remaining.
    if (abortableTasks.size > 0) {
      request.pendingChunks++;
      // We have tasks to abort. We'll emit one error row and then emit a reference
      // to that row from every row that's still remaining if we are rendering. If we
      // are prerendering (and halt semantics are enabled) we will refer to an error row
      // but not actually emit it so the reciever can at that point rather than error.
      const errorId = request.nextChunkId++;
      request.fatalError = errorId;
      if (

@@ -4028,7 +4101,11 @@ export function abort(request: Request, reason: mixed): void {
      ) {
        const postponeInstance: Postpone = (reason: any);
        logPostpone(request, postponeInstance.message, null);
        emitPostponeChunk(request, errorId, postponeInstance);
        if (!enableHalt || request.type === PRERENDER) {
          // When prerendering with halt semantics we omit the referred to postpone.
          request.pendingChunks++;
          emitPostponeChunk(request, errorId, postponeInstance);
        }
      } else {
        const error =
          reason === undefined

@@ -4043,11 +4120,16 @@ export function abort(request: Request, reason: mixed): void {
            )
          : reason;
        const digest = logRecoverableError(request, error, null);
        emitErrorChunk(request, errorId, digest, error);
        if (!enableHalt || request.type === RENDER) {
          // When prerendering with halt semantics we omit the referred to error.
          request.pendingChunks++;
          emitErrorChunk(request, errorId, digest, error);
        }
      }
      abortableTasks.forEach(task => abortTask(task, request, errorId));
      abortableTasks.clear();
      allReady(request);
      const onAllReady = request.onAllReady;
      onAllReady();
    }
    const abortListeners = request.abortListeners;
    if (abortListeners.size > 0) {

@@ -4087,43 +4169,3 @@ export function abort(request: Request, reason: mixed): void {
    fatalError(request, error);
  }
}

const haltSymbol = Symbol('halt');

// This is called to stop rendering without erroring. All unfinished work is represented Promises
// that never resolve.
export function halt(request: Request, reason: mixed): void {
  try {
    if (request.status === OPEN) {
      request.status = ABORTING;
    }
    request.fatalError = haltSymbol;
    const abortableTasks = request.abortableTasks;
    // We have tasks to abort. We'll emit one error row and then emit a reference
    // to that row from every row that's still remaining.
    if (abortableTasks.size > 0) {
      request.pendingChunks++;
      const errorId = request.nextChunkId++;
      emitBlockedChunk(request, errorId);
      abortableTasks.forEach(task => abortTask(task, request, errorId));
      abortableTasks.clear();
      allReady(request);
    }
    const abortListeners = request.abortListeners;
    if (abortListeners.size > 0) {
      abortListeners.forEach(callback => callback(reason));
      abortListeners.clear();
    }
    if (request.destination !== null) {
      flushCompletedChunks(request, request.destination);
    }
  } catch (error) {
    logRecoverableError(request, error, null);
    fatalError(request, error);
  }
}

function allReady(request: Request) {
  const onAllReady = request.onAllReady;
  onAllReady();
}