react/fixtures/ssr/server/render.js
Sebastian Markbåge 18212ca960
[Fizz] Outline if a boundary would add too many bytes to the next completion (#33029)
Follow up to #33027.

This enhances the heuristic so that we accumulate the size of the currently written boundaries, starting from the size of the root (minus the preamble) for the shell.

This ensures that if you have many small boundaries, they don't all keep getting inlined. For example, you can wrap each paragraph of a document in a Suspense boundary to regain document streaming capabilities if that's what you want.
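A minimal sketch of that pattern (illustrative only, not part of this PR; `Paragraph` is a hypothetical component that suspends while its content loads):

```jsx
import {Suspense} from 'react';
import Paragraph from './Paragraph'; // hypothetical component that suspends on its data

// Wrapping every paragraph in its own Suspense boundary lets Fizz stream each
// one independently instead of blocking the document on the slowest paragraph.
function Article({paragraphIds}) {
  return (
    <article>
      {paragraphIds.map(id => (
        <Suspense key={id} fallback={<p>Loading…</p>}>
          <Paragraph id={id} />
        </Suspense>
      ))}
    </article>
  );
}
```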

However, one consideration is whether it's worth producing a fallback at all. If the fallback is something like `null`, it's free, but if it's a whole alternative page, it's not. It's also possible to have completely useless Suspense boundaries, such as when you nest several directly inside each other. So this requires at least 500 bytes of content for a boundary to be worth outlining at all. The content also can't be too small, because then, for example, a long list of short paragraphs can never be outlined.
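For instance (an illustrative snippet, not from this PR; `Comments` is a hypothetical component that suspends on its data), the outer boundary here is exactly that kind of useless boundary:

```jsx
import {Suspense} from 'react';
import Comments from './Comments'; // hypothetical component that suspends on its data

// The outer boundary wraps no content of its own, so outlining it would only
// produce an extra fallback without any content gain.
function CommentsSection() {
  return (
    <Suspense fallback={<p>Loading comments…</p>}>
      <Suspense fallback={<p>Loading comments…</p>}>
        <Comments />
      </Suspense>
    </Suspense>
  );
}
```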

In the fixture I straddle this limit, so some paragraphs are too small to be considered. An unfortunate effect of that is that some of them end up not being outlined, which means they appear out of order. SuspenseList is supposed to address that, but it's still unfortunate.

The limit is still fairly high, though, so it's unlikely that by default you'd start outlining anything within the viewport at all. I had to reduce the `progressiveChunkSize` by an order of magnitude in my fixture to try it out properly.
2025-04-29 19:13:28 -04:00


import React from 'react';
import {renderToPipeableStream} from 'react-dom/server';
import {Writable} from 'stream';
import App from '../src/components/App';

let assets;
if (process.env.NODE_ENV === 'development') {
  // Use the bundle from create-react-app's server in development mode.
  assets = {
    'main.js': '/static/js/bundle.js',
    'main.css': '',
  };
} else {
  assets = require('../build/asset-manifest.json');
}
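
// Simulates a slow network: forwards the rendered stream to the real response
// in 100-byte slices, waiting `delay` ms before each slice.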
class ThrottledWritable extends Writable {
  constructor(destination) {
    super();
    this.destination = destination;
    this.delay = 10;
  }
  _write(chunk, encoding, callback) {
    let o = 0;
    const write = () => {
      this.destination.write(chunk.slice(o, o + 100), encoding, x => {
        o += 100;
        if (o < chunk.length) {
          setTimeout(write, this.delay);
        } else {
          callback(x);
        }
      });
    };
    setTimeout(write, this.delay);
  }
  _final(callback) {
    setTimeout(() => {
      this.destination.end(callback);
    }, this.delay);
  }
}

export default function render(url, res) {
  res.socket.on('error', error => {
    // Log fatal errors
    console.error('Fatal', error);
  });
  let didError = false;
  const {pipe, abort} = renderToPipeableStream(<App assets={assets} />, {
    bootstrapScripts: [assets['main.js']],
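    // Much smaller than React's default chunk size so that outlining kicks in
    // even on this small demo page (see the commit message above).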
    progressiveChunkSize: 1024,
    onShellReady() {
      // If something errored before we started streaming, we set the error code appropriately.
      res.statusCode = didError ? 500 : 200;
      res.setHeader('Content-type', 'text/html');
      // To test the actual chunks taking time to load over the network, we throttle
      // the stream a bit.
      const throttledResponse = new ThrottledWritable(res);
      pipe(throttledResponse);
    },
    onShellError(x) {
      // Something errored before we could complete the shell so we emit an alternative shell.
      res.statusCode = 500;
      res.send('<!doctype><p>Error</p>');
    },
    onError(x) {
      didError = true;
      console.error(x);
    },
  });
  // Abandon and switch to client rendering after 5 seconds.
  // Try lowering this to see the client recover.
  setTimeout(abort, 5000);
}