[ci] Cleanup forked build files

Unforks these scripts now that we are fully migrated to GitHub Actions.

ghstack-source-id: e1e15452f2d2e178a5b56203ebd0b42151e6a9ba
Pull Request resolved: https://github.com/facebook/react/pull/30506
Lauren Tan 2024-07-29 18:51:17 -04:00
parent 6b82f3c904
commit 70885cfebe
11 changed files with 146 additions and 1128 deletions


@@ -37,7 +37,7 @@ jobs:
- name: Download react-devtools artifacts for base revision
run: |
git fetch origin main
GH_TOKEN=${{ github.token }} scripts/release/download-experimental-build-ghaction.js --commit=$(git rev-parse origin/main)
GH_TOKEN=${{ github.token }} scripts/release/download-experimental-build.js --commit=$(git rev-parse origin/main)
- name: Display structure of build
run: ls -R build
- name: Archive build


@@ -549,7 +549,7 @@ jobs:
- name: Download artifacts for base revision
run: |
git fetch origin main
GH_TOKEN=${{ github.token }} scripts/release/download-experimental-build-ghaction.js --commit=$(git rev-parse origin/main)
GH_TOKEN=${{ github.token }} scripts/release/download-experimental-build.js --commit=$(git rev-parse origin/main)
mv ./build ./base-build
# TODO: The `download-experimental-build` script copies the npm
# packages into the `node_modules` directory. This is a historical


@@ -72,7 +72,7 @@ jobs:
working-directory: scripts/release
- name: Download artifacts for base revision
run: |
GH_TOKEN=${{ github.token }} scripts/release/download-experimental-build-ghaction.js --commit=${{ github.event.workflow_run.head_sha }}
GH_TOKEN=${{ github.token }} scripts/release/download-experimental-build.js --commit=${{ github.event.workflow_run.head_sha }}
- name: Display structure of build
run: ls -R build
- name: Strip @license from eslint plugin and react-refresh


@@ -1,59 +0,0 @@
#!/usr/bin/env node
'use strict';
const {join, relative} = require('path');
const {handleError} = require('./utils');
const yargs = require('yargs');
const clear = require('clear');
const theme = require('./theme');
const {
downloadBuildArtifacts,
} = require('./shared-commands/download-build-artifacts-ghaction');
const argv = yargs.wrap(yargs.terminalWidth()).options({
releaseChannel: {
alias: 'r',
describe: 'Download the given release channel.',
requiresArg: true,
type: 'string',
choices: ['experimental', 'stable'],
default: 'experimental',
},
commit: {
alias: 'c',
describe: 'Commit hash to download.',
requiresArg: true,
demandOption: true,
type: 'string',
},
}).argv;
function printSummary(commit) {
const commandPath = relative(
process.env.PWD,
join(__dirname, '../download-experimental-build-ghaction.js')
);
clear();
const message = theme`
{caution An experimental build has been downloaded!}
You can download this build again by running:
{path ${commandPath}} --commit={commit ${commit}}
`;
console.log(message.replace(/\n +/g, '\n').trim());
}
const main = async () => {
try {
await downloadBuildArtifacts(argv.commit, argv.releaseChannel);
printSummary(argv.commit);
} catch (error) {
handleError(error);
}
};
main();


@@ -2,31 +2,58 @@
'use strict';
const {join} = require('path');
const {join, relative} = require('path');
const {handleError} = require('./utils');
const yargs = require('yargs');
const clear = require('clear');
const theme = require('./theme');
const {
addDefaultParamValue,
getPublicPackages,
handleError,
} = require('./utils');
downloadBuildArtifacts,
} = require('./shared-commands/download-build-artifacts');
const downloadBuildArtifacts = require('./shared-commands/download-build-artifacts');
const parseParams = require('./shared-commands/parse-params');
const printSummary = require('./download-experimental-build-commands/print-summary');
const argv = yargs.wrap(yargs.terminalWidth()).options({
releaseChannel: {
alias: 'r',
describe: 'Download the given release channel.',
requiresArg: true,
type: 'string',
choices: ['experimental', 'stable'],
default: 'experimental',
},
commit: {
alias: 'c',
describe: 'Commit hash to download.',
requiresArg: true,
demandOption: true,
type: 'string',
},
}).argv;
const run = async () => {
function printSummary(commit) {
const commandPath = relative(
process.env.PWD,
join(__dirname, '../download-experimental-build.js')
);
clear();
const message = theme`
{caution An experimental build has been downloaded!}
You can download this build again by running:
{path ${commandPath}} --commit={commit ${commit}}
`;
console.log(message.replace(/\n +/g, '\n').trim());
}
const main = async () => {
try {
addDefaultParamValue('-r', '--releaseChannel', 'experimental');
const params = await parseParams();
params.cwd = join(__dirname, '..', '..');
params.packages = await getPublicPackages(true);
await downloadBuildArtifacts(params);
printSummary(params);
await downloadBuildArtifacts(argv.commit, argv.releaseChannel);
printSummary(argv.commit);
} catch (error) {
handleError(error);
}
};
run();
main();


@@ -7,7 +7,7 @@ const {addDefaultParamValue, handleError} = require('./utils');
const {
downloadBuildArtifacts,
} = require('./shared-commands/download-build-artifacts-ghaction');
} = require('./shared-commands/download-build-artifacts');
const parseParams = require('./shared-commands/parse-params');
const printPrereleaseSummary = require('./shared-commands/print-prerelease-summary');
const testPackagingFixture = require('./shared-commands/test-packaging-fixture');


@@ -1,136 +0,0 @@
'use strict';
const {join} = require('path');
const theme = require('../theme');
const {exec} = require('child-process-promise');
const {existsSync} = require('fs');
const {logPromise} = require('../utils');
if (process.env.GH_TOKEN == null) {
console.log(
theme`{error Expected GH_TOKEN to be provided as an env variable}`
);
process.exit(1);
}
const OWNER = 'facebook';
const REPO = 'react';
const WORKFLOW_ID = 'runtime_build_and_test.yml';
const GITHUB_HEADERS = `
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${process.env.GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28"`.trim();
function getWorkflowId() {
if (
existsSync(join(__dirname, `../../../.github/workflows/${WORKFLOW_ID}`))
) {
return WORKFLOW_ID;
} else {
throw new Error(
`Incorrect workflow ID: .github/workflows/${WORKFLOW_ID} does not exist. Please check the name of the workflow being downloaded from.`
);
}
}
async function getWorkflowRunId(commit) {
const res = await exec(
`curl -L ${GITHUB_HEADERS} https://api.github.com/repos/${OWNER}/${REPO}/actions/workflows/${getWorkflowId()}/runs?head_sha=${commit}&branch=main&exclude_pull_requests=true`
);
const json = JSON.parse(res.stdout);
let workflowRun;
if (json.total_count === 1) {
workflowRun = json.workflow_runs[0];
} else {
workflowRun = json.workflow_runs.find(
run => run.head_sha === commit && run.head_branch === 'main'
);
}
if (workflowRun == null || workflowRun.id == null) {
console.log(
theme`{error The workflow run for the specified commit (${commit}) could not be found.}`
);
process.exit(1);
}
return workflowRun.id;
}
async function getArtifact(workflowRunId, artifactName) {
const res = await exec(
`curl -L ${GITHUB_HEADERS} https://api.github.com/repos/${OWNER}/${REPO}/actions/runs/${workflowRunId}/artifacts?per_page=100&name=${artifactName}`
);
const json = JSON.parse(res.stdout);
let artifact;
if (json.total_count === 1) {
artifact = json.artifacts[0];
} else {
artifact = json.artifacts.find(
_artifact => _artifact.name === artifactName
);
}
if (artifact == null) {
console.log(
theme`{error The specified workflow run (${workflowRunId}) does not contain any build artifacts.}`
);
process.exit(1);
}
return artifact;
}
async function downloadArtifactsFromGitHub(commit, releaseChannel) {
const workflowRunId = await getWorkflowRunId(commit);
const artifact = await getArtifact(workflowRunId, 'artifacts_combined');
// Download and extract artifact
const cwd = join(__dirname, '..', '..', '..');
await exec(`rm -rf ./build`, {cwd});
await exec(
`curl -L ${GITHUB_HEADERS} ${artifact.archive_download_url} \
> a.zip && unzip a.zip -d . && rm a.zip build2.tgz && tar -xvzf build.tgz && rm build.tgz`,
{
cwd,
}
);
// Copy to staging directory
// TODO: Consider staging the release in a different directory from the CI
// build artifacts: `./build/node_modules` -> `./staged-releases`
if (!existsSync(join(cwd, 'build'))) {
await exec(`mkdir ./build`, {cwd});
} else {
await exec(`rm -rf ./build/node_modules`, {cwd});
}
let sourceDir;
// TODO: Rename release channel to `next`
if (releaseChannel === 'stable') {
sourceDir = 'oss-stable';
} else if (releaseChannel === 'experimental') {
sourceDir = 'oss-experimental';
} else if (releaseChannel === 'rc') {
sourceDir = 'oss-stable-rc';
} else if (releaseChannel === 'latest') {
sourceDir = 'oss-stable-semver';
} else {
console.error('Internal error: Invalid release channel: ' + releaseChannel);
process.exit(releaseChannel);
}
await exec(`cp -r ./build/${sourceDir} ./build/node_modules`, {cwd});
}
async function downloadBuildArtifacts(commit, releaseChannel) {
const label = theme`commit {commit ${commit}})`;
return logPromise(
downloadArtifactsFromGitHub(commit, releaseChannel),
theme`Downloading artifacts from GitHub for ${label}`
);
}
module.exports = {
downloadBuildArtifacts,
};


@@ -1,36 +1,98 @@
#!/usr/bin/env node
'use strict';
const {join} = require('path');
const theme = require('../theme');
const {exec} = require('child-process-promise');
const {existsSync} = require('fs');
const {join} = require('path');
const {getArtifactsList, logPromise} = require('../utils');
const theme = require('../theme');
const {logPromise} = require('../utils');
const run = async ({build, cwd, releaseChannel}) => {
const artifacts = await getArtifactsList(build);
const buildArtifacts = artifacts.find(entry =>
entry.path.endsWith('build.tgz')
if (process.env.GH_TOKEN == null) {
console.log(
theme`{error Expected GH_TOKEN to be provided as an env variable}`
);
process.exit(1);
}
const OWNER = 'facebook';
const REPO = 'react';
const WORKFLOW_ID = 'runtime_build_and_test.yml';
const GITHUB_HEADERS = `
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${process.env.GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28"`.trim();
function getWorkflowId() {
if (
existsSync(join(__dirname, `../../../.github/workflows/${WORKFLOW_ID}`))
) {
return WORKFLOW_ID;
} else {
throw new Error(
`Incorrect workflow ID: .github/workflows/${WORKFLOW_ID} does not exist. Please check the name of the workflow being downloaded from.`
);
}
}
async function getWorkflowRunId(commit) {
const res = await exec(
`curl -L ${GITHUB_HEADERS} https://api.github.com/repos/${OWNER}/${REPO}/actions/workflows/${getWorkflowId()}/runs?head_sha=${commit}&branch=main&exclude_pull_requests=true`
);
if (!buildArtifacts) {
const json = JSON.parse(res.stdout);
let workflowRun;
if (json.total_count === 1) {
workflowRun = json.workflow_runs[0];
} else {
workflowRun = json.workflow_runs.find(
run => run.head_sha === commit && run.head_branch === 'main'
);
}
if (workflowRun == null || workflowRun.id == null) {
console.log(
theme`{error The specified build (${build}) does not contain any build artifacts.}`
theme`{error The workflow run for the specified commit (${commit}) could not be found.}`
);
process.exit(1);
}
// Download and extract artifact
const {CIRCLE_CI_API_TOKEN} = process.env;
let header = '';
// Add Circle CI API token to request header if available.
if (CIRCLE_CI_API_TOKEN != null) {
header = '-H "Circle-Token: ${CIRCLE_CI_API_TOKEN}" ';
return workflowRun.id;
}
async function getArtifact(workflowRunId, artifactName) {
const res = await exec(
`curl -L ${GITHUB_HEADERS} https://api.github.com/repos/${OWNER}/${REPO}/actions/runs/${workflowRunId}/artifacts?per_page=100&name=${artifactName}`
);
const json = JSON.parse(res.stdout);
let artifact;
if (json.total_count === 1) {
artifact = json.artifacts[0];
} else {
artifact = json.artifacts.find(
_artifact => _artifact.name === artifactName
);
}
if (artifact == null) {
console.log(
theme`{error The specified workflow run (${workflowRunId}) does not contain any build artifacts.}`
);
process.exit(1);
}
return artifact;
}
async function downloadArtifactsFromGitHub(commit, releaseChannel) {
const workflowRunId = await getWorkflowRunId(commit);
const artifact = await getArtifact(workflowRunId, 'artifacts_combined');
// Download and extract artifact
const cwd = join(__dirname, '..', '..', '..');
await exec(`rm -rf ./build`, {cwd});
await exec(
`curl -L $(fwdproxy-config curl) ${buildArtifacts.url} ${header}| tar -xvz`,
`curl -L ${GITHUB_HEADERS} ${artifact.archive_download_url} \
> a.zip && unzip a.zip -d . && rm a.zip build2.tgz && tar -xvzf build.tgz && rm build.tgz`,
{
cwd,
}
@@ -59,17 +121,16 @@ const run = async ({build, cwd, releaseChannel}) => {
process.exit(releaseChannel);
}
await exec(`cp -r ./build/${sourceDir} ./build/node_modules`, {cwd});
};
}
module.exports = async ({build, commit, cwd, releaseChannel}) => {
let buildLabel;
if (commit !== null) {
buildLabel = theme`commit {commit ${commit}} (build {build ${build}})`;
} else {
buildLabel = theme`build {build ${build}}`;
}
async function downloadBuildArtifacts(commit, releaseChannel) {
const label = theme`commit {commit ${commit}})`;
return logPromise(
run({build, cwd, releaseChannel}),
theme`Downloading artifacts from Circle CI for ${buildLabel}`
downloadArtifactsFromGitHub(commit, releaseChannel),
theme`Downloading artifacts from GitHub for ${label}`
);
}
module.exports = {
downloadBuildArtifacts,
};


@@ -16,7 +16,6 @@ const {
rcNumber,
} = require('../../ReactVersions');
const yargs = require('yargs');
const {buildEverything} = require('./build-ghaction');
const Bundles = require('./bundles');
// Runs the build script for both stable and experimental release channels,
@@ -111,7 +110,7 @@ const argv = yargs.wrap(yargs.terminalWidth()).options({
async function main() {
if (argv.ci === 'github') {
await buildEverything(argv.index, argv.total);
buildForChannel(argv.releaseChannel, argv.total, argv.index);
switch (argv.releaseChannel) {
case 'stable': {
processStable('./build');
@@ -147,7 +146,7 @@ async function main() {
}
}
function buildForChannel(channel) {
function buildForChannel(channel, total, index) {
const {status} = spawnSync(
'node',
['./scripts/rollup/build.js', ...process.argv.slice(2)],
@@ -156,6 +155,8 @@ function buildForChannel(channel) {
env: {
...process.env,
RELEASE_CHANNEL: channel,
CI_TOTAL: total,
CI_INDEX: index,
},
}
);


@@ -1,875 +0,0 @@
'use strict';
const rollup = require('rollup');
const babel = require('@rollup/plugin-babel').babel;
const closure = require('./plugins/closure-plugin');
const flowRemoveTypes = require('flow-remove-types');
const prettier = require('rollup-plugin-prettier');
const replace = require('@rollup/plugin-replace');
const stripBanner = require('rollup-plugin-strip-banner');
const chalk = require('chalk');
const resolve = require('@rollup/plugin-node-resolve').nodeResolve;
const fs = require('fs');
const argv = require('minimist')(process.argv.slice(2));
const Modules = require('./modules');
const Bundles = require('./bundles');
const Stats = require('./stats');
const Sync = require('./sync');
const sizes = require('./plugins/sizes-plugin');
const useForks = require('./plugins/use-forks-plugin');
const dynamicImports = require('./plugins/dynamic-imports');
const Packaging = require('./packaging');
const {asyncRimRaf} = require('./utils');
const codeFrame = require('@babel/code-frame');
const Wrappers = require('./wrappers');
const RELEASE_CHANNEL = process.env.RELEASE_CHANNEL;
// Default to building in experimental mode. If the release channel is set via
// an environment variable, then check if it's "experimental".
const __EXPERIMENTAL__ =
typeof RELEASE_CHANNEL === 'string'
? RELEASE_CHANNEL === 'experimental'
: true;
// Errors in promises should be fatal.
let loggedErrors = new Set();
process.on('unhandledRejection', err => {
if (loggedErrors.has(err)) {
// No need to print it twice.
process.exit(1);
}
throw err;
});
const {
NODE_ES2015,
ESM_DEV,
ESM_PROD,
NODE_DEV,
NODE_PROD,
NODE_PROFILING,
BUN_DEV,
BUN_PROD,
FB_WWW_DEV,
FB_WWW_PROD,
FB_WWW_PROFILING,
RN_OSS_DEV,
RN_OSS_PROD,
RN_OSS_PROFILING,
RN_FB_DEV,
RN_FB_PROD,
RN_FB_PROFILING,
BROWSER_SCRIPT,
} = Bundles.bundleTypes;
const {getFilename} = Bundles;
function parseRequestedNames(names, toCase) {
let result = [];
for (let i = 0; i < names.length; i++) {
let splitNames = names[i].split(',');
for (let j = 0; j < splitNames.length; j++) {
let name = splitNames[j].trim();
if (!name) {
continue;
}
if (toCase === 'uppercase') {
name = name.toUpperCase();
} else if (toCase === 'lowercase') {
name = name.toLowerCase();
}
result.push(name);
}
}
return result;
}
const argvType = Array.isArray(argv.type) ? argv.type : [argv.type];
const requestedBundleTypes = argv.type
? parseRequestedNames(argvType, 'uppercase')
: [];
const requestedBundleNames = parseRequestedNames(argv._, 'lowercase');
const forcePrettyOutput = argv.pretty;
const isWatchMode = argv.watch;
const syncFBSourcePath = argv['sync-fbsource'];
const syncWWWPath = argv['sync-www'];
// Non-ES2015 stuff applied before closure compiler.
const babelPlugins = [
// These plugins filter out non-ES2015.
['@babel/plugin-proposal-class-properties', {loose: true}],
'syntax-trailing-function-commas',
// These use loose mode which avoids embedding a runtime.
// TODO: Remove object spread from the source. Prefer Object.assign instead.
[
'@babel/plugin-proposal-object-rest-spread',
{loose: true, useBuiltIns: true},
],
['@babel/plugin-transform-template-literals', {loose: true}],
// TODO: Remove for...of from the source. It requires a runtime to be embedded.
'@babel/plugin-transform-for-of',
// TODO: Remove array spread from the source. Prefer .apply instead.
['@babel/plugin-transform-spread', {loose: true, useBuiltIns: true}],
'@babel/plugin-transform-parameters',
// TODO: Remove array destructuring from the source. Requires runtime.
['@babel/plugin-transform-destructuring', {loose: true, useBuiltIns: true}],
// Transform Object spread to shared/assign
require('../babel/transform-object-assign'),
];
const babelToES5Plugins = [
// These plugins transform DEV mode. Closure compiler deals with these in PROD.
'@babel/plugin-transform-literals',
'@babel/plugin-transform-arrow-functions',
'@babel/plugin-transform-block-scoped-functions',
'@babel/plugin-transform-shorthand-properties',
'@babel/plugin-transform-computed-properties',
['@babel/plugin-transform-block-scoping', {throwIfClosureRequired: true}],
];
function getBabelConfig(
updateBabelOptions,
bundleType,
packageName,
externals,
isDevelopment,
bundle
) {
const canAccessReactObject =
packageName === 'react' || externals.indexOf('react') !== -1;
let options = {
exclude: '/**/node_modules/**',
babelrc: false,
configFile: false,
presets: [],
plugins: [...babelPlugins],
babelHelpers: 'bundled',
sourcemap: false,
};
if (isDevelopment) {
options.plugins.push(...babelToES5Plugins);
if (
bundleType === FB_WWW_DEV ||
bundleType === RN_OSS_DEV ||
bundleType === RN_FB_DEV
) {
options.plugins.push(
// Turn console.error/warn() into a custom wrapper
[
require('../babel/transform-replace-console-calls'),
{
shouldError: !canAccessReactObject,
},
]
);
}
}
if (updateBabelOptions) {
options = updateBabelOptions(options);
}
// Controls whether to replace error messages with error codes in production.
// By default, error messages are replaced in production.
if (!isDevelopment && bundle.minifyWithProdErrorCodes !== false) {
options.plugins.push(require('../error-codes/transform-error-messages'));
}
return options;
}
let getRollupInteropValue = id => {
// We're setting Rollup to assume that imports are ES modules unless otherwise specified.
// However, we also compile ES import syntax to `require()` using Babel.
// This causes Rollup to turn uses of `import SomeDefaultImport from 'some-module' into
// references to `SomeDefaultImport.default` due to CJS/ESM interop.
// Some CJS modules don't have a `.default` export, and the rewritten import is incorrect.
// Specifying `interop: 'default'` instead will have Rollup use the imported variable as-is,
// without adding a `.default` to the reference.
const modulesWithCommonJsExports = [
'art/core/transform',
'art/modes/current',
'art/modes/fast-noSideEffects',
'art/modes/svg',
'JSResourceReferenceImpl',
'error-stack-parser',
'neo-async',
'webpack/lib/dependencies/ModuleDependency',
'webpack/lib/dependencies/NullDependency',
'webpack/lib/Template',
];
if (modulesWithCommonJsExports.includes(id)) {
return 'default';
}
// For all other modules, handle imports without any import helper utils
return 'esModule';
};
function getRollupOutputOptions(
outputPath,
format,
globals,
globalName,
bundleType
) {
const isProduction = isProductionBundleType(bundleType);
return {
file: outputPath,
format,
globals,
freeze: !isProduction,
interop: getRollupInteropValue,
name: globalName,
sourcemap: false,
esModule: false,
exports: 'auto',
};
}
function getFormat(bundleType) {
switch (bundleType) {
case NODE_ES2015:
case NODE_DEV:
case NODE_PROD:
case NODE_PROFILING:
case BUN_DEV:
case BUN_PROD:
case FB_WWW_DEV:
case FB_WWW_PROD:
case FB_WWW_PROFILING:
case RN_OSS_DEV:
case RN_OSS_PROD:
case RN_OSS_PROFILING:
case RN_FB_DEV:
case RN_FB_PROD:
case RN_FB_PROFILING:
return `cjs`;
case ESM_DEV:
case ESM_PROD:
return `es`;
case BROWSER_SCRIPT:
return `iife`;
}
}
function isProductionBundleType(bundleType) {
switch (bundleType) {
case NODE_ES2015:
return true;
case ESM_DEV:
case NODE_DEV:
case BUN_DEV:
case FB_WWW_DEV:
case RN_OSS_DEV:
case RN_FB_DEV:
return false;
case ESM_PROD:
case NODE_PROD:
case BUN_PROD:
case NODE_PROFILING:
case FB_WWW_PROD:
case FB_WWW_PROFILING:
case RN_OSS_PROD:
case RN_OSS_PROFILING:
case RN_FB_PROD:
case RN_FB_PROFILING:
case BROWSER_SCRIPT:
return true;
default:
throw new Error(`Unknown type: ${bundleType}`);
}
}
function isProfilingBundleType(bundleType) {
switch (bundleType) {
case NODE_ES2015:
case FB_WWW_DEV:
case FB_WWW_PROD:
case NODE_DEV:
case NODE_PROD:
case BUN_DEV:
case BUN_PROD:
case RN_FB_DEV:
case RN_FB_PROD:
case RN_OSS_DEV:
case RN_OSS_PROD:
case ESM_DEV:
case ESM_PROD:
case BROWSER_SCRIPT:
return false;
case FB_WWW_PROFILING:
case NODE_PROFILING:
case RN_FB_PROFILING:
case RN_OSS_PROFILING:
return true;
default:
throw new Error(`Unknown type: ${bundleType}`);
}
}
function getBundleTypeFlags(bundleType) {
const isFBWWWBundle =
bundleType === FB_WWW_DEV ||
bundleType === FB_WWW_PROD ||
bundleType === FB_WWW_PROFILING;
const isRNBundle =
bundleType === RN_OSS_DEV ||
bundleType === RN_OSS_PROD ||
bundleType === RN_OSS_PROFILING ||
bundleType === RN_FB_DEV ||
bundleType === RN_FB_PROD ||
bundleType === RN_FB_PROFILING;
const isFBRNBundle =
bundleType === RN_FB_DEV ||
bundleType === RN_FB_PROD ||
bundleType === RN_FB_PROFILING;
const shouldStayReadable = isFBWWWBundle || isRNBundle || forcePrettyOutput;
return {
isFBWWWBundle,
isRNBundle,
isFBRNBundle,
shouldStayReadable,
};
}
function forbidFBJSImports() {
return {
name: 'forbidFBJSImports',
resolveId(importee, importer) {
if (/^fbjs\//.test(importee)) {
throw new Error(
`Don't import ${importee} (found in ${importer}). ` +
`Use the utilities in packages/shared/ instead.`
);
}
},
};
}
function getPlugins(
entry,
externals,
updateBabelOptions,
filename,
packageName,
bundleType,
globalName,
moduleType,
pureExternalModules,
bundle
) {
try {
const forks = Modules.getForks(bundleType, entry, moduleType, bundle);
const isProduction = isProductionBundleType(bundleType);
const isProfiling = isProfilingBundleType(bundleType);
const needsMinifiedByClosure =
bundleType !== ESM_PROD && bundleType !== ESM_DEV;
return [
// Keep dynamic imports as externals
dynamicImports(),
{
name: 'rollup-plugin-flow-remove-types',
transform(code) {
const transformed = flowRemoveTypes(code);
return {
code: transformed.toString(),
map: null,
};
},
},
// Shim any modules that need forking in this environment.
useForks(forks),
// Ensure we don't try to bundle any fbjs modules.
forbidFBJSImports(),
// Use Node resolution mechanism.
resolve({
// skip: externals, // TODO: options.skip was removed in @rollup/plugin-node-resolve 3.0.0
}),
// Remove license headers from individual modules
stripBanner({
exclude: 'node_modules/**/*',
}),
// Compile to ES2015.
babel(
getBabelConfig(
updateBabelOptions,
bundleType,
packageName,
externals,
!isProduction,
bundle
)
),
// Remove 'use strict' from individual source files.
{
name: "remove 'use strict'",
transform(source) {
return source.replace(/['"]use strict["']/g, '');
},
},
// Turn __DEV__ and process.env checks into constants.
replace({
preventAssignment: true,
values: {
__DEV__: isProduction ? 'false' : 'true',
__PROFILE__: isProfiling || !isProduction ? 'true' : 'false',
'process.env.NODE_ENV': isProduction
? "'production'"
: "'development'",
__EXPERIMENTAL__,
},
}),
{
name: 'top-level-definitions',
renderChunk(source) {
return Wrappers.wrapWithTopLevelDefinitions(
source,
bundleType,
globalName,
filename,
moduleType,
bundle.wrapWithModuleBoundaries
);
},
},
// For production builds, compile with Closure. We do this even for the
// "non-minified" production builds because Closure is much better at
// minification than what most applications use. During this step, we do
// preserve the original symbol names, though, so the resulting code is
// relatively readable.
//
// For the minified builds, the names will be mangled later.
//
// We don't bother with sourcemaps at this step. The sourcemaps we publish
// are only for whitespace and symbol renaming; they don't map back to
// before Closure was applied.
needsMinifiedByClosure &&
closure({
compilation_level: 'SIMPLE',
language_in: 'ECMASCRIPT_2020',
language_out:
bundleType === NODE_ES2015
? 'ECMASCRIPT_2020'
: bundleType === BROWSER_SCRIPT
? 'ECMASCRIPT5'
: 'ECMASCRIPT5_STRICT',
emit_use_strict:
bundleType !== BROWSER_SCRIPT &&
bundleType !== ESM_PROD &&
bundleType !== ESM_DEV,
env: 'CUSTOM',
warning_level: 'QUIET',
source_map_include_content: true,
use_types_for_optimization: false,
process_common_js_modules: false,
rewrite_polyfills: false,
inject_libraries: false,
allow_dynamic_import: true,
// Don't let it create global variables in the browser.
// https://github.com/facebook/react/issues/10909
assume_function_wrapper: true,
// Don't rename symbols (variable names, functions, etc). We leave
// this up to the application to handle, if they want. Otherwise gzip
// takes care of it.
renaming: false,
}),
needsMinifiedByClosure &&
// Add the whitespace back
prettier({
parser: 'flow',
singleQuote: false,
trailingComma: 'none',
bracketSpacing: true,
}),
{
name: 'license-and-signature-header',
renderChunk(source) {
return Wrappers.wrapWithLicenseHeader(
source,
bundleType,
globalName,
filename,
moduleType
);
},
},
// Record bundle size.
sizes({
getSize: (size, gzip) => {
const currentSizes = Stats.currentBuildResults.bundleSizes;
const recordIndex = currentSizes.findIndex(
record =>
record.filename === filename && record.bundleType === bundleType
);
const index = recordIndex !== -1 ? recordIndex : currentSizes.length;
currentSizes[index] = {
filename,
bundleType,
packageName,
size,
gzip,
};
},
}),
].filter(Boolean);
} catch (error) {
console.error(
chalk.red(`There was an error preparing plugins for entry "${entry}"`)
);
throw error;
}
}
function shouldSkipBundle(bundle, bundleType) {
const shouldSkipBundleType = bundle.bundleTypes.indexOf(bundleType) === -1;
if (shouldSkipBundleType) {
return true;
}
if (requestedBundleTypes.length > 0) {
const isAskingForDifferentType = requestedBundleTypes.some(
requestedType => !bundleType.includes(requestedType)
);
if (isAskingForDifferentType) {
return true;
}
}
if (requestedBundleNames.length > 0) {
// If the name ends with `something/index` we only match if the
// entry ends in something. Such as `react-dom/index` only matches
// `react-dom` but not `react-dom/server`. Everything else is fuzzy
// search.
const entryLowerCase = bundle.entry.toLowerCase() + '/index.js';
const isAskingForDifferentNames = requestedBundleNames.every(
requestedName => {
const matchEntry = entryLowerCase.indexOf(requestedName) !== -1;
if (!bundle.name) {
return !matchEntry;
}
const matchName =
bundle.name.toLowerCase().indexOf(requestedName) !== -1;
return !matchEntry && !matchName;
}
);
if (isAskingForDifferentNames) {
return true;
}
}
return false;
}
function resolveEntryFork(resolvedEntry, isFBBundle) {
// Pick which entry point fork to use:
// .modern.fb.js
// .classic.fb.js
// .fb.js
// .stable.js
// .experimental.js
// .js
// or any of those plus .development.js
if (isFBBundle) {
const resolvedFBEntry = resolvedEntry.replace(
'.js',
__EXPERIMENTAL__ ? '.modern.fb.js' : '.classic.fb.js'
);
const developmentFBEntry = resolvedFBEntry.replace(
'.js',
'.development.js'
);
if (fs.existsSync(developmentFBEntry)) {
return developmentFBEntry;
}
if (fs.existsSync(resolvedFBEntry)) {
return resolvedFBEntry;
}
const resolvedGenericFBEntry = resolvedEntry.replace('.js', '.fb.js');
const developmentGenericFBEntry = resolvedGenericFBEntry.replace(
'.js',
'.development.js'
);
if (fs.existsSync(developmentGenericFBEntry)) {
return developmentGenericFBEntry;
}
if (fs.existsSync(resolvedGenericFBEntry)) {
return resolvedGenericFBEntry;
}
// Even if it's a FB bundle we fallthrough to pick stable or experimental if we don't have an FB fork.
}
const resolvedForkedEntry = resolvedEntry.replace(
'.js',
__EXPERIMENTAL__ ? '.experimental.js' : '.stable.js'
);
const devForkedEntry = resolvedForkedEntry.replace('.js', '.development.js');
if (fs.existsSync(devForkedEntry)) {
return devForkedEntry;
}
if (fs.existsSync(resolvedForkedEntry)) {
return resolvedForkedEntry;
}
// Just use the plain .js one.
return resolvedEntry;
}
async function createBundle(bundle, bundleType) {
const filename = getFilename(bundle, bundleType);
const logKey =
chalk.white.bold(filename) + chalk.dim(` (${bundleType.toLowerCase()})`);
const format = getFormat(bundleType);
const packageName = Packaging.getPackageName(bundle.entry);
const {isFBWWWBundle, isFBRNBundle} = getBundleTypeFlags(bundleType);
let resolvedEntry = resolveEntryFork(
require.resolve(bundle.entry),
isFBWWWBundle || isFBRNBundle,
!isProductionBundleType(bundleType)
);
const peerGlobals = Modules.getPeerGlobals(bundle.externals, bundleType);
let externals = Object.keys(peerGlobals);
const deps = Modules.getDependencies(bundleType, bundle.entry);
externals = externals.concat(deps);
const importSideEffects = Modules.getImportSideEffects();
const pureExternalModules = Object.keys(importSideEffects).filter(
module => !importSideEffects[module]
);
const rollupConfig = {
input: resolvedEntry,
treeshake: {
moduleSideEffects: (id, external) =>
!(external && pureExternalModules.includes(id)),
propertyReadSideEffects: false,
},
external(id) {
const containsThisModule = pkg => id === pkg || id.startsWith(pkg + '/');
const isProvidedByDependency = externals.some(containsThisModule);
if (isProvidedByDependency) {
if (id.indexOf('/src/') !== -1) {
throw Error(
'You are trying to import ' +
id +
' but ' +
externals.find(containsThisModule) +
' is one of npm dependencies, ' +
'so it will not contain that source file. You probably want ' +
'to create a new bundle entry point for it instead.'
);
}
return true;
}
return !!peerGlobals[id];
},
onwarn: handleRollupWarning,
plugins: getPlugins(
bundle.entry,
externals,
bundle.babel,
filename,
packageName,
bundleType,
bundle.global,
bundle.moduleType,
pureExternalModules,
bundle
),
output: {
externalLiveBindings: false,
freeze: false,
interop: getRollupInteropValue,
esModule: false,
},
};
const mainOutputPath = Packaging.getBundleOutputPath(
bundle,
bundleType,
filename,
packageName
);
const rollupOutputOptions = getRollupOutputOptions(
mainOutputPath,
format,
peerGlobals,
bundle.global,
bundleType
);
if (isWatchMode) {
rollupConfig.output = [rollupOutputOptions];
const watcher = rollup.watch(rollupConfig);
watcher.on('event', async event => {
switch (event.code) {
case 'BUNDLE_START':
console.log(`${chalk.bgYellow.black(' BUILDING ')} ${logKey}`);
break;
case 'BUNDLE_END':
console.log(`${chalk.bgGreen.black(' COMPLETE ')} ${logKey}\n`);
break;
case 'ERROR':
case 'FATAL':
console.log(`${chalk.bgRed.black(' OH NOES! ')} ${logKey}\n`);
handleRollupError(event.error);
break;
}
});
} else {
console.log(`${chalk.bgYellow.black(' BUILDING ')} ${logKey}`);
try {
const result = await rollup.rollup(rollupConfig);
await result.write(rollupOutputOptions);
} catch (error) {
console.log(`${chalk.bgRed.black(' OH NOES! ')} ${logKey}\n`);
handleRollupError(error);
throw error;
}
console.log(`${chalk.bgGreen.black(' COMPLETE ')} ${logKey}\n`);
}
}
function handleRollupWarning(warning) {
if (warning.code === 'UNUSED_EXTERNAL_IMPORT') {
const match = warning.message.match(/external module "([^"]+)"/);
if (!match || typeof match[1] !== 'string') {
throw new Error(
'Could not parse a Rollup warning. ' + 'Fix this method.'
);
}
const importSideEffects = Modules.getImportSideEffects();
const externalModule = match[1];
if (typeof importSideEffects[externalModule] !== 'boolean') {
throw new Error(
'An external module "' +
externalModule +
'" is used in a DEV-only code path ' +
'but we do not know if it is safe to omit an unused require() to it in production. ' +
'Please add it to the `importSideEffects` list in `scripts/rollup/modules.js`.'
);
}
// Don't warn. We will remove side effectless require() in a later pass.
return;
}
if (warning.code === 'CIRCULAR_DEPENDENCY') {
// Ignored
} else if (typeof warning.code === 'string') {
// This is a warning coming from Rollup itself.
// These tend to be important (e.g. clashes in namespaced exports)
// so we'll fail the build on any of them.
console.error();
console.error(warning.message || warning);
console.error();
process.exit(1);
} else {
// The warning is from one of the plugins.
// Maybe it's not important, so just print it.
console.warn(warning.message || warning);
}
}
function handleRollupError(error) {
loggedErrors.add(error);
if (!error.code) {
console.error(error);
return;
}
console.error(
`\x1b[31m-- ${error.code}${error.plugin ? ` (${error.plugin})` : ''} --`
);
console.error(error.stack);
if (error.loc && error.loc.file) {
const {file, line, column} = error.loc;
// This looks like an error from Rollup, e.g. missing export.
// We'll use the accurate line numbers provided by Rollup but
// use Babel code frame because it looks nicer.
const rawLines = fs.readFileSync(file, 'utf-8');
// column + 1 is required due to rollup counting column start position from 0
// whereas babel-code-frame counts from 1
const frame = codeFrame(rawLines, line, column + 1, {
highlightCode: true,
});
console.error(frame);
} else if (error.codeFrame) {
// This looks like an error from a plugin (e.g. Babel).
// In this case we'll resort to displaying the provided code frame
// because we can't be sure the reported location is accurate.
console.error(error.codeFrame);
}
}
async function buildEverything(index, total) {
if (!argv['unsafe-partial']) {
await asyncRimRaf('build');
}
// Run them serially for better console output
// and to avoid any potential race conditions.
let bundles = [];
// eslint-disable-next-line no-for-of-loops/no-for-of-loops
for (const bundle of Bundles.bundles) {
bundles.push(
[bundle, NODE_ES2015],
[bundle, ESM_DEV],
[bundle, ESM_PROD],
[bundle, NODE_DEV],
[bundle, NODE_PROD],
[bundle, NODE_PROFILING],
[bundle, BUN_DEV],
[bundle, BUN_PROD],
[bundle, FB_WWW_DEV],
[bundle, FB_WWW_PROD],
[bundle, FB_WWW_PROFILING],
[bundle, RN_OSS_DEV],
[bundle, RN_OSS_PROD],
[bundle, RN_OSS_PROFILING],
[bundle, RN_FB_DEV],
[bundle, RN_FB_PROD],
[bundle, RN_FB_PROFILING],
[bundle, BROWSER_SCRIPT]
);
}
bundles = bundles.filter(([bundle, bundleType]) => {
return !shouldSkipBundle(bundle, bundleType);
});
const nodeTotal = parseInt(total, 10);
const nodeIndex = parseInt(index, 10);
bundles = bundles.filter((_, i) => i % nodeTotal === nodeIndex);
// eslint-disable-next-line no-for-of-loops/no-for-of-loops
for (const [bundle, bundleType] of bundles) {
await createBundle(bundle, bundleType);
}
await Packaging.copyAllShims();
await Packaging.prepareNpmPackages();
if (syncFBSourcePath) {
await Sync.syncReactNative(syncFBSourcePath);
} else if (syncWWWPath) {
await Sync.syncReactDom('build/facebook-www', syncWWWPath);
}
console.log(Stats.printResults());
if (!forcePrettyOutput) {
Stats.saveResults();
}
}
module.exports = {
buildEverything,
};


@@ -850,10 +850,9 @@ async function buildEverything() {
return !shouldSkipBundle(bundle, bundleType);
});
if (process.env.CIRCLE_NODE_TOTAL) {
// In CI, parallelize bundles across multiple tasks.
const nodeTotal = parseInt(process.env.CIRCLE_NODE_TOTAL, 10);
const nodeIndex = parseInt(process.env.CIRCLE_NODE_INDEX, 10);
if (process.env.CI_TOTAL && process.env.CI_INDEX) {
const nodeTotal = parseInt(process.env.CI_TOTAL, 10);
const nodeIndex = parseInt(process.env.CI_INDEX, 10);
bundles = bundles.filter((_, i) => i % nodeTotal === nodeIndex);
}