Parallelizes the build script across multiple processes (#15716)

* Write size info to separate file per bundle

`bundle-sizes.json` contains the combined size information for every
build. This makes it easier to store and process, but it prevents us
from parallelizing the build script, because each process would need to
write to the same file.

So I've updated the Rollup script to output individual files per build.
A downstream CI job consolidates them into a single file.

I have not parallelized the Rollup script yet. I'll do that next.

* Parallelize the build script

Uses CircleCI's `parallelism` config option to spin up multiple build
processes.
This commit is contained in:
Andrew Clark 2019-05-29 14:34:50 -07:00 committed by GitHub
parent 30b1a8009c
commit 1cc3bba004
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 102 additions and 47 deletions

View File

@ -118,6 +118,7 @@ jobs:
build:
docker: *docker
environment: *environment
parallelism: 20
steps:
- checkout
- *restore_yarn_cache
@ -125,7 +126,24 @@ jobs:
- run: ./scripts/circleci/add_build_info_json.sh
- run: ./scripts/circleci/update_package_versions.sh
- run: yarn build
- run: cp ./scripts/rollup/results.json ./build/bundle-sizes.json
- persist_to_workspace:
root: build
paths:
- facebook-www
- node_modules
- react-native
- sizes/*.json
process_artifacts:
docker: *docker
environment: *environment
steps:
- checkout
- attach_workspace: *attach_workspace
- *restore_yarn_cache
- *run_yarn
- run: node ./scripts/rollup/consolidateBundleSizes.js
- run: node ./scripts/tasks/danger
- run: ./scripts/circleci/upload_build.sh
- run: ./scripts/circleci/pack_and_store_artifact.sh
- store_artifacts:
@ -135,24 +153,9 @@ jobs:
- store_artifacts:
path: ./build/bundle-sizes.json
- store_artifacts:
# TODO: Update release script to use local file instead of pulling
# from artifacts.
path: ./scripts/error-codes/codes.json
- persist_to_workspace:
root: build
paths:
- facebook-www
- node_modules
- react-native
- bundle-sizes.json
sizebot:
docker: *docker
environment: *environment
steps:
- checkout
- attach_workspace: *attach_workspace
- *restore_yarn_cache
- *run_yarn
- run: node ./scripts/tasks/danger
lint_build:
docker: *docker
@ -223,13 +226,10 @@ workflows:
- test_source_fire:
requires:
- setup
- test_coverage:
requires:
- setup
- build:
requires:
- setup
- sizebot:
- process_artifacts:
requires:
- build
- lint_build:
@ -254,3 +254,6 @@ workflows:
- test_fuzz:
requires:
- setup
- test_coverage:
requires:
- setup

1
.gitignore vendored
View File

@ -1,7 +1,6 @@
.DS_STORE
node_modules
scripts/flow/*/.flowconfig
scripts/rollup/results.json
*~
*.pyc
.grunt

View File

@ -10,7 +10,7 @@ if [ -z "$CI_PULL_REQUEST" ] && [ -n "$BUILD_SERVER_ENDPOINT" ]; then
-F "react-dom.production.min=@build/dist/react-dom.production.min.js" \
-F "react-dom-server.browser.development=@build/dist/react-dom-server.browser.development.js" \
-F "react-dom-server.browser.production.min=@build/dist/react-dom-server.browser.production.min.js" \
-F "results.json=@build/../scripts/rollup/results.json" \
-F "results.json=@build/../build/bundle-sizes.json" \
-F "commit=$CIRCLE_SHA1" \
-F "date=$(git log --format='%ct' -1)" \
-F "pull_request=false" \

View File

@ -21,7 +21,7 @@ const useForks = require('./plugins/use-forks-plugin');
const stripUnusedImports = require('./plugins/strip-unused-imports');
const extractErrorCodes = require('../error-codes/extract-errors');
const Packaging = require('./packaging');
const {asyncCopyTo, asyncRimRaf} = require('./utils');
const {asyncCopyTo} = require('./utils');
const codeFrame = require('babel-code-frame');
const Wrappers = require('./wrappers');
@ -634,27 +634,41 @@ function handleRollupError(error) {
}
async function buildEverything() {
await asyncRimRaf('build');
// Run them serially for better console output
// and to avoid any potential race conditions.
let bundles = [];
// eslint-disable-next-line no-for-of-loops/no-for-of-loops
for (const bundle of Bundles.bundles) {
await createBundle(bundle, UMD_DEV);
await createBundle(bundle, UMD_PROD);
await createBundle(bundle, UMD_PROFILING);
await createBundle(bundle, NODE_DEV);
await createBundle(bundle, NODE_PROD);
await createBundle(bundle, NODE_PROFILING);
await createBundle(bundle, FB_WWW_DEV);
await createBundle(bundle, FB_WWW_PROD);
await createBundle(bundle, FB_WWW_PROFILING);
await createBundle(bundle, RN_OSS_DEV);
await createBundle(bundle, RN_OSS_PROD);
await createBundle(bundle, RN_OSS_PROFILING);
await createBundle(bundle, RN_FB_DEV);
await createBundle(bundle, RN_FB_PROD);
await createBundle(bundle, RN_FB_PROFILING);
bundles.push(
[bundle, UMD_DEV],
[bundle, UMD_PROD],
[bundle, UMD_PROFILING],
[bundle, NODE_DEV],
[bundle, NODE_PROD],
[bundle, NODE_PROFILING],
[bundle, FB_WWW_DEV],
[bundle, FB_WWW_PROD],
[bundle, FB_WWW_PROFILING],
[bundle, RN_OSS_DEV],
[bundle, RN_OSS_PROD],
[bundle, RN_OSS_PROFILING],
[bundle, RN_FB_DEV],
[bundle, RN_FB_PROD],
[bundle, RN_FB_PROFILING]
);
}
if (!shouldExtractErrors && process.env.CIRCLE_NODE_TOTAL) {
// In CI, parallelize bundles across multiple tasks.
const nodeTotal = parseInt(process.env.CIRCLE_NODE_TOTAL, 10);
const nodeIndex = parseInt(process.env.CIRCLE_NODE_INDEX, 10);
bundles = bundles.filter((_, i) => i % nodeTotal === nodeIndex);
}
// eslint-disable-next-line no-for-of-loops/no-for-of-loops
for (const [bundle, bundleType] of bundles) {
await createBundle(bundle, bundleType);
}
await Packaging.copyAllShims();

View File

@ -0,0 +1,25 @@
'use strict';

// Combines the per-build bundle size artifacts (written by parallel build
// processes into build/sizes/) into one JSON report for storage/processing.

const fs = require('fs');
const path = require('path');

const BUILD_DIR = path.join(__dirname, '../../build');
const SIZES_DIR = path.join(BUILD_DIR, 'sizes');

// Gather every `*.size.json` record emitted by the build tasks.
const bundleSizes = fs
  .readdirSync(SIZES_DIR)
  .filter(name => name.endsWith('.size.json'))
  .map(name => JSON.parse(fs.readFileSync(path.join(SIZES_DIR, name))));

// Emit the consolidated report alongside the other build artifacts.
fs.writeFileSync(
  path.join(BUILD_DIR, 'bundle-sizes.json'),
  JSON.stringify({bundleSizes}, null, 2)
);

View File

@ -5,21 +5,35 @@ const filesize = require('filesize');
const chalk = require('chalk');
const join = require('path').join;
const fs = require('fs');
const prevBuildResults = fs.existsSync(__dirname + '/results.json')
? require('./results.json')
const mkdirp = require('mkdirp');
const BUNDLE_SIZES_FILE_NAME = join(__dirname, '../../build/bundle-sizes.json');
const prevBuildResults = fs.existsSync(BUNDLE_SIZES_FILE_NAME)
? require(BUNDLE_SIZES_FILE_NAME)
: {bundleSizes: []};
const currentBuildResults = {
// Mutated inside build.js during a build run.
// We make a copy so that partial rebuilds don't erase other stats.
bundleSizes: [...prevBuildResults.bundleSizes],
bundleSizes: [],
};
function saveResults() {
// Write all the bundle sizes to a single JSON file.
fs.writeFileSync(
join('scripts', 'rollup', 'results.json'),
BUNDLE_SIZES_FILE_NAME,
JSON.stringify(currentBuildResults, null, 2)
);
// Also write each bundle size to a separate file. That way multiple build
// processes can run in parallel and generate separate size artifacts.
// A downstream job can combine them into a single JSON file.
mkdirp.sync('build/sizes');
currentBuildResults.bundleSizes.forEach(results => {
fs.writeFileSync(
join('build', 'sizes', `${results.filename}.size.json`),
JSON.stringify(results, null, 2)
);
});
}
function fractionalChange(prev, current) {