extension: split browserifying and extension bundling into separate scripts #6295

Merged (2 commits) on Oct 18, 2018
114 changes: 114 additions & 0 deletions lighthouse-extension/build-extension.js
@@ -0,0 +1,114 @@
/**
* @license Copyright 2018 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
'use strict';

const fs = require('fs');

const archiver = require('archiver');
const del = require('del');
const cpy = require('cpy');
const makeDir = require('make-dir');
const bundleBuilder = require('./bundle-builder.js');

const distDir = 'dist';
const manifestVersion = require(`./app/manifest.json`).version;

// list of all consumers we build for (easier to understand which file is used for which)
const CONSUMERS = {
DEVTOOLS: {
src: 'devtools-entry.js',
dist: 'lighthouse-dt-bundle.js',
},
EXTENSION: {
src: 'extension-entry.js',
dist: 'lighthouse-ext-bundle.js',
},
LIGHTRIDER: {
src: 'lightrider-entry.js',
dist: 'lighthouse-lr-bundle.js',
},
};

/**
 * Browserify and minify the entry scripts for every consumer.
 * @return {Array<Promise<void>>}
 */
function buildAll() {
return Object.values(CONSUMERS).map(consumer => {
const inFile = `app/src/${consumer.src}`;
const outFile = `dist/scripts/${consumer.dist}`;
return bundleBuilder.build(inFile, outFile);
});
}

/**
* Copy popup.js to dist folder, inlining the current commit hash along the way.
* @return {Promise<void>}
*/
async function copyPopup() {
let popupSrc = fs.readFileSync('app/src/popup.js', {encoding: 'utf8'});
popupSrc = popupSrc.replace(/__COMMITHASH__/g, bundleBuilder.COMMIT_HASH);

await makeDir(`${distDir}/scripts`);
fs.writeFileSync(`${distDir}/scripts/popup.js`, popupSrc);
}

/**
* @return {Promise<void>}
*/
async function copyAssets() {
return cpy([
'*.html',
'styles/**/*.css',
'images/**/*',
'manifest.json',
'_locales/**', // currently non-functional
], `../${distDir}`, {
cwd: 'app',
parents: true,
});
}

/**
* Put built extension into a zip file ready for install or upload to the
* webstore.
* @return {Promise<void>}
*/
async function packageExtension() {
await del([
`${distDir}/scripts/${CONSUMERS.DEVTOOLS.dist}`,
`${distDir}/scripts/${CONSUMERS.LIGHTRIDER.dist}`,
]);

return new Promise((resolve, reject) => {
const archive = archiver('zip', {
zlib: {level: 9},
});

const outPath = `package/lighthouse-${manifestVersion}.zip`;
const writeStream = fs.createWriteStream(outPath);
writeStream.on('finish', resolve);
writeStream.on('error', reject);

archive.pipe(writeStream);
archive.glob(`${distDir}/**`);
archive.finalize();
});
}

async function run() {
const argv = process.argv.slice(2);
if (argv.includes('package')) {
return packageExtension();
}

await Promise.all([
...buildAll(),
copyAssets(),
copyPopup(),
]);
}

run();
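
For context, a minimal sketch of how this script could be invoked from another Node helper; the working directory and wiring are assumptions, not part of this PR (the script reads its assets via paths relative to lighthouse-extension/):

// Hypothetical invocation helper — not part of this PR.
const {execSync} = require('child_process');

// Build all three bundles plus the extension assets into dist/.
execSync('node build-extension.js', {cwd: 'lighthouse-extension', stdio: 'inherit'});

// Then produce the zip ready for the Chrome Web Store from the dist/ output.
execSync('node build-extension.js package', {cwd: 'lighthouse-extension', stdio: 'inherit'});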
147 changes: 147 additions & 0 deletions lighthouse-extension/bundle-builder.js
@@ -0,0 +1,147 @@
/**
* @license Copyright 2018 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
'use strict';

/**
* @fileoverview Script to bundle lighthouse entry points so that they can be run
* in the browser (as long as they have access to a debugger protocol Connection).
*/

const fs = require('fs');
const path = require('path');

const LighthouseRunner = require('../lighthouse-core/runner');
Member Author commented:

this is all almost exactly the same as it was in the gulpfile, just unindented a few times. One good piece of news: our astw hack is no longer necessary because insert-module-globals moved off of lexical-scope in browserify/insert-module-globals#76 :)

Member Author commented:

oh, and no objection to moving to a different bundler or whatever in the future. Left it exactly the same to stay out of that question for now :)

It should be even easier since bundling is isolated from the specifics of extension building.

const babel = require('babel-core');
const browserify = require('browserify');
const makeDir = require('make-dir');
const pkg = require('../package.json');

const VERSION = pkg.version;
const COMMIT_HASH = require('child_process')
.execSync('git rev-parse HEAD')
.toString().trim();

const audits = LighthouseRunner.getAuditList()
.map(f => '../lighthouse-core/audits/' + f.replace(/\.js$/, ''));

const gatherers = LighthouseRunner.getGathererList()
.map(f => '../lighthouse-core/gather/gatherers/' + f.replace(/\.js$/, ''));

const locales = fs.readdirSync('../lighthouse-core/lib/i18n/locales/')
.map(f => require.resolve(`../lighthouse-core/lib/i18n/locales/${f}`));

/** @param {string} file */
const isDevtools = file => path.basename(file).includes('devtools');
/** @param {string} file */
const isExtension = file => path.basename(file).includes('extension');

const BANNER = `// lighthouse, browserified. ${VERSION} (${COMMIT_HASH})\n`;

/**
* Browserify starting at the file at entryPath. Contains entry-point-specific
* ignores (e.g. for DevTools or the extension) to trim the bundle depending on
* the eventual use case.
* @param {string} entryPath
* @param {string} distPath
* @return {Promise<void>}
*/
async function browserifyFile(entryPath, distPath) {
let bundle = browserify(entryPath); // , {debug: true}); // for sourcemaps

bundle
// Transform the fs.readFile etc into inline strings.
.transform('brfs', {global: true, parserOpts: {ecmaVersion: 9}})
// Strip everything out of package.json includes except for the version.
.transform('package-json-versionify');

// scripts will need some additional transforms, ignores and requires…
bundle.ignore('source-map')
.ignore('debug/node')
.ignore('intl')
.ignore('raven')
.ignore('mkdirp')
.ignore('rimraf')
.ignore('pako/lib/zlib/inflate.js');

// Don't include the desktop protocol connection.
bundle.ignore(require.resolve('../lighthouse-core/gather/connections/cri.js'));

// Don't include the stringified report in DevTools.
if (isDevtools(entryPath)) {
bundle.ignore(require.resolve('../lighthouse-core/report/html/html-report-assets.js'));
}

// Don't include locales in DevTools or the extension for now.
if (isDevtools(entryPath) || isExtension(entryPath)) {
// @ts-ignore bundle.ignore does accept an array of strings.
bundle.ignore(locales);
}

// Expose the audits, gatherers, and computed artifacts so they can be dynamically loaded.
const corePath = '../lighthouse-core/';
const driverPath = `${corePath}gather/`;
audits.forEach(audit => {
bundle = bundle.require(audit, {expose: audit.replace(corePath, '../')});
});
gatherers.forEach(gatherer => {
bundle = bundle.require(gatherer, {expose: gatherer.replace(driverPath, '../gather/')});
});

// browserify's url shim doesn't work with .URL in node_modules,
// and within robots-parser, it does `var URL = require('url').URL`, so we expose our own.
// @see https://github.com/GoogleChrome/lighthouse/issues/5273
const pathToURLShim = require.resolve('../lighthouse-core/lib/url-shim.js');
bundle = bundle.require(pathToURLShim, {expose: 'url'});

const bundleStream = bundle.bundle();

// Make sure path exists.
await makeDir(path.dirname(distPath));
return new Promise((resolve, reject) => {
const writeStream = fs.createWriteStream(distPath);
writeStream.on('finish', resolve);
writeStream.on('error', reject);

bundleStream.pipe(writeStream);
});
}

/**
* Minimally minify a javascript file, in place.
* @param {string} filePath
*/
function minifyScript(filePath) {
@wardpeet (Collaborator) commented on Oct 16, 2018:

should we call this minify? 😛 we minify but not really.. 🙄

const opts = {
compact: true, // Do not include superfluous whitespace characters and line terminators.
retainLines: true, // Keep things on the same line (looks wonky but helps with stacktraces)
comments: false, // Don't output comments
shouldPrintComment: () => false, // Don't include @license or @preserve comments either
plugins: [
'syntax-object-rest-spread',
],
// sourceMaps: 'both'
};

const minified = BANNER + babel.transformFileSync(filePath, opts).code;
fs.writeFileSync(filePath, minified);
}

/**
* Browserify starting at entryPath, writing the minified result to distPath.
* @param {string} entryPath
* @param {string} distPath
* @return {Promise<void>}
*/
async function build(entryPath, distPath) {
await browserifyFile(entryPath, distPath);
minifyScript(distPath);
}

module.exports = {
/** The commit hash for the current HEAD. */
COMMIT_HASH,
build,
};
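
Picking up the reviewer's point above that bundling is now isolated from the specifics of extension building: a minimal sketch of a hypothetical separate consumer of this module. The entry and output paths are placeholders, and the working directory is assumed to be lighthouse-extension/ because of the relative paths used by the module itself.

// Hypothetical consumer script — not part of this PR.
const bundleBuilder = require('./bundle-builder.js');

async function buildCustomBundle() {
  // Any entry point with access to a debugger protocol Connection can be bundled
  // this way; these paths are illustrative only.
  await bundleBuilder.build('app/src/devtools-entry.js', 'out/custom-bundle.js');
  console.log(`Bundled Lighthouse at commit ${bundleBuilder.COMMIT_HASH}`);
}

buildCustomBundle().catch(err => {
  console.error(err);
  process.exit(1);
});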