Skip to content

Commit

Permalink
🏗 Remove gulp streaming from a few developer tasks (#32623)
Browse files Browse the repository at this point in the history
  • Loading branch information
rsimha committed Feb 13, 2021
1 parent 2205cef commit 0989bfe
Show file tree
Hide file tree
Showing 6 changed files with 164 additions and 245 deletions.
69 changes: 28 additions & 41 deletions build-system/tasks/caches-json.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,55 +15,42 @@
*/
'use strict';

const gulp = require('gulp');
const path = require('path');
const through2 = require('through2');
const {log, logLocalDev} = require('../common/logging');
const {red, green, yellow, cyan} = require('kleur/colors');

// Cache backends that caches.json must declare.
const expectedCaches = ['google', 'bing'];

// Resolved relative to this module (loaded via require()).
const cachesJsonPath = '../global-configs/caches.json';

/**
 * Entry point for `gulp caches-json`.
 * Fails the build (via a non-zero exit code) if
 * build-system/global-configs/caches.json is missing any expected cache.
 * @return {!Promise<void>}
 */
async function cachesJson() {
  const filename = path.basename(cachesJsonPath);
  let jsonContent;
  try {
    // require() reads and parses the JSON config in one step.
    jsonContent = require(cachesJsonPath);
  } catch (e) {
    log(red('ERROR:'), 'Could not parse', cyan(filename));
    process.exitCode = 1;
    return;
  }
  const foundCaches = [];
  for (const foundCache of jsonContent.caches) {
    foundCaches.push(foundCache.id);
  }
  for (const cache of expectedCaches) {
    if (foundCaches.includes(cache)) {
      // Success output is only useful during local development.
      logLocalDev(green('✔'), 'Found', cyan(cache), 'in', cyan(filename));
    } else {
      log(red('✖'), 'Missing', cyan(cache), 'in', cyan(filename));
      process.exitCode = 1;
    }
  }
}

module.exports = {
cachesJson,
};

cachesJson.description = 'Check that caches.json contains all expected caches.';
14 changes: 3 additions & 11 deletions build-system/tasks/get-zindex/get-zindex.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -31,17 +31,9 @@ const result = {
},
};

// Verifies that getZindexSelectors collects the expected z-index selectors
// from the fixture CSS files that live next to this test.
test('collects selectors', async (t) => {
  const data = await m.getZindexSelectors('*.css', __dirname);
  t.deepEqual(data, result);
});

test('sync - create array of arrays with z index order', (t) => {
Expand Down
76 changes: 26 additions & 50 deletions build-system/tasks/get-zindex/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,11 @@
'use strict';

const fs = require('fs');
const gulp = require('gulp');
const PluginError = require('plugin-error');
const globby = require('globby');
const path = require('path');
const postcss = require('postcss');
const prettier = require('prettier');
const table = require('text-table');
const through = require('through2');

const tableHeaders = [
['selector', 'z-index', 'file'],
Expand Down Expand Up @@ -56,33 +55,6 @@ function zIndexCollector(acc, css) {
});
}

/**
 * through2 object-mode transform: parses one vinyl CSS file with postcss and
 * pushes a {name, selectors} record downstream.
 * @param {!Vinyl} file vinyl fs object
 * @param {string} enc encoding value
 * @param {function(err: ?Object, data: !Vinyl|string)} cb chunk data through
 */
function onFileThrough(file, enc, cb) {
  // Empty entries pass straight through untouched.
  if (file.isNull()) {
    cb(null, file);
    return;
  }

  // Real streams are not supported by this plugin.
  if (file.isStream()) {
    cb(new PluginError('size', 'Stream not supported'));
    return;
  }

  const collected = Object.create(null);
  const collector = zIndexCollector.bind(null, collected);

  postcss([collector])
    .process(file.contents.toString(), {from: file.relative})
    .then(() => cb(null, {name: file.relative, selectors: collected}));
}

/**
* @param {!Object<string, !Object<string, !Array<number>} filesData
* accumulation of files and the rules and z index values.
Expand Down Expand Up @@ -123,31 +95,35 @@ function createTable(filesData) {
}

/**
 * Legacy streaming variant: emits one {name, selectors} record per matched
 * CSS file.
 * @param {string} glob
 * @return {!Stream}
 */
function getZindexStream(glob) {
  return gulp.src(glob).pipe(through.obj(onFileThrough));
}

/**
 * Extract z-index selectors from all files matching the given glob starting
 * at the given working directory.
 * @param {string} glob
 * @param {string=} cwd
 * @return {!Promise<!Object>} map from relative file path to its selectors
 */
async function getZindexSelectors(glob, cwd = '.') {
  const filesData = Object.create(null);
  const files = globby.sync(glob, {cwd});
  for (const file of files) {
    const contents = await fs.promises.readFile(path.join(cwd, file), 'utf-8');
    const selectors = Object.create(null);
    const plugins = [zIndexCollector.bind(null, selectors)];
    await postcss(plugins).process(contents, {from: file});
    filesData[file] = selectors;
  }
  return filesData;
}

/**
 * Entry point for `gulp get-zindex`: computes the z-index table across all
 * CSS files and writes it to css/Z_INDEX.md.
 * @return {!Promise<void>}
 */
async function getZindex() {
  const filesData = await getZindexSelectors('{css,src,extensions}/**/*.css');
  const filename = 'css/Z_INDEX.md';
  const rows = [...tableHeaders, ...createTable(filesData)];
  const tbl = table(rows, tableOptions);
  const output = `${preamble}\n\n${tbl}`;
  // Format with prettier before writing so the generated markdown is stable.
  fs.writeFileSync(filename, await prettierFormat(filename, output));
}

async function prettierFormat(filename, output) {
Expand All @@ -160,7 +136,7 @@ async function prettierFormat(filename, output) {
module.exports = {
createTable,
getZindex,
getZindexStream,
getZindexSelectors,
};

getZindex.description =
Expand Down
67 changes: 24 additions & 43 deletions build-system/tasks/performance-urls.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,60 +15,41 @@
*/

const fs = require('fs');
const gulp = require('gulp');
const path = require('path');
const through2 = require('through2');
const {green, red, yellow, cyan} = require('kleur/colors');
const {log} = require('../common/logging');

// Resolved relative to this module (loaded via require()).
const CONFIG_PATH = './performance/config.json';
const LOCAL_HOST_URL = 'http://localhost:8000/';

/**
 * Raises an error carrying the given message, with the stack trace hidden
 * from gulp's error reporter. A duplicate of this helper lives in
 * check-sourcemaps.js.
 * @param {string} message
 */
function throwError(message) {
  const error = new Error(message);
  error.showStack = false;
  throw error;
}

/**
 * Entry point for `gulp performance-urls`.
 * Checks that every localhost url in performance/config.json points to a
 * file that exists on disk; sets a non-zero exit code otherwise.
 * @return {!Promise<void>}
 */
async function performanceUrls() {
  let jsonContent;
  try {
    // require() reads and parses the JSON config in one step.
    jsonContent = require(CONFIG_PATH);
  } catch (e) {
    log(red('ERROR:'), 'Could not parse', cyan(CONFIG_PATH));
    process.exitCode = 1;
    return;
  }
  // Map each localhost url to the repo-relative file it should serve.
  const filepaths = jsonContent.handlers.flatMap((handler) =>
    handler.urls
      .filter((url) => url.startsWith(LOCAL_HOST_URL))
      .map((url) =>
        path.join(__dirname, '../../', url.split(LOCAL_HOST_URL)[1])
      )
  );
  for (const filepath of filepaths) {
    if (!fs.existsSync(filepath)) {
      log(red('ERROR:'), cyan(filepath), 'does not exist');
      process.exitCode = 1;
      return;
    }
  }
  log(green('SUCCESS:'), 'All local performance task urls are valid.');
}

module.exports = {
Expand Down

0 comments on commit 0989bfe

Please sign in to comment.