perl syntax checking in parallel, but limit the number of processes.
The syntax checking is slow (5-6 minutes on GitHub), but checking over 100 scripts simultaneously is also unworkable:
openfoodfacts#3368

The limit is configurable in case the environment running the checks ever has more than the 2 cores it currently does. Increasing the limit on 2 cores (tested with 3 and 4) works, but is no faster.
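
For context, the checker is driven by glob patterns passed on the command line (the script reads them from process.argv.slice(2)). A hypothetical local invocation from the repository root might look like the following; the glob patterns are chosen purely for illustration and are not taken from the actual CI configuration:

node scripts/check_perl.js "lib/**/*.pm" "scripts/*.pl"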
svensven committed Oct 7, 2020
1 parent 2341bd0 commit 0f8085c
Showing 1 changed file: scripts/check_perl.js, with 95 additions and 23 deletions.
@@ -2,37 +2,109 @@
 /*eslint no-await-in-loop: "off"*/
 
 const process = require("process");
-const util = require("util");
+const util = require('util');
 const glob = util.promisify(require("glob").glob);
-const { spawn } = require("child_process");
+const { spawn } = require('child_process');
 
-function checkFile(path, doneCallback) {
-  console.log(`Checking ${path}`);
-  try {
-    const child = spawn(`perl`, ["-c", "-CS", "-Ilib", path]);
-    child.stdout.on("data", (data) => console.log("[" + path + "] " + data));
-    child.stderr.on("data", (data) => console.error("[" + path + "] " + data));
-    child.on("close", (code) => {
-      console.log("[" + path + "] result: " + code);
-      process.exitCode = code;
-      doneCallback();
-    });
-  } catch (e) {
-    console.error("[" + path + "] " + e);
-    process.exitCode = e.code;
-    throw e;
-  }
-}
-
-const check = util.promisify(checkFile);
+// Github actions have 2 CPUs:
+// https://docs.github.com/en/free-pro-team@latest/actions/reference/specifications-for-github-hosted-runners#supported-runners-and-hardware-resources
+// Travis also seems to have 2 CPUs:
+// https://docs.travis-ci.com/user/reference/overview/#virtualisation-environment-vs-operating-system
+const maxRunning = 2;
+
+function sleep(ms) {
+  return new Promise((resolve) => setTimeout(resolve, ms));
+}
 
 async function main() {
+
+  const filesToCheck = [];
+
+  // build array of paths from given arguments
   // 0 = node; 1 = check_perl.js
   for (const arg of process.argv.slice(2)) {
     const files = await glob(arg);
     for (var i = 0; i < files.length; ++i) {
-      const path = files[i];
-      await check(path);
+      filesToCheck.push(files[i]);
     }
   }
+  const numFilesToCheck = filesToCheck.length;
+
+  const spawns = [];
+
+  console.log(`Checking ${numFilesToCheck} files, max ${maxRunning} processes...`);
+
+  let running = Number.MAX_SAFE_INTEGER;
+
+  // loop until we've popped all the files off the array
+  while (filesToCheck.length > 0) {
+
+    // count how many spawned processes don't have an exit code yet
+    running = spawns.reduce((pv, cv) => {
+      if (cv.exitCode === null) {
+        return pv + 1;
+      }
+      else {
+        return pv;
+      }
+    }, 0);
+
+    // if we're not at the maximum number of processes, fire off another
+    if (running < maxRunning) {
+      try {
+
+        const file = filesToCheck.shift();
+        console.log(`Queueing ${file}`);
+        const perl = spawn(
+          'perl',
+          ["-c", "-CS", "-Ilib", file],
+          { stdio: ['pipe', 'inherit', 'inherit'] }
+        );
+        spawns.push(perl);
+
+        perl.on('close', (code, signal) => {
+          if (code !== null) {
+            console.log(`[${file}] child process exited with code ${code}`);
+            if (code != 0) {
+              process.exitCode = code;
+            }
+          }
+          else if (signal !== null) {
+            console.log(`[${file}] child process exited with signal ${signal}`);
+          }
+        });
+
+      } catch (e) {
+        console.error(e);
+        process.exitCode = e.code;
+        throw e;
+      }
+    }
+
+    // wait before looping
+    if (running > 0) {
+      await sleep(100);
+    }
+
+  }
+
+  console.log('Finished queueing files.');
+
+  // don't return until all child processes have finished
+  running = Number.MAX_SAFE_INTEGER;
+  while (running > 0) {
+    running = spawns.reduce((pv, cv) => {
+      if (cv.exitCode === null) {
+        return pv + 1;
+      }
+      else {
+        return pv;
+      }
+    }, 0);
+
+    if (running > 0) {
+      await sleep(100);
+    }
+  }
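
As committed, the limit is a hard-coded constant (maxRunning = 2), so "configurable" here means editing the script. If the runner ever grows more cores, one low-effort way to expose the limit without further edits would be an environment variable override along these lines; the variable name CHECK_PERL_MAX_PROCS is purely illustrative and not part of this commit:

// Hypothetical alternative to the hard-coded constant; not in this commit.
// Falls back to 2 when the (illustrative) CHECK_PERL_MAX_PROCS variable is unset or not a number.
const maxRunning = Number.parseInt(process.env.CHECK_PERL_MAX_PROCS, 10) || 2;

The 100 ms polling of each child's exitCode is a deliberately simple way to bound concurrency without pulling in a pooling library; it trades a little busy-waiting for not having to restructure the script around promise chains.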
