Commit 6d5c125
Merge 94a2c8a into 538c008
dubzzz authored Nov 2, 2020
2 parents 538c008 + 94a2c8a
Showing 14 changed files with 1,095 additions and 180 deletions.
5 changes: 4 additions & 1 deletion package.json
@@ -41,6 +41,8 @@
"@types/jest": "^26.0.4",
"@types/node": "^14.0.13",
"benchmark": "^2.1.4",
"chalk": "^4.1.0",
"console-table-printer": "2.4.11",
"coveralls": "^3.0.9",
"fast-check": "^2.0.0",
"glob": "^7.1.6",
@@ -50,7 +52,8 @@
"source-map-support": "^0.5.16",
"ts-jest": "^26.1.2",
"ts-node": "^9.0.0",
"typescript": "^4.0.2"
"typescript": "^4.0.2",
"yargs": "^16.0.3"
},
"keywords": [
"pure random",
363 changes: 313 additions & 50 deletions perf/benchmark.cjs
@@ -8,63 +8,326 @@
// Or against another generator:
// $: PROF_GEN="mersenne" node perf/benchmark.cjs

const Benchmark = require('benchmark');
const chalk = require('chalk');
const { exec, execFile } = require('child_process');
const { Table } = require('console-table-printer');
const path = require('path');
const yargs = require('yargs/yargs');
const { hideBin } = require('yargs/helpers');

const { countCallsToNext, genFor } = require('./helpers.cjs');
const { testDistribution } = require('./tasks.cjs');

const argv = yargs(hideBin(process.argv))
.usage('Usage: yarn bench -c [commit_hash(:alias(:target))] -c [commit_hash(:alias(:target))]')
.option('commit', {
alias: 'c',
type: 'string',
description: 'Hash of the commit to use in the benchmark',
})
.option('generator', {
alias: 'g',
type: 'string',
default: 'xoroshiro128plus',
description: 'Name of the generator',
})
.option('target', {
alias: 't',
type: 'string',
default: 'es6',
description: 'Default compilation target',
})
.option('count', {
type: 'number',
default: 1,
description: 'Number of measurements per commit',
})
.option('samples', {
alias: 's',
type: 'number',
default: 500,
description: 'Number of samples',
})
.option('verbose', {
alias: 'v',
type: 'boolean',
default: false,
description: 'Enable verbose mode',
})
.demandOption(['commit']).argv;
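// For instance, benchmarking the two parents of this merge against each other
// (the aliases 'main' and 'pr' are illustrative):
//   yarn bench -c 538c008:main -c 94a2c8a:pr -s 1000
// parses to argv.commit = ['538c008:main', '94a2c8a:pr'] and argv.samples = 1000,
// keeping the defaults argv.generator = 'xoroshiro128plus' and argv.target = 'es6'.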

const verboseLog = (...args) => {
if (argv.verbose) {
console.log(`${chalk.greenBright('DEBUG')}`, ...args);
}
};
const cleanErr = (err) => {
if (!err) return err;
const { stack, ...others } = err;
return others;
};
const execAsync = (command, options) => {
const prettyCmd = `exec(${JSON.stringify(command)}, ${JSON.stringify(options)})`;
return new Promise((resolve) => {
verboseLog(`Call to ${prettyCmd}`);
exec(command, options, (err, stdout, stderr) => {
verboseLog(`Answer from ${prettyCmd}`);
verboseLog(`err:`, cleanErr(err));
verboseLog(`stdout:`, stdout.toString());
verboseLog(`stderr:`, stderr.toString());
resolve({ err, stdout, stderr });
});
});
};
const execFileAsync = (command, args, options) => {
const prettyCmd = `execFile(${JSON.stringify(command)}, ${JSON.stringify(args)}, ${JSON.stringify(options)})`;
return new Promise((resolve) => {
verboseLog(`Call to ${prettyCmd}`);
execFile(command, args, options, (err, stdout, stderr) => {
verboseLog(`Answer from ${prettyCmd}`);
verboseLog(`err:`, cleanErr(err));
verboseLog(`stdout:`, stdout.toString());
verboseLog(`stderr:`, stderr.toString());
resolve({ err, stdout, stderr });
});
});
};
const prettyName = ({ hash, alias }) => {
const isHash = /^[0-9a-f]{40}$/.test(hash);
const smallHash = isHash ? hash.substring(0, 8) : hash;
if (alias === undefined) return smallHash;
else return `${alias}(${smallHash})`;
};
const libName = ({ hash, target }) => {
return `lib-${hash}-${target}`;
};
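// For example, the commit spec '94a2c8a:pr' (one of this merge's parents) parses to
// { hash: '94a2c8a', alias: 'pr', target: 'es6' }: since '94a2c8a' is shorter than a
// full 40-character sha1 it is kept as-is, so prettyName gives 'pr(94a2c8a)' and
// libName gives 'lib-94a2c8a-es6'.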

async function run() {
// Check that there are no local changes
const { err: gitDiffErr } = await execAsync('git diff-index --quiet HEAD --');
if (gitDiffErr && gitDiffErr.code) {
console.error(`${chalk.red('ERROR')} Please commit or stash your local changes!`);
return;
}

// Extract current branch
const { err: gitBranchErr, stdout } = await execAsync('git branch');
if (gitBranchErr && gitBranchErr.code) {
console.error(`${chalk.red('ERROR')} Failed to get the name of the current branch!`);
return;
}
const rawCurrentBranch = stdout.split('\n').find((line) => line.startsWith('* '));
if (!rawCurrentBranch) {
console.error(`${chalk.red('ERROR')} Failed to get the name of the current branch, not currently on any branch!`);
return;
}
const detachedHeadRegex = /\* \(HEAD detached at ([a-f0-9]+)\)/;
const detachedHead = detachedHeadRegex.exec(rawCurrentBranch);
const currentBranch = detachedHead !== null ? detachedHead[1] : rawCurrentBranch.substring(2);
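// Illustrative outputs of the parsing above: on a branch, `git branch` prints a line
// like '* main', so currentBranch becomes 'main'; in detached HEAD state it prints
// '* (HEAD detached at 538c008)', so currentBranch becomes '538c008'.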

const commits = (Array.isArray(argv.commit) ? argv.commit : [argv.commit]).map((commit) => {
const [hash, alias, target] = commit.split(':', 3);
return { hash, alias, target: target || argv.target };
});
try {
// Build one bundle per commit
for (const commit of commits) {
console.info(`${chalk.cyan('INFO ')} Building bundle for ${prettyName(commit)}`);
const { err: gitCheckoutErr } = await execFileAsync('git', ['checkout', commit.hash]);
if (gitCheckoutErr && gitCheckoutErr.code) {
console.error(`${chalk.red('ERROR')} Failed to checkout ${prettyName(commit)}`);
return;
}
const { err: buildErr } = await execFileAsync('node', [
path.join(__dirname, '..', 'node_modules', 'typescript', 'bin', 'tsc'),
'--target',
commit.target,
'--outDir',
path.join(__dirname, '..', libName(commit)),
...(argv.verbose ? ['--diagnostics', '--extendedDiagnostics', '--listEmittedFiles'] : []),
]);
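// The execFileAsync call above is roughly equivalent to running, from the repo root:
//   node node_modules/typescript/bin/tsc --target es6 --outDir lib-<hash>-es6
// (one bundle per commit; the exact target and folder depend on the commit spec).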
if (buildErr && buildErr.code) {
console.error(`${chalk.red('ERROR')} Failed to build ${prettyName(commit)}`);
return;
}
}
} finally {
// Go back to the original branch
await execFileAsync('git', ['checkout', currentBranch]);
}

const PRERUN_SAMPLES = Math.floor(argv.samples / 10);
const WARMUP_SAMPLES = argv.samples;
const MIN_SAMPLES = argv.samples;
const NUM_TESTS = 500;
const benchConf = { initCount: WARMUP_SAMPLES, minSamples: MIN_SAMPLES };

const PROF_GEN = argv.generator;
console.info(`${chalk.cyan('INFO ')} Warm-up samples : ${PRERUN_SAMPLES}`);
console.info(`${chalk.cyan('INFO ')} Benchmark samples: ${MIN_SAMPLES}`);
console.info(`${chalk.cyan('INFO ')} Generator : ${PROF_GEN}\n`);

// Declare configuration matrix
const configurations = [...Array(argv.count)].flatMap((_, index) =>
commits.map((commit) => {
const name = prettyName(commit);
const libPath = `../${libName(commit)}/pure-rand`;
verboseLog(`name: ${name}, require: ${libPath}`);
return [name + (index === 0 ? '' : `(${index + 1})`), require(libPath)];
})
);
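// Sketch of the resulting matrix (aliases illustrative): with --count 2 and commits
// aliased 'main' and 'pr', configurations becomes
//   [['main(538c008)', <lib>], ['pr(94a2c8a)', <lib>],
//    ['main(538c008)(2)', <lib>], ['pr(94a2c8a)(2)', <lib>]]
// i.e. every commit is measured `count` times, with commits interleaved in each round.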

// Declare performance tests
const performanceTests = [
  {
    name: (type) => `uniformIntDistribution[0;96]...............................@${type}`,
    run: (lib, customGen = genFor) => {
      // Range size is prime
      const g = customGen(lib, PROF_GEN);
      const distribution = lib.uniformIntDistribution;
      const settings = { min: 0, max: 96 };
      testDistribution(distribution, g, NUM_TESTS, settings);
    },
  },
  {
    name: (type) => `uniformIntDistribution[0;0xffff]...........................@${type}`,
    run: (lib, customGen = genFor) => {
      // Range size is a small power of 2
      const g = customGen(lib, PROF_GEN);
      const distribution = lib.uniformIntDistribution;
      const settings = { min: 0, max: 0xffff };
      testDistribution(distribution, g, NUM_TESTS, settings);
    },
  },
  {
    name: (type) => `uniformIntDistribution[0;0xffffffff].......................@${type}`,
    run: (lib, customGen = genFor) => {
      // For ranges of size <= 2**32 (ie. to - from + 1 <= 2**32)
      // uniformIntDistribution uses another execution path
      const g = customGen(lib, PROF_GEN);
      const distribution = lib.uniformIntDistribution;
      const settings = { min: 0, max: 0xffffffff };
      testDistribution(distribution, g, NUM_TESTS, settings);
    },
  },
  {
    name: (type) => `uniformIntDistribution[0;0xffffffff+1].....................@${type}`,
    run: (lib, customGen = genFor) => {
      // Range size is just above the threshold at which uniformIntDistribution
      // switches to another algorithm
      const g = customGen(lib, PROF_GEN);
      const distribution = lib.uniformIntDistribution;
      const settings = { min: 0, max: 0xffffffff + 1 };
      testDistribution(distribution, g, NUM_TESTS, settings);
    },
  },
  {
    name: (type) => `uniformIntDistribution[MIN_SAFE_INTEGER;MAX_SAFE_INTEGER]..@${type}`,
    run: (lib, customGen = genFor) => {
      // Range size is the maximal one
      const g = customGen(lib, PROF_GEN);
      const distribution = lib.uniformIntDistribution;
      const settings = { min: Number.MIN_SAFE_INTEGER, max: Number.MAX_SAFE_INTEGER };
      testDistribution(distribution, g, NUM_TESTS, settings);
    },
  },
];

// Declare the benchmarks
let benchmarks = performanceTests.flatMap((test) =>
configurations.map(([type, lib]) => new Benchmark(test.name(type), () => test.run(lib), benchConf))
);
const benchmarkStatsFor = (configurationIndex, testIndex) => {
return benchmarks[configurationIndex + testIndex * configurations.length].stats.mean;
};
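// The benchmarks array is laid out test-major: the entry for (test t, configuration c)
// lives at index c + t * configurations.length, which is the lookup performed above.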

// Simple checks concerning number of calls to the underlying generators
console.info(`${chalk.cyan('INFO ')} Measuring number of calls to next...\n`);
for (const test of performanceTests) {
for (const [type, lib] of configurations) {
const [g, counter] = countCallsToNext(genFor(lib, PROF_GEN));
test.run(lib, () => g);
console.log(`${test.name(type)} called next on the generator ${counter.count} times`);
}
}
console.log(``);

// Run all the code of all the benchmarks at least once before running them for measurements.
// It ensures that a non-optimized path will not be wrongly optimized. In the past we saw reports like:
// test1 @reference - 400 ops/s
// test2 @reference - 200 ops/s
// test1 @reference - 200 ops/s
// test2 @reference - 200 ops/s
// because running test2 de-optimized the code that had been optimized for test1 during the first runs.
console.info(`${chalk.cyan('INFO ')} Warm-up phase...\n`);
Benchmark.invoke(
benchmarks.map((b) => b.clone({ initCount: 1, minSamples: PRERUN_SAMPLES })),
{
name: 'run',
queued: true,
onCycle: (event) => console.log(String(event.target)),
}
);

// Run benchmarks
console.info(`\n${chalk.cyan('INFO ')} Benchmark phase...\n`);
benchmarks = Benchmark.invoke(benchmarks, {
name: 'run',
queued: true,
onCycle: (event) => console.log(String(event.target)),
});

// Print comparison tables
console.info(`\n${chalk.cyan('INFO ')} Reports\n`);
for (let testIndex = 0; testIndex !== performanceTests.length; ++testIndex) {
const testName = performanceTests[testIndex].name('').replace(/\.+@$/, '');
console.log(`Stats for ${testName}`);
// Create and fill the reporting table
const table = new Table({
columns: [
{ name: 'Name', alignment: 'left' },
...configurations.map(([configName]) => ({ name: configName, alignment: 'right' })),
],
});
// Find the best and worst configurations
const [idxWorst, idxBest] = configurations.reduce(
([idxWorst, idxBest], _, currentConfigIndex) => {
const worst = benchmarkStatsFor(idxWorst, testIndex);
const best = benchmarkStatsFor(idxBest, testIndex);
const current = benchmarkStatsFor(currentConfigIndex, testIndex);
return [current > worst ? currentConfigIndex : idxWorst, current < best ? currentConfigIndex : idxBest];
},
[0, 0]
);
// Add rows
for (let currentConfigIndex = 0; currentConfigIndex !== configurations.length; ++currentConfigIndex) {
const currentBenchMean = benchmarkStatsFor(currentConfigIndex, testIndex);
table.addRow(
{
Name: configurations[currentConfigIndex][0],
...Object.fromEntries(
configurations.map((config, configIndex) => {
if (configIndex === currentConfigIndex) {
return [config[0], '-'];
}
const otherBenchMean = benchmarkStatsFor(configIndex, testIndex);
const ratio = (100.0 * otherBenchMean) / currentBenchMean - 100.0;
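// Worked example: if the current row's mean is 2ms per run and the other column's
// mean is 3ms, ratio = 100 * 3 / 2 - 100 = +50.00 %, i.e. the column's configuration
// takes 50% more time than the row's (positive percentages favour the row).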
return [config[0], `${ratio >= 0 ? '+' : ''}${ratio.toFixed(2)} %`];
})
),
},
currentConfigIndex === idxBest
? { color: 'green' }
: currentConfigIndex === idxWorst
? { color: 'red' }
: undefined
);
}
// Print the table
table.printTable();
console.log(``);
}
}

run();