benchmark: allow benchmarks to specify flags
* Give createBenchmark and the Benchmark constructor
  a third argument for specifying the command line flags
  that this benchmark should be run with.
  The benchmarks are no longer run with --expose-internals
  by default; benchmarks that need it must pass the flag explicitly.
* Rename options to configs in createBenchmark and the Benchmark
  constructor to match the documentation, since the configs are not optional.
* Comment the properties of a Benchmark object

Also improve the documentation about creating benchmarks

* Add detailed description of the arguments of `createBenchmark`
* Describe the two passes of running the benchmarks
* Suggest what kind of code should go where in the benchmark example
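
In short, a benchmark that needs internal modules now opts in through the
third argument. A minimal sketch, modeled on the `benchmark/misc/freelist.js`
change below (the `n` value and the loop body are illustrative):

```js
'use strict';
const common = require('../common.js');

const bench = common.createBenchmark(main, {
  n: [100000]
}, {
  flags: ['--expose-internals'] // previously implicit, now opt-in
});

function main(conf) {
  // Internal modules are only available because of the flag above
  const FreeList = require('internal/freelist').FreeList;
  bench.start();
  for (var i = 0; i < conf.n; i++) {
    const list = new FreeList('bench', 1000, Object);
    list.alloc();
  }
  bench.end(conf.n);
}
```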

PR-URL: #10448
Reviewed-By: Andreas Madsen <amwebdk@gmail.com>
Reviewed-By: Brian White <mscdex@mscdex.net>
joyeecheung committed Jan 4, 2017
1 parent f2f997a commit 2826e63
Showing 3 changed files with 81 additions and 25 deletions.
67 changes: 57 additions & 10 deletions benchmark/README.md
@@ -287,37 +287,84 @@ chunk encoding mean confidence.interval
## Creating a benchmark

All benchmarks use the `require('../common.js')` module. This contains the
`createBenchmark(main, configs[, options])` method which will set up your
benchmark.

The arguments of `createBenchmark` are:

* `main` {Function} The benchmark function, where the code that runs
  operations and controls timers should go
* `configs` {Object} The benchmark parameters. `createBenchmark` will run all
  possible combinations of these parameters, unless specified otherwise.
  Each configuration is a property with an array of possible values.
  Note that the configuration values can only be strings or numbers.
* `options` {Object} The benchmark options. At the moment only the `flags`
  option for specifying command line flags is supported.

`createBenchmark` returns a `bench` object, which is used for timing
the runtime of the benchmark. Run `bench.start()` after the initialization
and `bench.end(n)` when the benchmark is done. `n` is the number of operations
you performed in the benchmark.

The benchmark script will be run twice:

The first pass configures the benchmark with the combinations of
parameters specified in `configs`, and WILL NOT run the `main` function.
In this pass, the only flags used are the ones passed directly in the
command that runs the benchmark.

In the second pass, the `main` function is run, and the process is
launched with:

* The flags you have passed to `createBenchmark` (the third argument)
* The flags passed in the command that runs the benchmark

Beware that any code outside the `main` function will be run twice,
in different processes. This can be troublesome if that code has side
effects. In general, prefer putting code inside the `main` function
if it is anything more than a declaration.

```js
'use strict';
const common = require('../common.js');
const SlowBuffer = require('buffer').SlowBuffer;

const configs = {
  // Number of operations, specified here so they show up in the report.
  // Most benchmarks just use one value for all runs.
  n: [1024],
  type: ['fast', 'slow'], // Custom configurations
  size: [16, 128, 1024] // Custom configurations
};

const options = {
  // Add --expose-internals if you want to require internal modules in main
  flags: ['--zero-fill-buffers']
};

// main and configs are required, options is optional.
const bench = common.createBenchmark(main, configs, options);

// Note that any code outside main will be run twice,
// in different processes, with different command line arguments.

function main(conf) {
  // You will only get the flags that you have passed to createBenchmark
  // earlier when main is run. If you want to benchmark the internal modules,
  // require them here. For example:
  // const URL = require('internal/url').URL

  // Start the timer
  bench.start();

  // Do operations here
  const BufferConstructor = conf.type === 'fast' ? Buffer : SlowBuffer;

  for (let i = 0; i < conf.n; i++) {
    new BufferConstructor(conf.size);
  }

  // End the timer, pass in the number of operations
  bench.end(conf.n);
}
```
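
Given the `configs` above, the runner expands 1 × 2 × 3 = 6 jobs and forks
one child process per combination. A standalone sketch of that expansion
(the `combinations` helper is illustrative, not part of `common.js`):

```js
// Sketch: expand a configs object into one job per combination,
// mirroring how the benchmark queue is built conceptually.
function combinations(configs) {
  let jobs = [{}];
  for (const key of Object.keys(configs)) {
    const next = [];
    for (const job of jobs) {
      for (const value of configs[key]) {
        next.push(Object.assign({}, job, { [key]: value }));
      }
    }
    jobs = next;
  }
  return jobs;
}

combinations({ n: [1024], type: ['fast', 'slow'], size: [16, 128, 1024] });
// => 6 jobs, from { n: 1024, type: 'fast', size: 16 }
//    up to { n: 1024, type: 'slow', size: 1024 }
```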
34 changes: 22 additions & 12 deletions benchmark/common.js
@@ -3,19 +3,29 @@
const child_process = require('child_process');
const http_benchmarkers = require('./_http-benchmarkers.js');

exports.createBenchmark = function(fn, configs, options) {
  return new Benchmark(fn, configs, options);
};

function Benchmark(fn, configs, options) {
  // Use the file name as the name of the benchmark
  this.name = require.main.filename.slice(__dirname.length + 1);
  // Parse job-specific configuration from the command line arguments
  const parsed_args = this._parseArgs(process.argv.slice(2), configs);
  this.options = parsed_args.cli;
  this.extra_options = parsed_args.extra;
  // The configuration list as a queue of jobs
  this.queue = this._queue(this.options);
  // The configuration of the current job, head of the queue
  this.config = this.queue[0];

  // Execution arguments, i.e. the flags used to run the jobs
  this.flags = [];
  if (options && options.flags) {
    this.flags = this.flags.concat(options.flags);
  }
  // Holds the process.hrtime value
  this._time = [0, 0];
  // Used to make sure a benchmark only starts a timer once
  this._started = false;

  // this._run will use fork() to create a new process for each configuration
@@ -27,8 +37,8 @@ function Benchmark(fn, options) {
  }
}

Benchmark.prototype._parseArgs = function(argv, configs) {
  const cliOptions = Object.assign({}, configs);
  const extraOptions = {};
  // Parse configuration arguments
  for (const arg of argv) {
@@ -38,9 +48,9 @@ Benchmark.prototype._parseArgs = function(argv, options) {
      process.exit(1);
    }

    if (configs[match[1]]) {
      // Infer the type from the config object and parse accordingly
      const isNumber = typeof configs[match[1]][0] === 'number';
      const value = isNumber ? +match[2] : match[2];
      cliOptions[match[1]] = [value];
    } else {
@@ -138,7 +148,7 @@ Benchmark.prototype._run = function() {

    const child = child_process.fork(require.main.filename, childArgs, {
      env: childEnv,
      execArgv: self.flags.concat(process.execArgv)
    });
    child.on('message', sendResult);
    child.on('close', function(code) {
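
The net effect of the last hunk above: the hard-coded `--expose_internals`
is gone, and the child inherits the benchmark's own flags first, then the
parent's. A sketch of the combination (`--allow-natives-syntax` stands in
for whatever flags the parent was started with):

```js
// Sketch: given options.flags = ['--expose-internals'] and a parent
// started as `node --allow-natives-syntax benchmark/foo.js`, the child
// is forked with both sets of flags, benchmark flags first.
const flags = ['--expose-internals'];              // self.flags
const parentExecArgv = ['--allow-natives-syntax']; // process.execArgv
const childExecArgv = flags.concat(parentExecArgv);
console.log(childExecArgv);
// => [ '--expose-internals', '--allow-natives-syntax' ]
```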
5 changes: 2 additions & 3 deletions benchmark/misc/freelist.js
@@ -4,12 +4,11 @@ var common = require('../common.js');

var bench = common.createBenchmark(main, {
  n: [100000]
}, {
  flags: ['--expose-internals']
});

function main(conf) {
  const FreeList = require('internal/freelist').FreeList;
  var n = conf.n;
  var poolSize = 1000;
