From 11a50b241222775b6aac0884e38073921da1795e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mi=C5=A1ko=20Hevery?= Date: Tue, 15 Oct 2019 13:15:24 -0700 Subject: [PATCH 1/5] test: Add script which allows running all of the profiling tests and compare results --- .gitignore | 2 + packages/core/test/render3/perf/README.md | 48 ++++++++++ .../core/test/render3/perf/profile_all.js | 90 +++++++++++++++++++ 3 files changed, 140 insertions(+) create mode 100644 packages/core/test/render3/perf/profile_all.js diff --git a/.gitignore b/.gitignore index 050e9b0493550..0dc9e53cfb638 100644 --- a/.gitignore +++ b/.gitignore @@ -39,3 +39,5 @@ yarn-error.log # User specific bazel settings .bazelrc.user +.notes.md +baseline.json diff --git a/packages/core/test/render3/perf/README.md b/packages/core/test/render3/perf/README.md index 9708d02c5eb77..64b424d0a445f 100644 --- a/packages/core/test/render3/perf/README.md +++ b/packages/core/test/render3/perf/README.md @@ -1,19 +1,67 @@ ### Build +``` yarn bazel build //packages/core/test/render3/perf:{name}.min_debug.es2015.js --define=compile=aot +``` ### Run +``` node dist/bin/packages/core/test/render3/perf/{name}.min_debug.es2015.js +``` ### Profile +``` node --no-turbo-inlining --inspect-brk dist/bin/packages/core/test/render3/perf/{name}.min_debug.es2015.js +``` then connect with a debugger (the `--inspect-brk` option will make sure that benchmark execution doesn't start until a debugger is connected and the code execution is manually resumed). The actual benchmark code has calls that will start (`console.profile`) and stop (`console.profileEnd`) a profiling session. +### Profile All + +To run all of the benchmarks use the `profile_all.js` script: +``` +node packages/core/test/render3/perf/profile_all.js +``` + +NOTE: This command will build all of the tests, so there is no need to do so manually. + +Optionally use the `--write` command to save the run result to a file for later comparison. 
+ + +``` +node packages/core/test/render3/perf/profile_all.js --write baseline.json +``` + +### Comparing Runs + +If you have saved the baseline (as described in the step above) you can use it to get change in performance like so: + +``` +node packages/core/test/render3/perf/profile_all.js --read baseline.json +``` + +The resulting output should look something like this: +``` +┌────────────────────────────────────┬─────────┬──────┬───────────┬───────────┬───────┐ +│ (index) │ time │ unit │ base_time │ base_unit │ % │ +├────────────────────────────────────┼─────────┼──────┼───────────┼───────────┼───────┤ +│ directive_instantiate │ 276.652 │ 'ms' │ 286.292 │ 'ms' │ -3.37 │ +│ element_text_create │ 262.868 │ 'ms' │ 260.031 │ 'ms' │ 1.09 │ +│ interpolation │ 257.733 │ 'us' │ 260.489 │ 'us' │ -1.06 │ +│ listeners │ 1.997 │ 'us' │ 1.985 │ 'us' │ 0.6 │ +│ map_based_style_and_class_bindings │ 10.07 │ 'ms' │ 9.786 │ 'ms' │ 2.9 │ +│ noop_change_detection │ 93.256 │ 'us' │ 91.745 │ 'us' │ 1.65 │ +│ property_binding │ 290.777 │ 'us' │ 280.586 │ 'us' │ 3.63 │ +│ property_binding_update │ 588.545 │ 'us' │ 583.334 │ 'us' │ 0.89 │ +│ style_and_class_bindings │ 1.061 │ 'ms' │ 1.047 │ 'ms' │ 1.34 │ +│ style_binding │ 543.841 │ 'us' │ 545.385 │ 'us' │ -0.28 │ +└────────────────────────────────────┴─────────┴──────┴───────────┴───────────┴───────┘ +``` + ### Notes In all the above commands {name} should be replaced with the actual benchmark (folder) name, ex.: diff --git a/packages/core/test/render3/perf/profile_all.js b/packages/core/test/render3/perf/profile_all.js new file mode 100644 index 0000000000000..206d874dc39e4 --- /dev/null +++ b/packages/core/test/render3/perf/profile_all.js @@ -0,0 +1,90 @@ +/** + * @license + * Copyright Google Inc. All Rights Reserved. + * + * Use of this source code is governed by an MIT-style license that can be + * found in the LICENSE file at https://angular.io/license + */ + +const shell = require('shelljs'); +const fs = require('fs'); +const path = require('path'); + +const argv = process.argv +const baseDir = path.dirname(argv[1]); +const readPath = argv[2] == '--read' ? argv[3] : null; +const writePath = argv[2] == '--write' ? 
argv[3] : null; + +const UNITS = { + 'ps': 1e-12, + 'ns': 1e-9, + 'us': 1e-6, + 'ms': 1e-3, + 's': 1, +} + +// Contains the list of tests which should be built and profiled +const profileTests = + shell.ls(baseDir).filter((filename) => fs.statSync(path.join(baseDir, filename)).isDirectory()); + +// build tests +shell.exec( + `yarn bazel build --define=compile=aot ` + + profileTests.map((name) => `//packages/core/test/render3/perf:${name}.min_debug.es2015.js`) + .join(' ')); + +// profile tests +console.log('------------------------------------------------'); +console.log('PROFILING'); +console.log('------------------------------------------------'); + +// This stores the results of the run +const times = {}; + + +// If we have a readPath than read it into the `times` +if (readPath) { + const json = JSON.parse(shell.cat(readPath)); + Object.keys(json).forEach((name) => { + const run = json[name]; + times[name] = { + name: run.name, + base_time: run.time, + base_unit: run.unit, + } + }); +} +profileTests.forEach((name) => { + console.log('----------------', name, '----------------'); + const log = + shell.exec(`node dist/bin/packages/core/test/render3/perf/${name}.min_debug.es2015.js`); + if (log.code != 0) throw new Error(log); + const matches = log.stdout.match(/: ([\d\.]+) (.s)/); + const runTime = times[name] || (times[name] = {name: name}); + runTime.time = Number.parseFloat(matches[1]); + runTime.unit = matches[2]; + if (runTime.base_unit) { + const time = runTime.time * UNITS[runTime.unit]; + const base_time = runTime.base_time * UNITS[runTime.base_unit]; + const change = (time - base_time) / base_time * 100; + runTime['%'] = Number.parseFloat(change.toFixed(2)); + } +}); +console.log('================================================'); + +// If we have the writePath than write the `times` to file +if (writePath) { + const baseTimes = {}; + profileTests.forEach((name) => { + const run = times[name]; + baseTimes[name] = { + name: run.name, + time: run.time, + unit: run.unit, + } + }); + fs.writeFileSync(writePath, JSON.stringify(baseTimes, undefined, 2)); +} + +// Pretty print the table with the run information +console.table(times, ['time', 'unit', 'base_time', 'base_unit', '%']); From 9639d7428ea2d8e59c84160d8138b19a2447840a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mi=C5=A1ko=20Hevery?= Date: Tue, 15 Oct 2019 13:35:28 -0700 Subject: [PATCH 2/5] fixup! test: Add script which allows running all of the profiling tests and compare results --- packages/core/test/render3/perf/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/core/test/render3/perf/README.md b/packages/core/test/render3/perf/README.md index 64b424d0a445f..ab79a0bb53efb 100644 --- a/packages/core/test/render3/perf/README.md +++ b/packages/core/test/render3/perf/README.md @@ -20,7 +20,7 @@ then connect with a debugger (the `--inspect-brk` option will make sure that ben The actual benchmark code has calls that will start (`console.profile`) and stop (`console.profileEnd`) a profiling session. -### Profile All +### Run All To run all of the benchmarks use the `profile_all.js` script: ``` From bf92b40324927e01200b1b9c0a6f3275eb862667 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mi=C5=A1ko=20Hevery?= Date: Tue, 15 Oct 2019 13:35:28 -0700 Subject: [PATCH 3/5] fixup! 
test: Add script which allows running all of the profiling tests and compare results --- packages/core/test/render3/perf/profile_all.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/packages/core/test/render3/perf/profile_all.js b/packages/core/test/render3/perf/profile_all.js index 206d874dc39e4..8cee86aa73c1b 100644 --- a/packages/core/test/render3/perf/profile_all.js +++ b/packages/core/test/render3/perf/profile_all.js @@ -34,8 +34,11 @@ shell.exec( .join(' ')); // profile tests +// tslint:disable-next-line:no-console console.log('------------------------------------------------'); +// tslint:disable-next-line:no-console console.log('PROFILING'); +// tslint:disable-next-line:no-console console.log('------------------------------------------------'); // This stores the results of the run @@ -55,6 +58,7 @@ if (readPath) { }); } profileTests.forEach((name) => { + // tslint:disable-next-line:no-console console.log('----------------', name, '----------------'); const log = shell.exec(`node dist/bin/packages/core/test/render3/perf/${name}.min_debug.es2015.js`); @@ -70,6 +74,7 @@ profileTests.forEach((name) => { runTime['%'] = Number.parseFloat(change.toFixed(2)); } }); +// tslint:disable-next-line:no-console console.log('================================================'); // If we have the writePath than write the `times` to file @@ -87,4 +92,5 @@ if (writePath) { } // Pretty print the table with the run information +// tslint:disable-next-line:no-console console.table(times, ['time', 'unit', 'base_time', 'base_unit', '%']); From ed4da55f7159d1cb06a44796fa1883b2d6bf6bf3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mi=C5=A1ko=20Hevery?= Date: Tue, 15 Oct 2019 13:35:28 -0700 Subject: [PATCH 4/5] fixup! test: Add script which allows running all of the profiling tests and compare results --- packages/core/test/render3/perf/profile_all.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/core/test/render3/perf/profile_all.js b/packages/core/test/render3/perf/profile_all.js index 8cee86aa73c1b..0eb53459de5da 100644 --- a/packages/core/test/render3/perf/profile_all.js +++ b/packages/core/test/render3/perf/profile_all.js @@ -10,7 +10,7 @@ const shell = require('shelljs'); const fs = require('fs'); const path = require('path'); -const argv = process.argv +const argv = process.argv; const baseDir = path.dirname(argv[1]); const readPath = argv[2] == '--read' ? argv[3] : null; const writePath = argv[2] == '--write' ? argv[3] : null; @@ -21,7 +21,7 @@ const UNITS = { 'us': 1e-6, 'ms': 1e-3, 's': 1, -} +}; // Contains the list of tests which should be built and profiled const profileTests = @@ -54,7 +54,7 @@ if (readPath) { name: run.name, base_time: run.time, base_unit: run.unit, - } + }; }); } profileTests.forEach((name) => { @@ -86,7 +86,7 @@ if (writePath) { name: run.name, time: run.time, unit: run.unit, - } + }; }); fs.writeFileSync(writePath, JSON.stringify(baseTimes, undefined, 2)); } From a6ed0080d975ac5ee3e9a8c215b36ea062fdeb76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mi=C5=A1ko=20Hevery?= Date: Tue, 15 Oct 2019 13:35:28 -0700 Subject: [PATCH 5/5] fixup! 
test: Add script which allows running all of the profiling tests and compare results --- .../core/test/render3/perf/directive_instantiate/index.ts | 6 +++--- packages/core/test/render3/perf/micro_bench.ts | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/core/test/render3/perf/directive_instantiate/index.ts b/packages/core/test/render3/perf/directive_instantiate/index.ts index 5e322d62d4a7f..25d65d2a64cbf 100644 --- a/packages/core/test/render3/perf/directive_instantiate/index.ts +++ b/packages/core/test/render3/perf/directive_instantiate/index.ts @@ -16,8 +16,8 @@ import {createAndRenderLView} from '../setup'; class Tooltip { tooltip?: string; position?: string; - static ngFactoryDef = () => new Tooltip(); - static ngDirectiveDef = ɵɵdefineDirective({ + static ɵfac = () => new Tooltip(); + static ɵdir = ɵɵdefineDirective({ type: Tooltip, selectors: [['', 'tooltip', '']], inputs: {tooltip: 'tooltip', position: 'position'} @@ -75,7 +75,7 @@ function testTemplate(rf: RenderFlags, ctx: any) { const viewTNode = createTNode(null !, null, TNodeType.View, -1, null, null) as TViewNode; const embeddedTView = createTView( - -1, testTemplate, 21, 10, [Tooltip.ngDirectiveDef], null, null, null, + -1, testTemplate, 21, 10, [Tooltip.ɵdir], null, null, null, [['position', 'top', 3, 'tooltip']]); // initialize global state diff --git a/packages/core/test/render3/perf/micro_bench.ts b/packages/core/test/render3/perf/micro_bench.ts index 47d6c0e9a3f27..d727935eab927 100644 --- a/packages/core/test/render3/perf/micro_bench.ts +++ b/packages/core/test/render3/perf/micro_bench.ts @@ -7,7 +7,7 @@ */ const performance = require('perf_hooks').performance; -const MIN_SAMPLE_COUNT_NO_IMPROVEMENT = 10; +const MIN_SAMPLE_COUNT_NO_IMPROVEMENT = 30; const MIN_SAMPLE_DURATION = 100; const UNITS = ['ms', 'us', 'ns', 'ps'];
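
The comparison that produces the `%` column in the README's example table works as follows: `profile_all.js` scrapes each benchmark's stdout with the regex `/: ([\d\.]+) (.s)/`, normalizes the captured value to seconds using the `UNITS` table, and compares it against the baseline saved earlier with `--write`. Below is a minimal sketch of that math, not the script itself; the sample stdout line and baseline values are hypothetical, picked to match the `property_binding` row of the example table.

```
// Sketch of the parsing + comparison performed by profile_all.js (not the script itself).
// The stdout line and baseline values below are hypothetical.
const UNITS = {'ps': 1e-12, 'ns': 1e-9, 'us': 1e-6, 'ms': 1e-3, 's': 1};

const stdout = 'property_binding: 290.777 us';  // current run (hypothetical)
const baseline = {time: 280.586, unit: 'us'};   // stored earlier via --write

// Same regex the script uses: captures the number and the two-character unit.
const [, value, unit] = stdout.match(/: ([\d\.]+) (.s)/);

// Normalize both measurements to seconds before comparing.
const time = Number.parseFloat(value) * UNITS[unit];
const baseTime = baseline.time * UNITS[baseline.unit];

// Positive % means the current run is slower than the baseline.
const changePercent = Number.parseFloat(((time - baseTime) / baseTime * 100).toFixed(2));
console.log(changePercent);  // 3.63
```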
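
Patch 5 also raises `MIN_SAMPLE_COUNT_NO_IMPROVEMENT` in `micro_bench.ts` from 10 to 30, which makes the harness take more consecutive samples without a new best time before it stops, trading longer runs for steadier numbers. The loop below is only an illustrative sketch of that kind of stopping rule under assumed names; `measure` and `benchmarkStep` are made up for the example and are not the actual `micro_bench.ts` API.

```
// Illustrative only: a "stop after N samples without improvement" loop.
// measure() and benchmarkStep() are assumed names, not micro_bench.ts APIs.
const {performance} = require('perf_hooks');

const MIN_SAMPLE_COUNT_NO_IMPROVEMENT = 30;  // samples to wait after the last best time
const MIN_SAMPLE_DURATION = 100;             // ms each sample should run for

function measure(benchmarkStep) {
  let bestTimePerStep = Infinity;
  let samplesSinceImprovement = 0;
  while (samplesSinceImprovement < MIN_SAMPLE_COUNT_NO_IMPROVEMENT) {
    // Run the step in a tight loop until the sample is long enough to be meaningful.
    let iterations = 0;
    const start = performance.now();
    while (performance.now() - start < MIN_SAMPLE_DURATION) {
      benchmarkStep();
      iterations++;
    }
    const timePerStep = (performance.now() - start) / iterations;
    if (timePerStep < bestTimePerStep) {
      bestTimePerStep = timePerStep;
      samplesSinceImprovement = 0;  // improvement found: reset the counter
    } else {
      samplesSinceImprovement++;    // no improvement: one step closer to stopping
    }
  }
  return bestTimePerStep;  // ms per call to benchmarkStep
}

// Usage sketch:
// console.log(measure(() => { /* code under test */ }), 'ms');
```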