fix(benchmark): don't fail when running correct benchmarks #3629

Merged
merged 3 commits on Jun 20, 2023
14 changes: 9 additions & 5 deletions packages/vitest/src/runtime/runners/benchmark.ts
@@ -24,6 +24,8 @@ function createBenchmarkResult(name: string): BenchmarkResult {
   } as BenchmarkResult
 }
 
+const benchmarkTasks = new WeakMap<Benchmark, import('tinybench').Task>()
+
 async function runBenchmarkSuite(suite: Suite, runner: VitestRunner) {
   const { Task, Bench } = await importTinybench()
   const start = performance.now()
@@ -67,12 +69,13 @@ async function runBenchmarkSuite(suite: Suite, runner: VitestRunner) {
       benchmarkMap[id] = benchmark
 
       const task = new Task(benchmarkInstance, id, benchmarkFn)
-      benchmark.meta.task = task
+      benchmarkTasks.set(benchmark, task)
       updateTask(benchmark)
     })
 
     benchmarkGroup.forEach((benchmark) => {
-      benchmark.meta.task!.addEventListener('complete', (e) => {
+      const task = benchmarkTasks.get(benchmark)!
+      task.addEventListener('complete', (e) => {
         const task = e.task
         const _benchmark = benchmarkMap[task.name || '']
         if (_benchmark) {
@@ -82,7 +85,7 @@ async function runBenchmarkSuite(suite: Suite, runner: VitestRunner) {
           updateTask(_benchmark)
         }
       })
-      benchmark.meta.task!.addEventListener('error', (e) => {
+      task.addEventListener('error', (e) => {
         const task = e.task
         const _benchmark = benchmarkMap[task.name || '']
         defer.reject(_benchmark ? task.result!.error : e)
@@ -91,10 +94,11 @@ async function runBenchmarkSuite(suite: Suite, runner: VitestRunner) {
 
     const tasks: BenchTask[] = []
     for (const benchmark of benchmarkGroup) {
-      await benchmark.meta.task!.warmup()
+      const task = benchmarkTasks.get(benchmark)!
+      await task.warmup()
       const { setTimeout } = getSafeTimers()
       tasks.push(await new Promise<BenchTask>(resolve => setTimeout(async () => {
-        resolve(await benchmark.meta.task!.run())
+        resolve(await task.run())
       })))
     }
 
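The change above replaces the `meta.task` field with a module-level `WeakMap` that keeps each live tinybench `Task` in a side table, so the benchmark record itself stays plain data. Below is a minimal, self-contained sketch of that side-table pattern, modeled on the diff but not taken from it; `BenchRecord`, `runGroup`, and the `Bench` options are illustrative names and values, not Vitest internals.

```ts
import { Bench, Task } from 'tinybench'

// Side table: associates each serializable benchmark record with its live
// tinybench Task, instead of storing the Task on the record's meta.
interface BenchRecord { id: string; name: string; result?: unknown } // illustrative shape
const benchmarkTasks = new WeakMap<BenchRecord, Task>()

async function runGroup(records: BenchRecord[]): Promise<void> {
  const bench = new Bench({ time: 100 })

  for (const record of records) {
    const task = new Task(bench, record.id, async () => {
      // benchmark body would go here
    })
    benchmarkTasks.set(record, task) // the record itself stays plain data
  }

  for (const record of records) {
    const task = benchmarkTasks.get(record)! // look the Task back up when needed
    await task.warmup()
    const finished = await task.run()
    record.result = finished.result // copy plain results back onto the record
  }
}
```

Keying the `WeakMap` by the record also lets each `Task` be garbage-collected together with its benchmark, which a plain `Map` would prevent.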
1 change: 0 additions & 1 deletion packages/vitest/src/types/benchmark.ts
@@ -42,7 +42,6 @@ export interface BenchmarkUserOptions {
 export interface Benchmark extends TaskCustom {
   meta: {
     benchmark: true
-    task?: BenchTask
     result?: BenchTaskResult
   }
 }
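Dropping `task?: BenchTask` from `meta` means the type no longer advertises a non-serializable object on the task metadata. The PR description isn't reproduced here, so the motivation is an inference: a live tinybench `Task` carries functions and therefore can't cross a structured-clone boundary. The snippet below only illustrates that general constraint and is not code from this PR.

```ts
// Illustration only (an assumption about the motivation, not code from the PR):
// an object holding a function-bearing Task cannot be structured-cloned.
const metaWithTask = { benchmark: true, task: { run: async () => {} } }

try {
  structuredClone(metaWithTask) // throws DataCloneError because of the function
}
catch (err) {
  console.error('meta holding a live Task is not serializable:', err)
}

// Plain data clones fine:
structuredClone({ benchmark: true, result: { mean: 0.5 } })
```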
2 changes: 1 addition & 1 deletion test/benchmark/package.json
@@ -2,7 +2,7 @@
   "name": "@vitest/benchmark",
   "private": true,
   "scripts": {
-    "test": "node test.mjs",
+    "test": "node --test specs/ && echo '1'",
     "bench:json": "vitest bench --reporter=json",
     "bench": "vitest bench"
   },
40 changes: 40 additions & 0 deletions test/benchmark/specs/runner.test.mjs
@@ -0,0 +1,40 @@
+import { existsSync, rmSync } from 'node:fs'
+import test from 'node:test'
+import * as assert from 'node:assert'
+import { readFile } from 'node:fs/promises'
+import { startVitest } from 'vitest/node'
+
+if (existsSync('./bench.json'))
+  rmSync('./bench.json')
+
+try {
+  await startVitest('benchmark', ['base.bench', 'mode.bench', 'only.bench'], {
+    watch: false,
+  })
+}
+catch (error) {
+  console.error(error)
+  process.exit(1)
+}
+
+const benchResult = await readFile('./bench.json', 'utf-8')
+const resultJson = JSON.parse(benchResult)
+
+await test('benchmarks are actually running', async () => {
+  assert.ok(resultJson.testResults.sort, 'sort is in results')
+  assert.ok(resultJson.testResults.timeout, 'timeout is in results')
+  assert.ok(resultJson.testResults.a0, 'a0 is in results')
+  assert.ok(resultJson.testResults.c1, 'c1 is in results')
+  assert.ok(resultJson.testResults.a2, 'a2 is in results')
+  assert.ok(resultJson.testResults.b3, 'b3 is in results')
+  assert.ok(resultJson.testResults.b4, 'b4 is in results')
+})
+
+await test('doesn\'t have skipped tests', () => {
+  assert.doesNotMatch(benchResult, /skip/, 'contains skipped benchmarks')
+
+  const skippedBenches = ['s0', 's1', 's2', 's3', 'sb4', 's4']
+  const todoBenches = ['unimplemented suite', 'unimplemented test']
+
+  assert.ok(skippedBenches.concat(todoBenches).every(b => !benchResult.includes(b)), 'contains skipped benchmarks')
+})
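The spec above expects `./bench.json` to exist after `startVitest('benchmark', ...)` finishes, so the benchmark fixture presumably writes JSON results to that path. A hedged sketch of a config that would do this, using Vitest's documented `benchmark.reporters` and `benchmark.outputFile` options; the concrete file name and values are assumptions about this repo's setup, not shown in the diff.

```ts
// vitest.config.ts (sketch; the test/benchmark project's real config is not part of this diff)
import { defineConfig } from 'vitest/config'

export default defineConfig({
  test: {
    benchmark: {
      reporters: ['json'],        // machine-readable benchmark results
      outputFile: './bench.json', // the file runner.test.mjs reads back
    },
  },
})
```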
29 changes: 0 additions & 29 deletions test/benchmark/test.mjs

This file was deleted.