From 2ef9a1df22391ceaf1a4237997860b2baead61df Mon Sep 17 00:00:00 2001
From: Thomas Viehmann
Date: Tue, 25 May 2021 08:37:37 -0700
Subject: [PATCH] Increase mimimum number of warmup runs to 2 (#58801)

Summary:
The JIT will typically need two warmup runs to do profiling and optimization.
This is not the perfect solution but it will substantially reduce the number
of surprised people when the docs say torch.utils.benchmark.Timer takes care
of warmup.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/58801

Reviewed By: desertfire

Differential Revision: D28644244

Pulled By: robieta

fbshipit-source-id: cc54ed019e882a379d6e4a0c6a01fd5873dd41c3
---
 torch/utils/benchmark/utils/timer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/torch/utils/benchmark/utils/timer.py b/torch/utils/benchmark/utils/timer.py
index f47cc1537a26a..428196eac8b29 100644
--- a/torch/utils/benchmark/utils/timer.py
+++ b/torch/utils/benchmark/utils/timer.py
@@ -259,7 +259,7 @@ def timeit(self, number: int = 1000000) -> common.Measurement:
         """
         with common.set_torch_threads(self._task_spec.num_threads):
             # Warmup
-            self._timer.timeit(number=max(int(number // 100), 1))
+            self._timer.timeit(number=max(int(number // 100), 2))
 
         return common.Measurement(
             number_per_run=number,
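
The sketch below (not part of the patch) illustrates the situation the change addresses: Timer.timeit() performs its warmup internally, and a TorchScript function typically needs two calls before the profiling executor has specialized and optimized the graph, so a single warmup run could leave part of the measured iterations running unoptimized code. The function and tensor names here are illustrative, not from the commit.

# minimal usage sketch, assuming a scripted elementwise function as the workload
import torch
from torch.utils.benchmark import Timer

@torch.jit.script
def fn(x: torch.Tensor) -> torch.Tensor:
    # first one or two calls trigger profiling/optimization in the JIT
    return x.mul(2).add(1)

x = torch.randn(1024)

timer = Timer(
    stmt="fn(x)",
    globals={"fn": fn, "x": x},
)

# timeit() runs the internal warmup (now at least 2 runs) before measuring
print(timer.timeit(number=10000))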