Skip to content

Commit

Permalink
Add a --benchmark-enable property to forcibly enable benchmarks.
Browse files Browse the repository at this point in the history
  • Loading branch information
ionelmc committed Apr 10, 2016
1 parent 219fbe3 commit b181664
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 1 deletion.
6 changes: 6 additions & 0 deletions src/pytest_benchmark/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,12 @@ def pytest_addoption(parser):
help="Disable benchmarks. Benchmarked functions are only run once and no stats are reported. Use this if you "
"want to run the test but don't do any benchmarking."
)
group.addoption(
"--benchmark-enable",
action="store_true", default=False,
help="Forcibly enable benchmarks. Use this option to override --benchmark-disable (in case you have it in "
"pytest configuration)."
)
group.addoption(
"--benchmark-only",
action="store_true", default=False,
Expand Down
2 changes: 1 addition & 1 deletion src/pytest_benchmark/session.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def __init__(self, config):
warmup_iterations=config.getoption("benchmark_warmup_iterations"),
)
self.skip = config.getoption("benchmark_skip")
self.disable = config.getoption("benchmark_disable")
self.disabled = config.getoption("benchmark_disable") and not config.getoption("benchmark_enable")

if config.getoption("dist", "no") != "no" and not self.skip:
self.logger.warn(
Expand Down

0 comments on commit b181664

Please sign in to comment.