Skip to content

Commit

Permalink
Update ddev option to "latest-metrics"
Browse files Browse the repository at this point in the history
  • Loading branch information
mgarabed committed Mar 25, 2020
1 parent 4401146 commit 5224d70
Show file tree
Hide file tree
Showing 5 changed files with 20 additions and 15 deletions.
2 changes: 1 addition & 1 deletion airflow/tests/test_check_metrics_up_to_date.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
METRIC_PATTERN = re.compile(r'^``([^`]+)``\s+(.*)', re.MULTILINE)


@pytest.mark.check_metrics
@pytest.mark.latest_metrics
def test_check_metrics_up_to_date():
url = 'https://raw.githubusercontent.com/apache/airflow/master/docs/metrics.rst'
resp = requests.get(url)
Expand Down
2 changes: 1 addition & 1 deletion clickhouse/tests/test_unit.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def test_error_query(instance):
check.log.error.assert_any_call('Error querying %s: %s', 'system.metrics', mock.ANY)


@pytest.mark.check_metrics
@pytest.mark.latest_metrics
@pytest.mark.parametrize(
'metrics, ignored_columns, metric_source_url',
[
Expand Down
15 changes: 9 additions & 6 deletions datadog_checks_dev/datadog_checks/dev/plugin/pytest.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,18 +237,21 @@ def pytest_configure(config):
config.addinivalue_line('markers', 'unit: marker for unit tests')
config.addinivalue_line('markers', 'integration: marker for integration tests')
config.addinivalue_line('markers', 'e2e: marker for end-to-end Datadog Agent tests')
config.addinivalue_line("markers", "check_metrics: mark test as checking metrics")
config.addinivalue_line("markers", "latest_metrics: mark test as checking metrics")


def pytest_addoption(parser):
    """Register the ``--run-latest-metrics`` command line flag.

    The flag defaults to ``False``; when passed, tests marked with
    ``latest_metrics`` are allowed to run (see ``pytest_collection_modifyitems``).
    """
    # Help text matches the renamed marker (`latest_metrics`), not the old
    # `check_metrics` name this option replaced.
    parser.addoption(
        "--run-latest-metrics", action="store_true", default=False, help="run latest_metrics tests"
    )


def pytest_collection_modifyitems(config, items):
    """Skip ``latest_metrics``-marked tests unless ``--run-latest-metrics`` is given.

    Called by pytest at test collection time, see:
    https://docs.pytest.org/en/latest/example/simple.html#control-skipping-of-tests-according-to-command-line-option
    """
    if config.getoption("--run-latest-metrics"):
        # --run-latest-metrics given on the command line: run everything as-is
        return
    # Option absent: attach a skip marker to every test tagged `latest_metrics`
    skip_latest_metrics = pytest.mark.skip(reason="need --run-latest-metrics option to run")
    for item in items:
        if "latest_metrics" in item.keywords:
            item.add_marker(skip_latest_metrics)
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def display_envs(check_envs):
@click.option('--format-style', '-fs', is_flag=True, help='Run only the code style formatter')
@click.option('--style', '-s', is_flag=True, help='Run only style checks')
@click.option('--bench', '-b', is_flag=True, help='Run only benchmarks')
@click.option('--check-metrics', is_flag=True, help='Run only metrics validation tests')
@click.option('--latest-metrics', is_flag=True, help='Run only metrics validation tests')
@click.option('--e2e', is_flag=True, help='Run only end-to-end tests')
@click.option('--cov', '-c', 'coverage', is_flag=True, help='Measure code coverage')
@click.option('--cov-missing', '-cm', is_flag=True, help='Show line numbers of statements that were not executed')
Expand All @@ -50,7 +50,7 @@ def test(
format_style,
style,
bench,
check_metrics,
latest_metrics,
e2e,
coverage,
junit,
Expand Down Expand Up @@ -152,7 +152,7 @@ def test(
enter_pdb=enter_pdb,
debug=debug,
bench=bench,
check_metrics=check_metrics,
latest_metrics=latest_metrics,
coverage=coverage,
junit=junit,
marker=marker,
Expand All @@ -174,6 +174,8 @@ def test(
test_type_display = 'only style checks'
elif bench:
test_type_display = 'only benchmarks'
elif latest_metrics:
test_type_display = 'only latest metrics validation'
elif e2e:
test_type_display = 'only end-to-end tests'
else:
Expand Down
8 changes: 4 additions & 4 deletions datadog_checks_dev/datadog_checks/dev/tooling/testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,7 @@ def construct_pytest_options(
enter_pdb=False,
debug=False,
bench=False,
check_metrics=False,
latest_metrics=False,
coverage=False,
junit=False,
marker='',
Expand Down Expand Up @@ -210,9 +210,9 @@ def construct_pytest_options(
else:
pytest_options += ' --benchmark-skip'

if check_metrics:
pytest_options += ' --run-check-metrics'
marker = 'check_metrics'
if latest_metrics:
pytest_options += ' --run-latest-metrics'
marker = 'latest_metrics'

if junit:
test_group = 'e2e' if e2e else 'unit'
Expand Down

0 comments on commit 5224d70

Please sign in to comment.