Commit
Merge branch 'ignore-unknown'
RKrahl committed Dec 10, 2017
2 parents b911c8b + 67fa9ee commit 7e491bb
Showing 4 changed files with 102 additions and 3 deletions.
2 changes: 2 additions & 0 deletions CHANGES
@@ -7,6 +7,8 @@

+ Issue #7: Add a configuration switch to implicitly mark all tests.

+ Issue #10: Add an option to ignore unknown dependencies.

** Incompatible changes

+ Prepend the class name to the default test name for test class
15 changes: 15 additions & 0 deletions doc/src/configuration.rst
@@ -23,3 +23,18 @@ Configuration file options can be set in the `ini file`.
  decorated. If set to `True`, the outcome of all tests will be
  registered. It has the same effect as if all tests are
  implicitly decorated with :func:`pytest.mark.dependency`.
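  For example, a minimal `ini file` enabling this switch might look
  like the following sketch (shown for illustration only; the file name
  `pytest.ini` and the truth value spelling `true` are assumptions, not
  part of this commit)::

    [pytest]
    automark_dependency = true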

Command line options
--------------------

The following command line options are added by pytest-dependency:

`--ignore-unknown-dependency`
  By default, a test will be skipped unless all of its dependencies
  have been run successfully. If this option is set, a test will only
  be skipped if one of its dependencies has been skipped or has failed;
  dependencies that have not been run at all will be ignored.

  This may be useful if you run only a subset of the test suite and
  some tests in the selected set are marked to depend on other tests
  that have not been selected.
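  As a concrete sketch of that situation (added here for illustration,
  using a hypothetical module name `test_part.py`)::

    # test_part.py -- hypothetical example
    import pytest

    @pytest.mark.dependency()
    def test_c():
        pass

    @pytest.mark.dependency(depends=["test_c"])
    def test_d():
        pass

  Running `pytest test_part.py::test_d` alone skips `test_d`, because
  the outcome of `test_c` is unknown; running
  `pytest --ignore-unknown-dependency test_part.py::test_d` lets
  `test_d` run instead.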
16 changes: 13 additions & 3 deletions pytest_dependency.py
Expand Up @@ -6,6 +6,7 @@
import pytest

_automark = False
_ignore_unknown = False


def _get_bool(value):
@@ -72,8 +73,13 @@ def addResult(self, item, name, rep):

     def checkDepend(self, depends, item):
         for i in depends:
-            if not(i in self.results and self.results[i].isSuccess()):
-                pytest.skip("%s depends on %s" % (item.name, i))
+            if i in self.results:
+                if self.results[i].isSuccess():
+                    continue
+            else:
+                if _ignore_unknown:
+                    continue
+            pytest.skip("%s depends on %s" % (item.name, i))


 def depends(request, other):
@@ -102,11 +108,15 @@ def pytest_addoption(parser):
     parser.addini("automark_dependency",
                   "Add the dependency marker to all tests automatically",
                   default=False)
+    parser.addoption("--ignore-unknown-dependency",
+                     action="store_true", default=False,
+                     help="ignore dependencies whose outcome is not known")


 def pytest_configure(config):
-    global _automark
+    global _automark, _ignore_unknown
     _automark = _get_bool(config.getini("automark_dependency"))
+    _ignore_unknown = config.getoption("--ignore-unknown-dependency")


 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
72 changes: 72 additions & 0 deletions tests/test_04_ignore_unknown.py
@@ -0,0 +1,72 @@
"""Test the ignore-unknown-dependency command line option.
"""

import pytest


def test_no_ignore(ctestdir):
"""No command line option, e.g. ignore-unknown-dependency is not set.
Explicitly select only a single test that depends on another one.
Since the other test has not been run at all, the selected test
will be skipped.
"""
ctestdir.makepyfile("""
import pytest
@pytest.mark.dependency()
def test_a():
pass
@pytest.mark.dependency()
def test_b():
pass
@pytest.mark.dependency()
def test_c():
pass
@pytest.mark.dependency(depends=["test_c"])
def test_d():
pass
""")
result = ctestdir.runpytest("--verbose", "test_no_ignore.py::test_d")
result.assert_outcomes(passed=0, skipped=1, failed=0)
result.stdout.fnmatch_lines("""
*::test_d SKIPPED
""")


def test_ignore(ctestdir):
    """Set the ignore-unknown-dependency command line option.

    Explicitly select only a single test that depends on another one.
    The other test has not been run at all, but since unknown
    dependencies will be ignored, the selected test will be run
    nevertheless.
    """
    ctestdir.makepyfile("""
        import pytest

        @pytest.mark.dependency()
        def test_a():
            pass

        @pytest.mark.dependency()
        def test_b():
            pass

        @pytest.mark.dependency()
        def test_c():
            pass

        @pytest.mark.dependency(depends=["test_c"])
        def test_d():
            pass
    """)
    result = ctestdir.runpytest("--verbose", "--ignore-unknown-dependency",
                                "test_ignore.py::test_d")
    result.assert_outcomes(passed=1, skipped=0, failed=0)
    result.stdout.fnmatch_lines("""
        *::test_d PASSED
    """)
