diff --git a/.appveyor.yml b/.appveyor.yml deleted file mode 100644 index 667bf4ae..00000000 --- a/.appveyor.yml +++ /dev/null @@ -1,21 +0,0 @@ -environment: - matrix: - - TOXENV: "py35-pytestlatest" - - TOXENV: "py36-pytestlatest" - - TOXENV: "py37-pytestlatest" - - TOXENV: "py38-pytestlatest" - - TOXENV: "py38-pytestmaster" - - TOXENV: "py38-psutil" - -install: - - C:\Python38\python -m pip install -U pip setuptools virtualenv - - C:\Python38\python -m pip install -U tox setuptools_scm - -build: false # Not a C# project, build stuff at the test step instead. - -test_script: - - C:\Python38\python -m tox - -# We don't deploy anything on tags with AppVeyor, we use Travis instead, so we -# might as well save resources -skip_tags: true diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 00000000..7359cfe4 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,81 @@ +name: build + +on: [push, pull_request] + +jobs: + build: + + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + tox_env: + - "py36-pytestlatest" + - "py37-pytestlatest" + - "py38-pytestlatest" + - "py39-pytestlatest" + - "py310-pytestlatest" + - "py38-pytestmain" + - "py38-psutil" + - "py38-setproctitle" + + os: [ubuntu-latest, windows-latest] + include: + - tox_env: "py36-pytestlatest" + python: "3.6" + - tox_env: "py37-pytestlatest" + python: "3.7" + - tox_env: "py38-pytestlatest" + python: "3.8" + - tox_env: "py39-pytestlatest" + python: "3.9" + - tox_env: "py310-pytestlatest" + python: "3.10-dev" + - tox_env: "py38-pytestmain" + python: "3.8" + - tox_env: "py38-psutil" + python: "3.8" + - tox_env: "py38-setproctitle" + python: "3.8" + + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install tox + run: | + python -m pip install --upgrade pip + pip install tox + - name: Test + run: | + tox -e ${{ matrix.tox_env }} + + deploy: + + if: 
github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') + + runs-on: ubuntu-latest + + needs: build + + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: "3.7" + - name: Install wheel + run: | + python -m pip install --upgrade pip + pip install wheel + - name: Build package + run: | + python setup.py sdist bdist_wheel + - name: Publish package to PyPI + uses: pypa/gh-action-pypi-publish@master + with: + user: __token__ + password: ${{ secrets.pypi_token }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f48f7ce4..9c2836d9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,20 +1,27 @@ repos: -- repo: https://github.com/ambv/black - rev: 19.10b0 +- repo: https://github.com/psf/black + rev: 22.3.0 hooks: - id: black args: [--safe, --quiet, --target-version, py35] - language_version: python3.7 +- repo: https://github.com/asottile/blacken-docs + rev: v1.12.1 + hooks: + - id: blacken-docs + additional_dependencies: [black==20.8b1] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.4.0 + rev: v4.2.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml - id: debug-statements +- repo: https://github.com/PyCQA/flake8 + rev: 4.0.1 + hooks: - id: flake8 - repo: https://github.com/asottile/pyupgrade - rev: v2.7.2 + rev: v2.32.1 hooks: - id: pyupgrade args: [--py3-plus] @@ -26,4 +33,12 @@ repos: files: ^(CHANGELOG.rst|HOWTORELEASE.rst|README.rst|changelog/.*)$ language: python additional_dependencies: [pygments, restructuredtext_lint] - language_version: python3.7 +- repo: https://github.com/pre-commit/mirrors-mypy + rev: v0.960 + hooks: + - id: mypy + files: ^(src/|testing/) + args: [] + additional_dependencies: + - pytest>=6.2.0 + - py>=1.10.0 diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 9f728eb2..00000000 --- a/.travis.yml +++ /dev/null @@ -1,64 +0,0 @@ -dist: xenial -language: python - 
-notifications: - irc: - channels: - - 'chat.freenode.net#pytest' - on_success: change - on_failure: change - skip_join: true - email: - - pytest-commit@python.org - -install: - - pip install -U pip setuptools - - pip install tox setuptools_scm -script: tox - -stages: - - baseline - - test - - name: deploy - if: repo = pytest-dev/pytest-xdist AND tag IS present - -jobs: - include: - - stage: baseline - python: '3.7' - env: TOXENV=linting - cache: - directories: - - $HOME/.cache/pre-commit - - python: '3.8' - env: TOXENV=py38-pytestlatest - - - stage: test - python: "3.5" - env: TOXENV=py35-pytestlatest - - python: "3.6" - env: TOXENV=py36-pytestlatest - - python: "3.7" - env: TOXENV=py37-pytestlatest - - python: "3.9-dev" - env: TOXENV=py39-pytestlatest - - python: "3.8" - env: TOXENV=py38-pytestmaster - - python: "3.8" - env: TOXENV=py38-psutil - - - stage: deploy - python: '3.8' - env: - install: pip install -U setuptools setuptools_scm - script: skip - deploy: - provider: pypi - user: ronny - distributions: sdist bdist_wheel - skip_upload_docs: true - password: - secure: cxmSDho5d+PYKEM4ZCg8ms1P4lzhYkrw6fEOm2HtTcsuCyY6aZMSgImWAnEYbJHSkdzgcxlXK9UKJ9B0YenXmBCkAr7UjdnpNXNmkySr0sYzlH/sfqt/dDATCHFaRKxnkOSOVywaDYhT9n8YudbXI77pXwD12i/CeSSJDbHhsu0JYUfAcb+D6YjRYoA2SEGCnzSzg+gDDfwXZx4ZiODCGLVwieNp1klCg88YROUE1BaYYNuUOONvfXX8+TWowbCF6ChH1WL/bZ49OStEYQNuYxZQZr4yClIqu9VJbchrU8j860K9ott2kkGTgfB/dDrQB/XncBubyIX9ikzCQAmmBXWAI3eyvWLPDk2Jz7kW2l2RT7syct80tCq3JhvQ1qdwr5ap7siocTLgnBW0tF4tkHSTFN3510fkc43npnp6FThebESQpnI24vqpwJ9hI/kW5mYi014Og2E/cpCXnz2XO8iZPDbqAMQpDsqEQoyhfGNgPTGp4K30TxRtwZBI5hHhDKnnR16fXtRgt1gYPvz/peUQvvpOm4JzIzGXPzluuutpnCBy75v5+oiwT3YRrLL/Meims9FtDDXL3qQubAE/ezIOOpm0N5XXV8DxIom8EN71yq5ab1tqhM+tBX7owRjy4FR4If2Q8feBdmTuh26DIQt/y+qSG8VkB9Sw/JCjc7c= - on: - tags: true - repo: pytest-dev/pytest-xdist diff --git a/CHANGELOG.rst b/CHANGELOG.rst index ad758a30..d218ba55 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,3 +1,98 @@ +pytest-xdist 2.5.0 (2021-12-10) 
+=============================== + +Deprecations and Removals +------------------------- + +- `#468 `_: The ``--boxed`` command line argument is deprecated. Install pytest-forked and use ``--forked`` instead. pytest-xdist 3.0.0 will remove the ``--boxed`` argument and pytest-forked dependency. + + +Features +-------- + +- `#722 `_: Full compatibility with pytest 7 - no deprecation warnings or use of legacy features. + +- `#733 `_: New ``--dist=loadgroup`` option, which ensures all tests marked with ``@pytest.mark.xdist_group`` run in the same session/worker. Other tests run distributed as in ``--dist=load``. + + +Trivial Changes +--------------- + +- `#708 `_: Use ``@pytest.hookspec`` decorator to declare hook options in ``newhooks.py`` to avoid warnings in ``pytest 7.0``. + +- `#719 `_: Use up-to-date ``setup.cfg``/``pyproject.toml`` packaging setup. + +- `#720 `_: Require pytest>=6.2.0. + +- `#721 `_: Started using type annotations and mypy checking internally. The types are incomplete and not published. + + +pytest-xdist 2.4.0 (2021-09-20) +=============================== + +Features +-------- + +- `#696 `_: On Linux, the process title now changes to indicate the current worker state (running/idle). + + Depends on the `setproctitle `__ package, which can be installed with ``pip install pytest-xdist[setproctitle]``. + +- `#704 `_: Add support for Python 3.10. + + +pytest-xdist 2.3.0 (2021-06-16) +=============================== + +Deprecations and Removals +------------------------- + +- `#654 `_: Python 3.5 is no longer supported. + + +Features +-------- + +- `#646 `_: Add ``--numprocesses=logical`` flag, which automatically uses the number of logical CPUs available, instead of physical CPUs with ``auto``. + + This is very useful for test suites which are not CPU-bound. + +- `#650 `_: Added new ``pytest_handlecrashitem`` hook to allow handling and rescheduling crashed items. 
+ + +Bug Fixes +--------- + +- `#421 `_: Copy the parent process sys.path into local workers, to work around execnet's python -c adding the current directory to sys.path. + +- `#638 `_: Fix issue caused by changing the branch name of the pytest repository. + + +Trivial Changes +--------------- + +- `#592 `_: Replace master with controller where ever possible. + +- `#643 `_: Use 'main' to refer to pytest default branch in tox env names. + + +pytest-xdist 2.2.1 (2021-02-09) +=============================== + +Bug Fixes +--------- + +- `#623 `_: Gracefully handle the pending deprecation of Node.fspath by using config.rootpath for topdir. + + +pytest-xdist 2.2.0 (2020-12-14) +=============================== + +Features +-------- + +- `#608 `_: Internal errors in workers are now propagated to the master node. + + pytest-xdist 2.1.0 (2020-08-25) =============================== diff --git a/OVERVIEW.md b/OVERVIEW.md deleted file mode 100644 index 87b9d1b3..00000000 --- a/OVERVIEW.md +++ /dev/null @@ -1,76 +0,0 @@ -# Overview # - -`xdist` works by spawning one or more **workers**, which are controlled -by the **master**. Each **worker** is responsible for performing -a full test collection and afterwards running tests as dictated by the **master**. - -The execution flow is: - -1. **master** spawns one or more **workers** at the beginning of - the test session. The communication between **master** and **worker** nodes makes use of - [execnet](http://codespeak.net/execnet/) and its [gateways](http://codespeak.net/execnet/basics.html#gateways-bootstrapping-python-interpreters). - The actual interpreters executing the code for the **workers** might - be remote or local. - -1. Each **worker** itself is a mini pytest runner. **workers** at this - point perform a full test collection, sending back the collected - test-ids back to the **master** which does not - perform any collection itself. - -1. The **master** receives the result of the collection from all nodes. 
- At this point the **master** performs some sanity check to ensure that - all **workers** collected the same tests (including order), bailing out otherwise. - If all is well, it converts the list of test-ids into a list of simple - indexes, where each index corresponds to the position of that test in the - original collection list. This works because all nodes have the same - collection list, and saves bandwidth because the **master** can now tell - one of the workers to just *execute test index 3* index of passing the - full test id. - -1. If **dist-mode** is **each**: the **master** just sends the full list - of test indexes to each node at this moment. - -1. If **dist-mode** is **load**: the **master** takes around 25% of the - tests and sends them one by one to each **worker** in a round robin - fashion. The rest of the tests will be distributed later as **workers** - finish tests (see below). - -1. Note that `pytest_xdist_make_scheduler` hook can be used to implement custom tests distribution logic. - -1. **workers** re-implement `pytest_runtestloop`: pytest's default implementation - basically loops over all collected items in the `session` object and executes - the `pytest_runtest_protocol` for each test item, but in xdist **workers** sit idly - waiting for **master** to send tests for execution. As tests are - received by **workers**, `pytest_runtest_protocol` is executed for each test. - Here it worth noting an implementation detail: **workers** always must keep at - least one test item on their queue due to how the `pytest_runtest_protocol(item, nextitem)` - hook is defined: in order to pass the `nextitem` to the hook, the worker must wait for more - instructions from master before executing that remaining test. If it receives more tests, - then it can safely call `pytest_runtest_protocol` because it knows what the `nextitem` parameter will be. - If it receives a "shutdown" signal, then it can execute the hook passing `nextitem` as `None`. - -1. 
As tests are started and completed at the **workers**, the results are sent - back to the **master**, which then just forwards the results to - the appropriate pytest hooks: `pytest_runtest_logstart` and - `pytest_runtest_logreport`. This way other plugins (for example `junitxml`) - can work normally. The **master** (when in dist-mode **load**) - decides to send more tests to a node when a test completes, using - some heuristics such as test durations and how many tests each **worker** - still has to run. - -1. When the **master** has no more pending tests it will - send a "shutdown" signal to all **workers**, which will then run their - remaining tests to completion and shut down. At this point the - **master** will sit waiting for **workers** to shut down, still - processing events such as `pytest_runtest_logreport`. - -## FAQ ## - -> Why does each worker do its own collection, as opposed to having -the master collect once and distribute from that collection to the workers? - -If collection was performed by master then it would have to -serialize collected items to send them through the wire, as workers live in another process. -The problem is that test items are not easily (impossible?) to serialize, as they contain references to -the test functions, fixture managers, config objects, etc. Even if one manages to serialize it, -it seems it would be very hard to get it right and easy to break by any small change in pytest. diff --git a/README.rst b/README.rst index 61c16b54..176fcba1 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,6 @@ - +============ +pytest-xdist +============ .. image:: http://img.shields.io/pypi/v/pytest-xdist.svg :alt: PyPI version @@ -11,384 +13,21 @@ :alt: Python versions :target: https://pypi.python.org/pypi/pytest-xdist -.. image:: https://travis-ci.org/pytest-dev/pytest-xdist.svg?branch=master - :alt: Travis CI build status - :target: https://travis-ci.org/pytest-dev/pytest-xdist - -.. 
image:: https://ci.appveyor.com/api/projects/status/56eq1a1avd4sdd7e/branch/master?svg=true - :alt: AppVeyor build status - :target: https://ci.appveyor.com/project/pytestbot/pytest-xdist +.. image:: https://github.com/pytest-dev/pytest-xdist/workflows/build/badge.svg + :target: https://github.com/pytest-dev/pytest-xdist/actions .. image:: https://img.shields.io/badge/code%20style-black-000000.svg :target: https://github.com/ambv/black -xdist: pytest distributed testing plugin -======================================== - -The `pytest-xdist`_ plugin extends pytest with some unique -test execution modes: - -* test run parallelization_: if you have multiple CPUs or hosts you can use - those for a combined test run. This allows to speed up - development or to use special resources of `remote machines`_. - - -* ``--looponfail``: run your tests repeatedly in a subprocess. After each run - pytest waits until a file in your project changes and then re-runs - the previously failing tests. This is repeated until all tests pass - after which again a full run is performed. - -* `Multi-Platform`_ coverage: you can specify different Python interpreters - or different platforms and run tests in parallel on all of them. - -Before running tests remotely, ``pytest`` efficiently "rsyncs" your -program source code to the remote place. All test results -are reported back and displayed to your local terminal. -You may specify different Python versions and interpreters. - -If you would like to know how pytest-xdist works under the covers, checkout -`OVERVIEW `_. - - -Installation ------------- - -Install the plugin with:: - - pip install pytest-xdist - - -To use ``psutil`` for detection of the number of CPUs available, install the ``psutil`` extra:: - - pip install pytest-xdist[psutil] - - -.. 
_parallelization: - -Speed up test runs by sending tests to multiple CPUs ----------------------------------------------------- - -To send tests to multiple CPUs, use the ``-n`` (or ``--numprocesses``) option:: - - pytest -n NUMCPUS - -Pass ``-n auto`` to use as many processes as your computer has CPU cores. This -can lead to considerable speed ups, especially if your test suite takes a -noticeable amount of time. - -If a test crashes a worker, pytest-xdist will automatically restart that worker -and report the test’s failure. You can use the ``--max-worker-restart`` option -to limit the number of worker restarts that are allowed, or disable restarting -altogether using ``--max-worker-restart 0``. - -By default, using ``--numprocesses`` will send pending tests to any worker that -is available, without any guaranteed order. You can change the test -distribution algorithm this with the ``--dist`` option. It takes these values: - -* ``--dist no``: The default algorithm, distributing one test at a time. - -* ``--dist loadscope``: Tests are grouped by **module** for *test functions* - and by **class** for *test methods*. Groups are distributed to available - workers as whole units. This guarantees that all tests in a group run in the - same process. This can be useful if you have expensive module-level or - class-level fixtures. Grouping by class takes priority over grouping by - module. - -* ``--dist loadfile``: Tests are grouped by their containing file. Groups are - distributed to available workers as whole units. This guarantees that all - tests in a file run in the same worker. - -Making session-scoped fixtures execute only once -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -``pytest-xdist`` is designed so that each worker process will perform its own collection and execute -a subset of all tests. 
This means that tests in different processes requesting a high-level -scoped fixture (for example ``session``) will execute the fixture code more than once, which -breaks expectations and might be undesired in certain situations. - -While ``pytest-xdist`` does not have a builtin support for ensuring a session-scoped fixture is -executed exactly once, this can be achieved by using a lock file for inter-process communication. - -The example below needs to execute the fixture ``session_data`` only once (because it is -resource intensive, or needs to execute only once to define configuration options, etc), so it makes -use of a `FileLock `_ to produce the fixture data only once -when the first process requests the fixture, while the other processes will then read -the data from a file. - -Here is the code: - -.. code-block:: python - - import json - - import pytest - from filelock import FileLock - - - @pytest.fixture(scope="session") - def session_data(tmp_path_factory, worker_id): - if worker_id == "master": - # not executing in with multiple workers, just produce the data and let - # pytest's fixture caching do its job - return produce_expensive_data() - - # get the temp directory shared by all workers - root_tmp_dir = tmp_path_factory.getbasetemp().parent - - fn = root_tmp_dir / "data.json" - with FileLock(str(fn) + ".lock"): - if fn.is_file(): - data = json.loads(fn.read_text()) - else: - data = produce_expensive_data() - fn.write_text(json.dumps(data)) - return data - - -The example above can also be use in cases a fixture needs to execute exactly once per test session, like -initializing a database service and populating initial tables. - -This technique might not work for every case, but should be a starting point for many situations -where executing a high-scope fixture exactly once is important. 
- -Running tests in a Python subprocess ------------------------------------- - -To instantiate a python3.5 subprocess and send tests to it, you may type:: - - pytest -d --tx popen//python=python3.5 - -This will start a subprocess which is run with the ``python3.5`` -Python interpreter, found in your system binary lookup path. - -If you prefix the --tx option value like this:: - - --tx 3*popen//python=python3.5 - -then three subprocesses would be created and tests -will be load-balanced across these three processes. - -.. _boxed: - -Running tests in a boxed subprocess ------------------------------------ - -This functionality has been moved to the -`pytest-forked `_ plugin, but the ``--boxed`` option -is still kept for backward compatibility. - -.. _`remote machines`: - -Sending tests to remote SSH accounts ------------------------------------- - -Suppose you have a package ``mypkg`` which contains some -tests that you can successfully run locally. And you -have a ssh-reachable machine ``myhost``. Then -you can ad-hoc distribute your tests by typing:: - - pytest -d --tx ssh=myhostpopen --rsyncdir mypkg mypkg - -This will synchronize your :code:`mypkg` package directory -to a remote ssh account and then locally collect tests -and send them to remote places for execution. - -You can specify multiple :code:`--rsyncdir` directories -to be sent to the remote side. - -.. note:: - - For pytest to collect and send tests correctly - you not only need to make sure all code and tests - directories are rsynced, but that any test (sub) directory - also has an :code:`__init__.py` file because internally - pytest references tests as a fully qualified python - module path. **You will otherwise get strange errors** - during setup of the remote side. - - -You can specify multiple :code:`--rsyncignore` glob patterns -to be ignored when file are sent to the remote side. 
-There are also internal ignores: :code:`.*, *.pyc, *.pyo, *~` -Those you cannot override using rsyncignore command-line or -ini-file option(s). - - -Sending tests to remote Socket Servers --------------------------------------- - -Download the single-module `socketserver.py`_ Python program -and run it like this:: - - python socketserver.py - -It will tell you that it starts listening on the default -port. You can now on your home machine specify this -new socket host with something like this:: - - pytest -d --tx socket=192.168.1.102:8888 --rsyncdir mypkg mypkg - - -.. _`atonce`: -.. _`Multi-Platform`: - - -Running tests on many platforms at once ---------------------------------------- - -The basic command to run tests on multiple platforms is:: - - pytest --dist=each --tx=spec1 --tx=spec2 - -If you specify a windows host, an OSX host and a Linux -environment this command will send each tests to all -platforms - and report back failures from all platforms -at once. The specifications strings use the `xspec syntax`_. - -.. _`xspec syntax`: http://codespeak.net/execnet/basics.html#xspec - -.. _`socketserver.py`: https://raw.githubusercontent.com/pytest-dev/execnet/master/execnet/script/socketserver.py - -.. _`execnet`: http://codespeak.net/execnet - -Identifying the worker process during a test --------------------------------------------- - -*New in version 1.15.* - -If you need to determine the identity of a worker process in -a test or fixture, you may use the ``worker_id`` fixture to do so: - -.. code-block:: python - - @pytest.fixture() - def user_account(worker_id): - """ use a different account in each xdist worker """ - return "account_%s" % worker_id - -When ``xdist`` is disabled (running with ``-n0`` for example), then -``worker_id`` will return ``"master"``. - -Worker processes also have the following environment variables -defined: - -* ``PYTEST_XDIST_WORKER``: the name of the worker, e.g., ``"gw2"``. 
-* ``PYTEST_XDIST_WORKER_COUNT``: the total number of workers in this session, - e.g., ``"4"`` when ``-n 4`` is given in the command-line. - -The information about the worker_id in a test is stored in the ``TestReport`` as -well, under the ``worker_id`` attribute. - -Since version 2.0, the following functions are also available in the ``xdist`` module: - -.. code-block:: python - - def is_xdist_worker(request_or_session) -> bool: - """Return `True` if this is an xdist worker, `False` otherwise - - :param request_or_session: the `pytest` `request` or `session` object - """ - - def is_xdist_master(request_or_session) -> bool: - """Return `True` if this is the xdist master, `False` otherwise - - Note: this method also returns `False` when distribution has not been - activated at all. - - :param request_or_session: the `pytest` `request` or `session` object - """ - - def get_xdist_worker_id(request_or_session) -> str: - """Return the id of the current worker ('gw0', 'gw1', etc) or 'master' - if running on the 'master' node. - - If not distributing tests (for example passing `-n0` or not passing `-n` at all) also return 'master'. - - :param request_or_session: the `pytest` `request` or `session` object - """ - - -Uniquely identifying the current test run ------------------------------------------ - -*New in version 1.32.* - -If you need to globally distinguish one test run from others in your -workers, you can use the ``testrun_uid`` fixture. For instance, let's say you -wanted to create a separate database for each test run: - -.. 
code-block:: python - - import pytest - from posix_ipc import Semaphore, O_CREAT - - @pytest.fixture(scope="session", autouse=True) - def create_unique_database(testrun_uid): - """ create a unique database for this particular test run """ - database_url = f"psql://myapp-{testrun_uid}" - - with Semaphore(f"/{testrun_uid}-lock", flags=O_CREAT, initial_value=1): - if not database_exists(database_url): - create_database(database_url) - - @pytest.fixture() - def db(testrun_uid): - """ retrieve unique database """ - database_url = f"psql://myapp-{testrun_uid}" - return database_get_instance(database_url) - - -Additionally, during a test run, the following environment variable is defined: - -* ``PYTEST_XDIST_TESTRUNUID``: the unique id of the test run. - -Accessing ``sys.argv`` from the master node in workers ------------------------------------------------------- - -To access the ``sys.argv`` passed to the command-line of the master node, use -``request.config.workerinput["mainargv"]``. - - -Specifying test exec environments in an ini file ------------------------------------------------- - -You can use pytest's ini file configuration to avoid typing common options. -You can for example make running with three subprocesses your default like this: - -.. code-block:: ini - - [pytest] - addopts = -n3 - -You can also add default environments like this: - -.. code-block:: ini - - [pytest] - addopts = --tx ssh=myhost//python=python3.5 --tx ssh=myhost//python=python3.6 - -and then just type:: - - pytest --dist=each - -to run tests in each of the environments. - - -Specifying "rsync" dirs in an ini-file --------------------------------------- - -In a ``tox.ini`` or ``setup.cfg`` file in your root project directory -you may specify directories to include or to exclude in synchronisation: +The `pytest-xdist`_ plugin extends pytest with new test execution modes, the most used being distributing +tests across multiple CPUs to speed up test execution:: -.. 
code-block:: ini + pytest -n auto - [pytest] - rsyncdirs = . mypkg helperpkg - rsyncignore = .hg +With this call, pytest will spawn a number of workers processes equal to the number of available CPUs, and distribute +the tests randomly across them. -These directory specifications are relative to the directory -where the configuration file was found. +Documentation +============= -.. _`pytest-xdist`: http://pypi.python.org/pypi/pytest-xdist -.. _`pytest-xdist repository`: https://github.com/pytest-dev/pytest-xdist -.. _`pytest`: http://pytest.org +Documentation is available at `Read The Docs `__. diff --git a/RELEASING.rst b/RELEASING.rst index 078c35b4..5cfd7c01 100644 --- a/RELEASING.rst +++ b/RELEASING.rst @@ -41,3 +41,5 @@ To publish a new release ``X.Y.Z``, the steps are as follows: $ git push git@github.com:pytest-dev/pytest-xdist.git v$VERSION That will build the package and publish it on ``PyPI`` automatically. + +#. Merge the release PR to `master`. diff --git a/changelog/791.doc b/changelog/791.doc new file mode 100644 index 00000000..adc3e65c --- /dev/null +++ b/changelog/791.doc @@ -0,0 +1 @@ +Document the ``pytest_xdist_auto_num_workers`` hook. diff --git a/changelog/796.doc.rst b/changelog/796.doc.rst new file mode 100644 index 00000000..28a10bc4 --- /dev/null +++ b/changelog/796.doc.rst @@ -0,0 +1 @@ +Added known limitations section to documentation. diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 00000000..69fa449d --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1 @@ +_build/ diff --git a/docs/boxed.rst b/docs/boxed.rst new file mode 100644 index 00000000..ec02bd57 --- /dev/null +++ b/docs/boxed.rst @@ -0,0 +1,9 @@ + +.. _boxed: + +Running tests in a boxed subprocess (moved to pytest-forked) +============================================================ + +This functionality has been moved to the +`pytest-forked `_ plugin, but the ``--boxed`` option +is still kept for backward compatibility. 
diff --git a/docs/changelog.rst b/docs/changelog.rst new file mode 100644 index 00000000..4c32ed85 --- /dev/null +++ b/docs/changelog.rst @@ -0,0 +1,5 @@ +========= +Changelog +========= + +.. include:: ../CHANGELOG.rst diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000..ec93d438 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,55 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- + +project = "pytest-xdist" +copyright = "2022, holger krekel and contributors" +author = "holger krekel and contributors" + +master_doc = "index" + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx_rtd_theme", + "sphinx.ext.autodoc", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. 
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "sphinx_rtd_theme" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = ['_static'] diff --git a/docs/crash.rst b/docs/crash.rst new file mode 100644 index 00000000..30cb11b5 --- /dev/null +++ b/docs/crash.rst @@ -0,0 +1,7 @@ +When tests crash +================ + +If a test crashes a worker, pytest-xdist will automatically restart that worker +and report the test’s failure. You can use the ``--max-worker-restart`` option +to limit the number of worker restarts that are allowed, or disable restarting +altogether using ``--max-worker-restart 0``. diff --git a/docs/distribution.rst b/docs/distribution.rst new file mode 100644 index 00000000..0ec9416d --- /dev/null +++ b/docs/distribution.rst @@ -0,0 +1,69 @@ +.. _parallelization: + +Running tests across multiple CPUs +================================== + +To send tests to multiple CPUs, use the ``-n`` (or ``--numprocesses``) option:: + + pytest -n auto + +This can lead to considerable speed ups, especially if your test suite takes a +noticeable amount of time. + +With ``-n auto``, pytest-xdist will use as many processes as your computer +has CPU cores. +Pass a number, e.g. ``-n 8``, to specify the number of processes explicitly. + +To specify a different meaning for ``-n auto`` for your tests, +you can implement the ``pytest_xdist_auto_num_workers`` +`pytest hook `__ +(a function named ``pytest_xdist_auto_num_workers`` in e.g. ``conftest.py``) +that returns the number of processes to use. 
+ + +Parallelization can be configured further with these options: + +* ``--maxprocesses=maxprocesses``: limit the maximum number of workers to + process the tests. + +* ``--max-worker-restart``: maximum number of workers that can be restarted + when crashed (set to zero to disable this feature). + +The test distribution algorithm is configured with the ``--dist`` command-line option: + +.. _distribution modes: + +* ``--dist load`` **(default)**: Sends pending tests to any worker that is + available, without any guaranteed order. + +* ``--dist loadscope``: Tests are grouped by **module** for *test functions* + and by **class** for *test methods*. Groups are distributed to available + workers as whole units. This guarantees that all tests in a group run in the + same process. This can be useful if you have expensive module-level or + class-level fixtures. Grouping by class takes priority over grouping by + module. + +* ``--dist loadfile``: Tests are grouped by their containing file. Groups are + distributed to available workers as whole units. This guarantees that all + tests in a file run in the same worker. + +* ``--dist loadgroup``: Tests are grouped by the ``xdist_group`` mark. Groups are + distributed to available workers as whole units. This guarantees that all + tests with same ``xdist_group`` name run in the same worker. + + .. code-block:: python + + @pytest.mark.xdist_group(name="group1") + def test1(): + pass + + + class TestA: + @pytest.mark.xdist_group("group1") + def test2(): + pass + + This will make sure ``test1`` and ``TestA::test2`` will run in the same worker. + Tests without the ``xdist_group`` mark are distributed normally as in the ``--dist=load`` mode. + +* ``--dist no``: The normal pytest execution mode, runs one test at a time (no distribution at all). diff --git a/docs/how-it-works.rst b/docs/how-it-works.rst new file mode 100644 index 00000000..3111d070 --- /dev/null +++ b/docs/how-it-works.rst @@ -0,0 +1,90 @@ +How it works? 
+=============
+
+``xdist`` works by spawning one or more **workers**, which are
+controlled by the **controller**. Each **worker** is responsible for
+performing a full test collection and afterwards running tests as
+dictated by the **controller**.
+
+The execution flow is:
+
+1. **controller** spawns one or more **workers** at the beginning of the
+ test session. The communication between **controller** and **worker**
+ nodes makes use of `execnet `__ and
+ its
+ `gateways `__.
+ The actual interpreters executing the code for the **workers** might
+ be remote or local.
+
+2. Each **worker** itself is a mini pytest runner. **workers** at this
+ point perform a full test collection, sending the collected
+ test-ids back to the **controller** which does not perform any
+ collection itself.
+
+3. The **controller** receives the result of the collection from all
+ nodes. At this point the **controller** performs some sanity check to
+ ensure that all **workers** collected the same tests (including
+ order), bailing out otherwise. If all is well, it converts the list
+ of test-ids into a list of simple indexes, where each index
+ corresponds to the position of that test in the original collection
+ list. This works because all nodes have the same collection list, and
+ saves bandwidth because the **controller** can now tell one of the
+ workers to just *execute test index 3* instead of passing the full test
+ id.
+
+4. If **dist-mode** is **each**: the **controller** just sends the full
+ list of test indexes to each node at this moment.
+
+5. If **dist-mode** is **load**: the **controller** takes around 25% of
+ the tests and sends them one by one to each **worker** in a round
+ robin fashion. The rest of the tests will be distributed later as
+ **workers** finish tests (see below).
+
+6. Note that ``pytest_xdist_make_scheduler`` hook can be used to
+ implement custom tests distribution logic.
+
+7. 
**workers** re-implement ``pytest_runtestloop``: pytest’s default
+ implementation basically loops over all collected items in the
+ ``session`` object and executes the ``pytest_runtest_protocol`` for
+ each test item, but in xdist **workers** sit idly waiting for
+ **controller** to send tests for execution. As tests are received by
+ **workers**, ``pytest_runtest_protocol`` is executed for each test.
+ Here it is worth noting an implementation detail: **workers** always
+ must keep at least one test item on their queue due to how the
+ ``pytest_runtest_protocol(item, nextitem)`` hook is defined: in order
+ to pass the ``nextitem`` to the hook, the worker must wait for more
+ instructions from controller before executing that remaining test. If
+ it receives more tests, then it can safely call
+ ``pytest_runtest_protocol`` because it knows what the ``nextitem``
+ parameter will be. If it receives a “shutdown” signal, then it can
+ execute the hook passing ``nextitem`` as ``None``.
+
+8. As tests are started and completed at the **workers**, the results
+ are sent back to the **controller**, which then just forwards the
+ results to the appropriate pytest hooks: ``pytest_runtest_logstart``
+ and ``pytest_runtest_logreport``. This way other plugins (for example
+ ``junitxml``) can work normally. The **controller** (when in
+ dist-mode **load**) decides to send more tests to a node when a test
+ completes, using some heuristics such as test durations and how many
+ tests each **worker** still has to run.
+
+9. When the **controller** has no more pending tests it will send a
+ “shutdown” signal to all **workers**, which will then run their
+ remaining tests to completion and shut down. At this point the
+ **controller** will sit waiting for **workers** to shut down, still
+ processing events such as ``pytest_runtest_logreport``.
+
+FAQ
+---
+
+**Question**: Why does each worker do its own collection, as opposed to having the
+controller collect once and distribute from that collection to the
+workers?
+
+If collection was performed by controller then it would have to
+serialize collected items to send them through the wire, as workers live
+in another process. The problem is that test items are not easy
+(perhaps impossible) to serialize, as they contain references to the test
+functions, fixture managers, config objects, etc. Even if one manages to
+serialize it, it seems it would be very hard to get it right and easy to
+break by any small change in pytest.
diff --git a/docs/how-to.rst b/docs/how-to.rst
new file mode 100644
index 00000000..5d6ed128
--- /dev/null
+++ b/docs/how-to.rst
@@ -0,0 +1,233 @@
+How-tos
+-------
+
+This section showcases how to accomplish some specialized tasks with ``pytest-xdist``.
+
+Identifying the worker process during a test
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+*New in version 1.15.*
+
+If you need to determine the identity of a worker process in
+a test or fixture, you may use the ``worker_id`` fixture to do so:
+
+.. code-block:: python
+
+ @pytest.fixture()
+ def user_account(worker_id):
+ """ use a different account in each xdist worker """
+ return "account_%s" % worker_id
+
+When ``xdist`` is disabled (running with ``-n0`` for example), then
+``worker_id`` will return ``"master"``.
+
+Worker processes also have the following environment variables
+defined:
+
+.. envvar:: PYTEST_XDIST_WORKER
+
+The name of the worker, e.g., ``"gw2"``.
+
+.. envvar:: PYTEST_XDIST_WORKER_COUNT
+
+The total number of workers in this session, e.g., ``"4"`` when ``-n 4`` is given in the command-line.
+
+The information about the worker_id in a test is stored in the ``TestReport`` as
+well, under the ``worker_id`` attribute.
+
+Since version 2.0, the following functions are also available in the ``xdist`` module:
+
+
+.. autofunction:: xdist.is_xdist_worker
+.. 
autofunction:: xdist.is_xdist_controller
+.. autofunction:: xdist.is_xdist_master
+.. autofunction:: xdist.get_xdist_worker_id
+
+Identifying workers from the system environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+*New in version 2.4*
+
+If the `setproctitle`_ package is installed, ``pytest-xdist`` will use it to
+update the process title (command line) on its workers to show their current
+state. The titles used are ``[pytest-xdist running] file.py/node::id`` and
+``[pytest-xdist idle]``, visible in standard tools like ``ps`` and ``top`` on
+Linux, Mac OS X and BSD systems. For Windows, please follow `setproctitle`_'s
+pointer regarding the Process Explorer tool.
+
+This is intended purely as a UX enhancement, e.g. to track down issues with
+long-running or CPU intensive tests. Errors in changing the title are ignored
+silently. Please try not to rely on the title format or title changes in
+external scripts.
+
+.. _`setproctitle`: https://pypi.org/project/setproctitle/
+
+
+Uniquely identifying the current test run
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+*New in version 1.32.*
+
+If you need to globally distinguish one test run from others in your
+workers, you can use the ``testrun_uid`` fixture. For instance, let's say you
+wanted to create a separate database for each test run:
+
+.. 
code-block:: python + + import pytest + from posix_ipc import Semaphore, O_CREAT + + + @pytest.fixture(scope="session", autouse=True) + def create_unique_database(testrun_uid): + """ create a unique database for this particular test run """ + database_url = f"psql://myapp-{testrun_uid}" + + with Semaphore(f"/{testrun_uid}-lock", flags=O_CREAT, initial_value=1): + if not database_exists(database_url): + create_database(database_url) + + + @pytest.fixture() + def db(testrun_uid): + """ retrieve unique database """ + database_url = f"psql://myapp-{testrun_uid}" + return database_get_instance(database_url) + + +Additionally, during a test run, the following environment variable is defined: + +.. envvar:: PYTEST_XDIST_TESTRUNUID + +The unique id of the test run. + +Accessing ``sys.argv`` from the controller node in workers +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To access the ``sys.argv`` passed to the command-line of the controller node, use +``request.config.workerinput["mainargv"]``. + + +Specifying test exec environments in an ini file +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You can use pytest's ini file configuration to avoid typing common options. +You can for example make running with three subprocesses your default like this: + +.. code-block:: ini + + [pytest] + addopts = -n3 + +You can also add default environments like this: + +.. code-block:: ini + + [pytest] + addopts = --tx ssh=myhost//python=python3.9 --tx ssh=myhost//python=python3.6 + +and then just type:: + + pytest --dist=each + +to run tests in each of the environments. + + +Specifying "rsync" dirs in an ini-file +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In a ``tox.ini`` or ``setup.cfg`` file in your root project directory +you may specify directories to include or to exclude in synchronisation: + +.. code-block:: ini + + [pytest] + rsyncdirs = . 
mypkg helperpkg + rsyncignore = .hg + +These directory specifications are relative to the directory +where the configuration file was found. + +.. _`pytest-xdist`: http://pypi.python.org/pypi/pytest-xdist +.. _`pytest-xdist repository`: https://github.com/pytest-dev/pytest-xdist +.. _`pytest`: http://pytest.org + + +Making session-scoped fixtures execute only once +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +``pytest-xdist`` is designed so that each worker process will perform its own collection and execute +a subset of all tests. This means that tests in different processes requesting a high-level +scoped fixture (for example ``session``) will execute the fixture code more than once, which +breaks expectations and might be undesired in certain situations. + +While ``pytest-xdist`` does not have a builtin support for ensuring a session-scoped fixture is +executed exactly once, this can be achieved by using a lock file for inter-process communication. + +The example below needs to execute the fixture ``session_data`` only once (because it is +resource intensive, or needs to execute only once to define configuration options, etc), so it makes +use of a `FileLock `_ to produce the fixture data only once +when the first process requests the fixture, while the other processes will then read +the data from a file. + +Here is the code: + +.. 
code-block:: python
+
+ import json
+
+ import pytest
+ from filelock import FileLock
+
+
+ @pytest.fixture(scope="session")
+ def session_data(tmp_path_factory, worker_id):
+ if worker_id == "master":
+ # not executing with multiple workers, just produce the data and let
+ # pytest's fixture caching do its job
+ return produce_expensive_data()
+
+ # get the temp directory shared by all workers
+ root_tmp_dir = tmp_path_factory.getbasetemp().parent
+
+ fn = root_tmp_dir / "data.json"
+ with FileLock(str(fn) + ".lock"):
+ if fn.is_file():
+ data = json.loads(fn.read_text())
+ else:
+ data = produce_expensive_data()
+ fn.write_text(json.dumps(data))
+ return data
+
+
+The example above can also be used in cases where a fixture needs to execute exactly once per test session, like
+initializing a database service and populating initial tables.
+
+This technique might not work for every case, but should be a starting point for many situations
+where executing a high-scope fixture exactly once is important.
+
+
+Creating one log file for each worker
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To create one log file for each worker with ``pytest-xdist``, you can leverage :envvar:`PYTEST_XDIST_WORKER`
+to generate a unique filename for each worker.
+
+Example:
+
+.. code-block:: python
+
+ # content of conftest.py
+ def pytest_configure(config):
+ worker_id = os.environ.get("PYTEST_XDIST_WORKER")
+ if worker_id is not None:
+ log_file = config.getini("worker_log_file")
+ logging.basicConfig(
+ format=config.getini("log_file_format"),
+ filename=f"tests_{worker_id}.log",
+ level=config.getini("log_file_level"),
+ )
+
+
+When running the tests with ``-n3``, for example, three files will be created in the current directory:
+``tests_gw0.log``, ``tests_gw1.log`` and ``tests_gw2.log``. 
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 00000000..6e8e6112
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,64 @@
+pytest-xdist
+============
+
+The `pytest-xdist`_ plugin extends pytest with new test execution modes, the most used being distributing
+tests across multiple CPUs to speed up test execution::
+
+ pytest -n auto
+
+With this call, pytest will spawn a number of worker processes equal to the number of available CPUs, and distribute
+the tests randomly across them.
+
+.. note::
+ Due to how pytest-xdist is implemented, the ``-s/--capture=no`` option does not work.
+
+
+Installation
+------------
+
+Install the plugin with::
+
+ pip install pytest-xdist
+
+
+To use ``psutil`` for detection of the number of CPUs available, install the ``psutil`` extra::
+
+ pip install pytest-xdist[psutil]
+
+Features
+--------
+
+* Test run :ref:`parallelization`: tests can be executed across multiple CPUs or hosts.
+ This allows you to speed up development or to use special resources of :ref:`remote machines`.
+
+* ``--looponfail``: run your tests repeatedly in a subprocess. After each run
+ pytest waits until a file in your project changes and then re-runs
+ the previously failing tests. This is repeated until all tests pass
+ after which again a full run is performed.
+
+* :ref:`Multi-Platform` coverage: you can specify different Python interpreters
+ or different platforms and run tests in parallel on all of them.
+
+ Before running tests remotely, ``pytest`` efficiently "rsyncs" your
+ program source code to the remote place.
+ You may specify different Python versions and interpreters. It does not
+ install/synchronize dependencies, however.
+
+ **Note**: this mode exists mostly for backward compatibility, as modern development
+ relies on continuous integration for multi-platform testing.
+
+
+
+.. 
toctree::
+ :maxdepth: 2
+ :caption: Contents:
+
+ distribution
+ subprocess
+ boxed
+ remote
+ crash
+ how-to
+ how-it-works
+ known-limitations
+ changelog
diff --git a/docs/known-limitations.rst b/docs/known-limitations.rst
new file mode 100644
index 00000000..04641a7b
--- /dev/null
+++ b/docs/known-limitations.rst
@@ -0,0 +1,52 @@
+Known limitations
+=================
+
+pytest-xdist has some limitations that may be supported in pytest but can't be supported in pytest-xdist.
+
+Order and amount of tests must be consistent
+--------------------------------------------
+
+It is not possible to have tests that differ in order or their amount across workers.
+
+This is especially true with ``pytest.mark.parametrize``, when values are produced with sets or other unordered iterables/generators.
+
+
+Example:
+
+.. code-block:: python
+
+ import pytest
+
+ @pytest.mark.parametrize("param", {"a","b"})
+ def test_pytest_parametrize_unordered(param):
+ pass
+
+In the example above, the fact that ``set`` objects are not necessarily ordered can cause different workers
+to collect tests in different order, which will throw an error.
+
+Workarounds
+~~~~~~~~~~~
+
+A solution to this is to guarantee that the parametrized values have the same order.
+
+Some solutions:
+
+* Convert your sequence to a ``list``.
+
+ .. code-block:: python
+
+ import pytest
+
+ @pytest.mark.parametrize("param", ["a", "b"])
+ def test_pytest_parametrize_unordered(param):
+ pass
+
+* Sort your sequence, guaranteeing order.
+
+ .. code-block:: python
+
+ import pytest
+
+ @pytest.mark.parametrize("param", sorted({"a", "b"}))
+ def test_pytest_parametrize_unordered(param):
+ pass
diff --git a/docs/remote.rst b/docs/remote.rst
new file mode 100644
index 00000000..997bef37
--- /dev/null
+++ b/docs/remote.rst
@@ -0,0 +1,72 @@
+
+.. _`Multi-Platform`:
+.. 
_`remote machines`:
+
+Sending tests to remote SSH accounts
+====================================
+
+Suppose you have a package ``mypkg`` which contains some
+tests that you can successfully run locally. And you
+have an ssh-reachable machine ``myhost``. Then
+you can ad-hoc distribute your tests by typing::
+
+ pytest -d --rsyncdir mypkg --tx ssh=myhostpopen mypkg/tests/unit/test_something.py
+
+This will synchronize your :code:`mypkg` package directory
+to a remote ssh account and then locally collect tests
+and send them to remote places for execution.
+
+You can specify multiple :code:`--rsyncdir` directories
+to be sent to the remote side.
+
+.. note::
+
+ For pytest to collect and send tests correctly
+ you not only need to make sure all code and tests
+ directories are rsynced, but that any test (sub) directory
+ also has an :code:`__init__.py` file because internally
+ pytest references tests as a fully qualified python
+ module path. **You will otherwise get strange errors**
+ during setup of the remote side.
+
+
+You can specify multiple :code:`--rsyncignore` glob patterns
+to be ignored when files are sent to the remote side.
+There are also internal ignores: :code:`.*, *.pyc, *.pyo, *~`
+Those you cannot override using rsyncignore command-line or
+ini-file option(s).
+
+
+Sending tests to remote Socket Servers
+--------------------------------------
+
+Download the single-module `socketserver.py`_ Python program
+and run it like this::
+
+ python socketserver.py
+
+It will tell you that it starts listening on the default
+port. 
You can now on your home machine specify this +new socket host with something like this:: + + pytest -d --tx socket=192.168.1.102:8888 --rsyncdir mypkg + + + +Running tests on many platforms at once +--------------------------------------- + +The basic command to run tests on multiple platforms is:: + + pytest --dist=each --tx=spec1 --tx=spec2 + +If you specify a windows host, an OSX host and a Linux +environment this command will send each tests to all +platforms - and report back failures from all platforms +at once. The specifications strings use the `xspec syntax`_. + +.. _`xspec syntax`: https://codespeak.net/execnet/basics.html#xspec + +.. _`execnet`: https://codespeak.net/execnet + +.. _`socketserver.py`: https://raw.githubusercontent.com/pytest-dev/execnet/master/execnet/script/socketserver.py diff --git a/docs/subprocess.rst b/docs/subprocess.rst new file mode 100644 index 00000000..2148a867 --- /dev/null +++ b/docs/subprocess.rst @@ -0,0 +1,16 @@ +Running tests in a Python subprocess +==================================== + +To instantiate a ``python3.9`` subprocess and send tests to it, you may type:: + + pytest -d --tx popen//python=python3.9 + +This will start a subprocess which is run with the ``python3.9`` +Python interpreter, found in your system binary lookup path. + +If you prefix the --tx option value like this:: + + --tx 3*popen//python=python3.9 + +then three subprocesses would be created and tests +will be load-balanced across these three processes. diff --git a/example/boxed.txt b/example/boxed.txt index aabb27e3..81543ab3 100644 --- a/example/boxed.txt +++ b/example/boxed.txt @@ -1,9 +1,9 @@ -.. note:: +.. warning:: Since 1.19.0, the actual implementation of the ``--boxed`` option has been moved to a separate plugin, `pytest-forked `_ - which can be installed independently. The ``--boxed`` command-line options remains - for backward compatibility reasons. + which can be installed independently. 
The ``--boxed`` command-line option is deprecated + and will be removed in pytest-xdist 3.0.0; use ``--forked`` from pytest-forked instead. If your testing involves C or C++ libraries you might have to deal diff --git a/pyproject.toml b/pyproject.toml index 94b1be61..72a7f7d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,15 @@ +[build-system] +requires = [ + # sync with setup.py until we discard non-pep-517/518 + "setuptools>=45.0", + "setuptools-scm[toml]>=6.2.3", + "wheel", +] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +write_to = "src/xdist/_version.py" + [tool.towncrier] package = "xdist" filename = "CHANGELOG.rst" diff --git a/setup.cfg b/setup.cfg index 71037d7b..4994681a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,81 @@ [metadata] +name = pytest-xdist +description = pytest xdist plugin for distributed testing and loop-on-failing modes +long_description = file: README.rst +license = MIT +author = holger krekel and contributors +author_email = pytest-dev@python.org,holger@merlinux.eu +url = https://github.com/pytest-dev/pytest-xdist +platforms = + linux + osx + win32 +classifiers = + Development Status :: 5 - Production/Stable + Framework :: Pytest + Intended Audience :: Developers + License :: OSI Approved :: MIT License + Operating System :: POSIX + Operating System :: Microsoft :: Windows + Operating System :: MacOS :: MacOS X + Topic :: Software Development :: Testing + Topic :: Software Development :: Quality Assurance + Topic :: Utilities + Programming Language :: Python + Programming Language :: Python :: 3 + Programming Language :: Python :: 3 :: Only + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 license_file = LICENSE +project_urls = + Documentation=https://pytest-xdist.readthedocs.io/en/latest + 
Changelog=https://pytest-xdist.readthedocs.io/en/latest/changelog.html + Source=https://github.com/pytest-dev/pytest-xdist + Tracker=https://github.com/pytest-dev/pytest-xdist/issues + +[options] +packages = find: +package_dir = =src +zip_safe = False +python_requires = >=3.6 +install_requires = + execnet>=1.1 + pytest>=6.2.0 + pytest-forked +setup_requires = # left empty, enforce using isolated build system + +[options.packages.find] +where = src + +[options.entry_points] +pytest11 = + xdist = xdist.plugin + xdist.looponfail = xdist.looponfail + +[options.extras_require] +testing = + filelock +psutil = psutil>=3.0 +setproctitle = setproctitle [flake8] max-line-length = 100 + +[mypy] +mypy_path = src +# TODO: Enable this & fix errors. +# check_untyped_defs = True +disallow_any_generics = True +ignore_missing_imports = True +no_implicit_optional = True +show_error_codes = True +strict_equality = True +warn_redundant_casts = True +warn_return_any = True +warn_unreachable = True +warn_unused_configs = True +# TODO: Enable this & fix errors. 
+# no_implicit_reexport = True diff --git a/setup.py b/setup.py index b6fb0f16..7f1a1763 100644 --- a/setup.py +++ b/setup.py @@ -1,48 +1,4 @@ -from setuptools import setup, find_packages +from setuptools import setup -install_requires = ["execnet>=1.1", "pytest>=6.0.0", "pytest-forked"] - - -with open("README.rst") as f: - long_description = f.read() - -setup( - name="pytest-xdist", - use_scm_version={"write_to": "src/xdist/_version.py"}, - description="pytest xdist plugin for distributed testing and loop-on-failing modes", - long_description=long_description, - license="MIT", - author="holger krekel and contributors", - author_email="pytest-dev@python.org,holger@merlinux.eu", - url="https://github.com/pytest-dev/pytest-xdist", - platforms=["linux", "osx", "win32"], - packages=find_packages(where="src"), - package_dir={"": "src"}, - extras_require={"testing": ["filelock"], "psutil": ["psutil>=3.0"]}, - entry_points={ - "pytest11": ["xdist = xdist.plugin", "xdist.looponfail = xdist.looponfail"] - }, - zip_safe=False, - python_requires=">=3.5", - install_requires=install_requires, - setup_requires=["setuptools_scm"], - classifiers=[ - "Development Status :: 5 - Production/Stable", - "Framework :: Pytest", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: POSIX", - "Operating System :: Microsoft :: Windows", - "Operating System :: MacOS :: MacOS X", - "Topic :: Software Development :: Testing", - "Topic :: Software Development :: Quality Assurance", - "Topic :: Utilities", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - ], -) +if __name__ == "__main__": + setup() diff --git a/src/xdist/__init__.py b/src/xdist/__init__.py index 83ef7762..031a3d34 100644 --- 
a/src/xdist/__init__.py +++ b/src/xdist/__init__.py @@ -1,4 +1,15 @@ -from xdist.plugin import is_xdist_worker, is_xdist_master, get_xdist_worker_id +from xdist.plugin import ( + is_xdist_worker, + is_xdist_master, + get_xdist_worker_id, + is_xdist_controller, +) from xdist._version import version as __version__ -__all__ = ["__version__", "is_xdist_worker", "is_xdist_master", "get_xdist_worker_id"] +__all__ = [ + "__version__", + "is_xdist_worker", + "is_xdist_master", + "is_xdist_controller", + "get_xdist_worker_id", +] diff --git a/src/xdist/dsession.py b/src/xdist/dsession.py index 07ef091e..2ae3db6b 100644 --- a/src/xdist/dsession.py +++ b/src/xdist/dsession.py @@ -7,6 +7,7 @@ LoadScheduling, LoadScopeScheduling, LoadFileScheduling, + LoadGroupScheduling, ) @@ -14,7 +15,7 @@ class Interrupted(KeyboardInterrupt): - """ signals an immediate interruption. """ + """signals an immediate interruption.""" class DSession: @@ -67,7 +68,7 @@ def report_line(self, line): if self.terminal and self.config.option.verbose >= 0: self.terminal.write_line(line) - @pytest.mark.trylast + @pytest.hookimpl(trylast=True) def pytest_sessionstart(self, session): """Creates and starts the nodes. 
@@ -79,6 +80,7 @@ def pytest_sessionstart(self, session): self._active_nodes.update(nodes) self._session = session + @pytest.hookimpl def pytest_sessionfinish(self, session): """Shutdown all nodes.""" nm = getattr(self, "nodemanager", None) # if not fully initialized @@ -86,11 +88,12 @@ def pytest_sessionfinish(self, session): nm.teardown_nodes() self._session = None + @pytest.hookimpl def pytest_collection(self): - # prohibit collection of test items in master process + # prohibit collection of test items in controller process return True - @pytest.mark.trylast + @pytest.hookimpl(trylast=True) def pytest_xdist_make_scheduler(self, config, log): dist = config.getvalue("dist") schedulers = { @@ -98,9 +101,11 @@ def pytest_xdist_make_scheduler(self, config, log): "load": LoadScheduling, "loadscope": LoadScopeScheduling, "loadfile": LoadFileScheduling, + "loadgroup": LoadGroupScheduling, } return schedulers[dist](config, log) + @pytest.hookimpl def pytest_runtestloop(self): self.sched = self.config.hook.pytest_xdist_make_scheduler( config=self.config, log=self.log @@ -174,6 +179,24 @@ def worker_workerfinished(self, node): assert not crashitem, (crashitem, node) self._active_nodes.remove(node) + def worker_internal_error(self, node, formatted_error): + """ + pytest_internalerror() was called on the worker. + + pytest_internalerror() arguments are an excinfo and an excrepr, which can't + be serialized, so we go with a poor man's solution of raising an exception + here ourselves using the formatted message. 
+ """ + self._active_nodes.remove(node) + try: + assert False, formatted_error + except AssertionError: + from _pytest._code import ExceptionInfo + + excinfo = ExceptionInfo.from_current() + excrepr = excinfo.getrepr() + self.config.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo) + def worker_errordown(self, node, error): """Emitted by the WorkerController when a node dies.""" self.config.hook.pytest_testnodedown(node=node, error=error) @@ -205,6 +228,7 @@ def worker_errordown(self, node, error): self._clone_node(node) self._active_nodes.remove(node) + @pytest.hookimpl def pytest_terminal_summary(self, terminalreporter): if self.config.option.verbose >= 0 and self._summary_report: terminalreporter.write_sep("=", "xdist: {}".format(self._summary_report)) @@ -222,7 +246,7 @@ def worker_collectionfinish(self, node, ids): return self.config.hook.pytest_xdist_node_collection_finished(node=node, ids=ids) # tell session which items were effectively collected otherwise - # the master node will finish the session with EXIT_NOTESTSCOLLECTED + # the controller node will finish the session with EXIT_NOTESTSCOLLECTED self._session.testscollected = len(ids) self.sched.add_node_collection(node, ids) if self.terminal: @@ -270,6 +294,8 @@ def worker_collectreport(self, node, rep): def worker_warning_captured(self, warning_message, when, item): """Emitted when a node calls the pytest_warning_captured hook (deprecated in 6.0).""" + # This hook as been removed in pytest 7.1, and we can remove support once we only + # support pytest >=7.1. kwargs = dict(warning_message=warning_message, when=when, item=item) self.config.hook.pytest_warning_captured.call_historic(kwargs=kwargs) @@ -325,6 +351,12 @@ def handle_crashitem(self, nodeid, worker): nodeid, (fspath, None, fspath), (), "failed", msg, "???" 
) rep.node = worker + + self.config.hook.pytest_handlecrashitem( + crashitem=nodeid, + report=rep, + sched=self.sched, + ) self.config.hook.pytest_runtest_logreport(report=rep) @@ -366,6 +398,7 @@ def rewrite(self, line, newline=False): self._lastlen = len(line) self.tr.rewrite(pline, bold=True) + @pytest.hookimpl def pytest_xdist_setupnodes(self, specs): self._specs = specs for spec in specs: @@ -373,6 +406,7 @@ def pytest_xdist_setupnodes(self, specs): self.setstatus(spec, "I", show=True) self.ensure_show_status() + @pytest.hookimpl def pytest_xdist_newgateway(self, gateway): if self.config.option.verbose > 0: rinfo = gateway._rinfo() @@ -384,6 +418,7 @@ def pytest_xdist_newgateway(self, gateway): ) self.setstatus(gateway.spec, "C") + @pytest.hookimpl def pytest_testnodeready(self, node): if self.config.option.verbose > 0: d = node.workerinfo @@ -393,6 +428,7 @@ def pytest_testnodeready(self, node): self.rewrite(infoline, newline=True) self.setstatus(node.gateway.spec, "ok") + @pytest.hookimpl def pytest_testnodedown(self, node, error): if not error: return diff --git a/src/xdist/looponfail.py b/src/xdist/looponfail.py index 19b9313e..ef4c34ff 100644 --- a/src/xdist/looponfail.py +++ b/src/xdist/looponfail.py @@ -13,6 +13,7 @@ import execnet +@pytest.hookimpl def pytest_addoption(parser): group = parser.getgroup("xdist", "distributed and subprocess testing") group._addoption( @@ -26,6 +27,7 @@ def pytest_addoption(parser): ) +@pytest.hookimpl def pytest_cmdline_main(config): if config.getoption("looponfail"): @@ -38,7 +40,7 @@ def pytest_cmdline_main(config): def looponfail_main(config): remotecontrol = RemoteControl(config) - rootdirs = config.getini("looponfailroots") + rootdirs = [py.path.local(root) for root in config.getini("looponfailroots")] statrecorder = StatRecorder(rootdirs) try: while 1: @@ -61,7 +63,7 @@ def __init__(self, config): def trace(self, *args): if self.config.option.debug: - msg = " ".join([str(x) for x in args]) + msg = " ".join(str(x) 
for x in args) print("RemoteControl:", msg) def initgateway(self): @@ -178,6 +180,7 @@ def DEBUG(self, *args): if self.config.option.debug: print(" ".join(map(str, args))) + @pytest.hookimpl def pytest_collection(self, session): self.session = session self.trails = self.current_command @@ -192,10 +195,12 @@ def pytest_collection(self, session): hook.pytest_collection_finish(session=session) return True + @pytest.hookimpl def pytest_runtest_logreport(self, report): if report.failed: self.recorded_failures.append(report) + @pytest.hookimpl def pytest_collectreport(self, report): if report.failed: self.recorded_failures.append(report) diff --git a/src/xdist/newhooks.py b/src/xdist/newhooks.py index 4ac71960..77766244 100644 --- a/src/xdist/newhooks.py +++ b/src/xdist/newhooks.py @@ -14,50 +14,57 @@ import pytest +@pytest.hookspec() def pytest_xdist_setupnodes(config, specs): - """ called before any remote node is set up. """ + """called before any remote node is set up.""" +@pytest.hookspec() def pytest_xdist_newgateway(gateway): - """ called on new raw gateway creation. """ + """called on new raw gateway creation.""" +@pytest.hookspec() def pytest_xdist_rsyncstart(source, gateways): - """ called before rsyncing a directory to remote gateways takes place. """ + """called before rsyncing a directory to remote gateways takes place.""" +@pytest.hookspec() def pytest_xdist_rsyncfinish(source, gateways): - """ called after rsyncing a directory to remote gateways takes place. """ + """called after rsyncing a directory to remote gateways takes place.""" -@pytest.mark.firstresult +@pytest.hookspec(firstresult=True) def pytest_xdist_getremotemodule(): - """ called when creating remote node""" + """called when creating remote node""" +@pytest.hookspec() def pytest_configure_node(node): - """ configure node information before it gets instantiated. 
""" + """configure node information before it gets instantiated.""" +@pytest.hookspec() def pytest_testnodeready(node): - """ Test Node is ready to operate. """ + """Test Node is ready to operate.""" +@pytest.hookspec() def pytest_testnodedown(node, error): - """ Test Node is down. """ + """Test Node is down.""" +@pytest.hookspec() def pytest_xdist_node_collection_finished(node, ids): - """called by the master node when a node finishes collecting. - """ + """called by the controller node when a worker node finishes collecting.""" -@pytest.mark.firstresult +@pytest.hookspec(firstresult=True) def pytest_xdist_make_scheduler(config, log): - """ return a node scheduler implementation """ + """return a node scheduler implementation""" -@pytest.mark.firstresult +@pytest.hookspec(firstresult=True) def pytest_xdist_auto_num_workers(config): """ Return the number of workers to spawn when ``--numprocesses=auto`` is given in the @@ -65,3 +72,20 @@ def pytest_xdist_auto_num_workers(config): .. versionadded:: 2.1 """ + + +@pytest.hookspec(firstresult=True) +def pytest_handlecrashitem(crashitem, report, sched): + """ + Handle a crashitem, modifying the report if necessary. + + The scheduler is provided as a parameter to reschedule the test if desired with + `sched.mark_test_pending`. + + def pytest_handlecrashitem(crashitem, report, sched): + if should_rerun(crashitem): + sched.mark_test_pending(crashitem) + report.outcome = "rerun" + + .. 
versionadded:: 2.2.1 + """ diff --git a/src/xdist/plugin.py b/src/xdist/plugin.py index 2d8424d9..d0448fa7 100644 --- a/src/xdist/plugin.py +++ b/src/xdist/plugin.py @@ -1,17 +1,26 @@ import os import uuid +import sys +from pathlib import Path import py import pytest -def pytest_xdist_auto_num_workers(): +PYTEST_GTE_7 = hasattr(pytest, "version_tuple") and pytest.version_tuple >= (7, 0) # type: ignore[attr-defined] + +_sys_path = list(sys.path) # freeze a copy of sys.path at interpreter startup + + +@pytest.hookimpl +def pytest_xdist_auto_num_workers(config): try: import psutil except ImportError: pass else: - count = psutil.cpu_count(logical=False) or psutil.cpu_count() + use_logical = config.option.numprocesses == "logical" + count = psutil.cpu_count(logical=use_logical) or psutil.cpu_count() if count: return count try: @@ -36,12 +45,13 @@ def cpu_count(): def parse_numprocesses(s): - if s == "auto": - return "auto" + if s in ("auto", "logical"): + return s elif s is not None: return int(s) +@pytest.hookimpl def pytest_addoption(parser): group = parser.getgroup("xdist", "distributed and subprocess testing") group._addoption( @@ -51,9 +61,10 @@ def pytest_addoption(parser): metavar="numprocesses", action="store", type=parse_numprocesses, - help="shortcut for '--dist=load --tx=NUM*popen', " - "you can use 'auto' here for auto detection CPUs number on " - "host system and it will be 0 when used with --pdb", + help="Shortcut for '--dist=load --tx=NUM*popen'. With 'auto', attempt " + "to detect physical CPU count. With 'logical', detect logical CPU " + "count. If physical CPU count cannot be found, falls back to logical " + "count. 
This will be 0 when used with --pdb.", ) group.addoption( "--maxprocesses", @@ -75,7 +86,7 @@ def pytest_addoption(parser): "--dist", metavar="distmode", action="store", - choices=["each", "load", "loadscope", "loadfile", "no"], + choices=["each", "load", "loadscope", "loadfile", "loadgroup", "no"], dest="dist", default="no", help=( @@ -87,6 +98,7 @@ def pytest_addoption(parser): " the same scope to any available environment.\n\n" "loadfile: load balance by sending test grouped by file" " to any available environment.\n\n" + "loadgroup: like load, but sends tests marked with 'xdist_group' to the same worker.\n\n" "(default) no: run tests inprocess, don't distribute." ), ) @@ -142,18 +154,18 @@ def pytest_addoption(parser): parser.addini( "rsyncdirs", "list of (relative) paths to be rsynced for remote distributed testing.", - type="pathlist", + type="paths" if PYTEST_GTE_7 else "pathlist", ) parser.addini( "rsyncignore", "list of (relative) glob-style paths to be ignored for rsyncing.", - type="pathlist", + type="paths" if PYTEST_GTE_7 else "pathlist", ) parser.addini( "looponfailroots", - type="pathlist", + type="paths" if PYTEST_GTE_7 else "pathlist", help="directories to check for changes", - default=[py.path.local()], + default=[Path.cwd() if PYTEST_GTE_7 else py.path.local()], ) @@ -162,6 +174,7 @@ def pytest_addoption(parser): # ------------------------------------------------------------------------- +@pytest.hookimpl def pytest_addhooks(pluginmanager): from xdist import newhooks @@ -173,7 +186,7 @@ def pytest_addhooks(pluginmanager): # ------------------------------------------------------------------------- -@pytest.mark.trylast +@pytest.hookimpl(trylast=True) def pytest_configure(config): if config.getoption("dist") != "no" and not config.getvalue("collectonly"): from xdist.dsession import DSession @@ -184,13 +197,25 @@ def pytest_configure(config): if tr: tr.showfspath = False if config.getoption("boxed"): + warning = DeprecationWarning( + "The --boxed 
command line argument is deprecated. " + "Install pytest-forked and use --forked instead. " + "pytest-xdist 3.0.0 will remove the --boxed argument and pytest-forked dependency." + ) + config.issue_config_time_warning(warning, 2) config.option.forked = True + config_line = ( + "xdist_group: specify group for tests should run in same session." + "in relation to one another. " + "Provided by pytest-xdist." + ) + config.addinivalue_line("markers", config_line) -@pytest.mark.tryfirst + +@pytest.hookimpl(tryfirst=True) def pytest_cmdline_main(config): usepdb = config.getoption("usepdb", False) # a core option - if config.option.numprocesses == "auto": + if config.option.numprocesses in ("auto", "logical"): if usepdb: config.option.numprocesses = 0 config.option.dist = "no" @@ -227,8 +252,8 @@ def is_xdist_worker(request_or_session) -> bool: return hasattr(request_or_session.config, "workerinput") -def is_xdist_master(request_or_session) -> bool: - """Return `True` if this is the xdist master, `False` otherwise +def is_xdist_controller(request_or_session) -> bool: + """Return `True` if this is the xdist controller, `False` otherwise Note: this method also returns `False` when distribution has not been activated at all. @@ -241,9 +266,13 @@ def is_xdist_master(request_or_session) -> bool: ) -def get_xdist_worker_id(request_or_session) -> str: +# ALIAS: TODO, deprecate (#592) +is_xdist_master = is_xdist_controller + + +def get_xdist_worker_id(request_or_session): """Return the id of the current worker ('gw0', 'gw1', etc) or 'master' - if running on the 'master' node. + if running on the controller node. If not distributing tests (for example passing `-n0` or not passing `-n` at all) also return 'master'. 
@@ -253,6 +282,7 @@ def get_xdist_worker_id(request_or_session) -> str: if hasattr(request_or_session.config, "workerinput"): return request_or_session.config.workerinput["workerid"] else: + # TODO: remove "master", ideally for a None return "master" @@ -261,6 +291,7 @@ def worker_id(request): """Return the id of the current worker ('gw0', 'gw1', etc) or 'master' if running on the master node. """ + # TODO: remove "master", ideally for a None return get_xdist_worker_id(request) diff --git a/src/xdist/remote.py b/src/xdist/remote.py index 97dc180c..160b042a 100644 --- a/src/xdist/remote.py +++ b/src/xdist/remote.py @@ -16,6 +16,21 @@ from _pytest.config import _prepareconfig, Config +try: + from setproctitle import setproctitle +except ImportError: + + def setproctitle(title): + pass + + +def worker_title(title): + try: + setproctitle(title) + except Exception: + # changing the process name is very optional, no errors please + pass + class WorkerInteractor: def __init__(self, config, channel): @@ -32,10 +47,14 @@ def sendevent(self, name, **kwargs): self.log("sending", name, kwargs) self.channel.send((name, kwargs)) + @pytest.hookimpl def pytest_internalerror(self, excrepr): - for line in str(excrepr).split("\n"): + formatted_error = str(excrepr) + for line in formatted_error.split("\n"): self.log("IERROR>", line) + interactor.sendevent("internal_error", formatted_error=formatted_error) + @pytest.hookimpl def pytest_sessionstart(self, session): self.session = session workerinfo = getinfodict() @@ -48,9 +67,11 @@ def pytest_sessionfinish(self, exitstatus): yield self.sendevent("workerfinished", workeroutput=self.config.workeroutput) + @pytest.hookimpl def pytest_collection(self, session): self.sendevent("collectionstart") + @pytest.hookimpl def pytest_runtestloop(self, session): self.log("entering main loop") torun = [] @@ -83,26 +104,54 @@ def run_one_test(self, torun): else: nextitem = None + worker_title("[pytest-xdist running] %s" % item.nodeid) + start = 
time.time() self.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) duration = time.time() - start + + worker_title("[pytest-xdist idle]") + self.sendevent( "runtest_protocol_complete", item_index=self.item_index, duration=duration ) + def pytest_collection_modifyitems(self, session, config, items): + # add the group name to nodeid as suffix if --dist=loadgroup + if config.getvalue("loadgroup"): + for item in items: + mark = item.get_closest_marker("xdist_group") + if not mark: + continue + gname = ( + mark.args[0] + if len(mark.args) > 0 + else mark.kwargs.get("name", "default") + ) + item._nodeid = "{}@{}".format(item.nodeid, gname) + + @pytest.hookimpl def pytest_collection_finish(self, session): + try: + topdir = str(self.config.rootpath) + except AttributeError: # pytest <= 6.1.0 + topdir = str(self.config.rootdir) + self.sendevent( "collectionfinish", - topdir=str(session.fspath), + topdir=topdir, ids=[item.nodeid for item in session.items], ) + @pytest.hookimpl def pytest_runtest_logstart(self, nodeid, location): self.sendevent("logstart", nodeid=nodeid, location=location) + @pytest.hookimpl def pytest_runtest_logfinish(self, nodeid, location): self.sendevent("logfinish", nodeid=nodeid, location=location) + @pytest.hookimpl def pytest_runtest_logreport(self, report): data = self.config.hook.pytest_report_to_serializable( config=self.config, report=report @@ -113,14 +162,16 @@ def pytest_runtest_logreport(self, report): assert self.session.items[self.item_index].nodeid == report.nodeid self.sendevent("testreport", data=data) + @pytest.hookimpl def pytest_collectreport(self, report): - # send only reports that have not passed to master as optimization (#330) + # send only reports that have not passed to controller as optimization (#330) if not report.passed: data = self.config.hook.pytest_report_to_serializable( config=self.config, report=report ) self.sendevent("collectreport", data=data) + @pytest.hookimpl def pytest_warning_recorded(self, 
warning_message, when, nodeid, location): self.sendevent( "warning_recorded", @@ -137,7 +188,7 @@ def serialize_warning_message(warning_message): message_class_name = type(warning_message.message).__name__ message_str = str(warning_message.message) # check now if we can serialize the warning arguments (#349) - # if not, we will just use the exception message on the master node + # if not, we will just use the exception message on the controller node try: dumps(warning_message.message.args) except DumpError: @@ -199,6 +250,7 @@ def remote_initconfig(option_dict, args): def setup_config(config, basetemp): + config.option.loadgroup = config.getvalue("dist") == "loadgroup" config.option.looponfail = False config.option.usepdb = False config.option.dist = "no" @@ -209,15 +261,17 @@ def setup_config(config, basetemp): if __name__ == "__channelexec__": - channel = channel # noqa - workerinput, args, option_dict, change_sys_path = channel.receive() + channel = channel # type: ignore[name-defined] # noqa: F821 + workerinput, args, option_dict, change_sys_path = channel.receive() # type: ignore[name-defined] - if change_sys_path: + if change_sys_path is None: importpath = os.getcwd() sys.path.insert(0, importpath) os.environ["PYTHONPATH"] = ( importpath + os.pathsep + os.environ.get("PYTHONPATH", "") ) + else: + sys.path = change_sys_path os.environ["PYTEST_XDIST_TESTRUNUID"] = workerinput["testrunuid"] os.environ["PYTEST_XDIST_WORKER"] = workerinput["workerid"] @@ -231,7 +285,7 @@ def setup_config(config, basetemp): setup_config(config, option_dict.get("basetemp")) config._parser.prog = os.path.basename(workerinput["mainargv"][0]) - config.workerinput = workerinput - config.workeroutput = {} - interactor = WorkerInteractor(config, channel) + config.workerinput = workerinput # type: ignore[attr-defined] + config.workeroutput = {} # type: ignore[attr-defined] + interactor = WorkerInteractor(config, channel) # type: ignore[name-defined] 
config.hook.pytest_cmdline_main(config=config) diff --git a/src/xdist/report.py b/src/xdist/report.py index 8843b40b..d956577d 100644 --- a/src/xdist/report.py +++ b/src/xdist/report.py @@ -14,7 +14,8 @@ def report_collection_diff(from_collection, to_collection, from_id, to_id): error_message = ( "Different tests were collected between {from_id} and {to_id}. " "The difference is:\n" - "{diff}" + "{diff}\n" + "To see why this happens see Known limitations in documentation" ).format(from_id=from_id, to_id=to_id, diff="\n".join(diff)) - msg = "\n".join([x.rstrip() for x in error_message.split("\n")]) + msg = "\n".join(x.rstrip() for x in error_message.split("\n")) return msg diff --git a/src/xdist/scheduler/__init__.py b/src/xdist/scheduler/__init__.py index 06ba6b7b..ab2e830f 100644 --- a/src/xdist/scheduler/__init__.py +++ b/src/xdist/scheduler/__init__.py @@ -2,3 +2,4 @@ from xdist.scheduler.load import LoadScheduling # noqa from xdist.scheduler.loadfile import LoadFileScheduling # noqa from xdist.scheduler.loadscope import LoadScopeScheduling # noqa +from xdist.scheduler.loadgroup import LoadGroupScheduling # noqa diff --git a/src/xdist/scheduler/each.py b/src/xdist/scheduler/each.py index b2a04420..cfe99e7d 100644 --- a/src/xdist/scheduler/each.py +++ b/src/xdist/scheduler/each.py @@ -101,6 +101,14 @@ def add_node_collection(self, node, collection): def mark_test_complete(self, node, item_index, duration=0): self.node2pending[node].remove(item_index) + def mark_test_pending(self, item): + self.pending.insert( + 0, + self.collection.index(item), + ) + for node in self.node2pending: + self.check_schedule(node) + def remove_node(self, node): # KeyError if we didn't get an add_node() yet pending = self.node2pending.pop(node) diff --git a/src/xdist/scheduler/load.py b/src/xdist/scheduler/load.py index e378d9a6..f32caa55 100644 --- a/src/xdist/scheduler/load.py +++ b/src/xdist/scheduler/load.py @@ -151,6 +151,14 @@ def mark_test_complete(self, node, item_index, 
duration=0): self.node2pending[node].remove(item_index) self.check_schedule(node, duration=duration) + def mark_test_pending(self, item): + self.pending.insert( + 0, + self.collection.index(item), + ) + for node in self.node2pending: + self.check_schedule(node) + def check_schedule(self, node, duration=0): """Maybe schedule new items on the node diff --git a/src/xdist/scheduler/loadgroup.py b/src/xdist/scheduler/loadgroup.py new file mode 100644 index 00000000..072f64ab --- /dev/null +++ b/src/xdist/scheduler/loadgroup.py @@ -0,0 +1,54 @@ +from .loadscope import LoadScopeScheduling +from py.log import Producer + + +class LoadGroupScheduling(LoadScopeScheduling): + """Implement load scheduling across nodes, but grouping test by xdist_group mark. + + This class behaves very much like LoadScopeScheduling, but it groups tests by xdist_group mark + instead of the module or class to which they belong to. + """ + + def __init__(self, config, log=None): + super().__init__(config, log) + if log is None: + self.log = Producer("loadgroupsched") + else: + self.log = log.loadgroupsched + + def _split_scope(self, nodeid): + """Determine the scope (grouping) of a nodeid. + + There are usually 3 cases for a nodeid:: + + example/loadsuite/test/test_beta.py::test_beta0 + example/loadsuite/test/test_delta.py::Delta1::test_delta0 + example/loadsuite/epsilon/__init__.py::epsilon.epsilon + + #. Function in a test module. + #. Method of a class in a test module. + #. Doctest in a function in a package. + + With loadgroup, two cases are added:: + + example/loadsuite/test/test_beta.py::test_beta0 + example/loadsuite/test/test_delta.py::Delta1::test_delta0 + example/loadsuite/epsilon/__init__.py::epsilon.epsilon + example/loadsuite/test/test_gamma.py::test_beta0@gname + example/loadsuite/test/test_delta.py::Gamma1::test_gamma0@gname + + This function will group tests with the scope determined by splitting the first ``@`` + from the right. 
That is, test will be grouped in a single work unit when they have + same group name. In the above example, scopes will be:: + + example/loadsuite/test/test_beta.py::test_beta0 + example/loadsuite/test/test_delta.py::Delta1::test_delta0 + example/loadsuite/epsilon/__init__.py::epsilon.epsilon + gname + gname + """ + if nodeid.rfind("@") > nodeid.rfind("]"): + # check the index of ']' to avoid the case: parametrize mark value has '@' + return nodeid.split("@")[-1] + else: + return nodeid diff --git a/src/xdist/scheduler/loadscope.py b/src/xdist/scheduler/loadscope.py index 31dbe26c..69d9d9a2 100644 --- a/src/xdist/scheduler/loadscope.py +++ b/src/xdist/scheduler/loadscope.py @@ -243,6 +243,9 @@ def mark_test_complete(self, node, item_index, duration=0): self.assigned_work[node][scope][nodeid] = True self._reschedule(node) + def mark_test_pending(self, item): + raise NotImplementedError() + def _assign_work_unit(self, node): """Assign a work unit to a node.""" assert self.workqueue @@ -358,12 +361,12 @@ def schedule(self): extra_nodes = len(self.nodes) - len(self.workqueue) if extra_nodes > 0: - self.log("Shuting down {} nodes".format(extra_nodes)) + self.log("Shutting down {} nodes".format(extra_nodes)) for _ in range(extra_nodes): unused_node, assigned = self.assigned_work.popitem(last=True) - self.log("Shuting down unused node {}".format(unused_node)) + self.log("Shutting down unused node {}".format(unused_node)) unused_node.shutdown() # Assign initial workload diff --git a/src/xdist/workermanage.py b/src/xdist/workermanage.py index dfcb59b8..8d291d46 100644 --- a/src/xdist/workermanage.py +++ b/src/xdist/workermanage.py @@ -9,6 +9,7 @@ import execnet import xdist.remote +from xdist.plugin import _sys_path def parse_spec_config(config): @@ -117,8 +118,8 @@ def get_dir(p): def _getrsyncoptions(self): """Get options to be passed for rsync.""" ignores = list(self.DEFAULT_IGNORES) - ignores += self.config.option.rsyncignore - ignores += 
self.config.getini("rsyncignore") + ignores += [str(path) for path in self.config.option.rsyncignore] + ignores += [str(path) for path in self.config.getini("rsyncignore")] return { "ignores": ignores, @@ -157,8 +158,7 @@ def finished(): class HostRSync(execnet.RSync): - """ RSyncer that filters out common files - """ + """RSyncer that filters out common files""" def __init__(self, sourcedir, *args, **kwargs): self._synced = {} @@ -212,7 +212,7 @@ class WorkerController: ENDMARK = -1 class RemoteHook: - @pytest.mark.trylast + @pytest.hookimpl(trylast=True) def pytest_xdist_getremotemodule(self): return xdist.remote @@ -254,15 +254,16 @@ def setup(self): args = make_reltoroot(self.nodemanager.roots, args) if spec.popen: name = "popen-%s" % self.gateway.id - if hasattr(self.config, "_tmpdirhandler"): - basetemp = self.config._tmpdirhandler.getbasetemp() - option_dict["basetemp"] = str(basetemp.join(name)) + if hasattr(self.config, "_tmp_path_factory"): + basetemp = self.config._tmp_path_factory.getbasetemp() + option_dict["basetemp"] = str(basetemp / name) self.config.hook.pytest_configure_node(node=self) remote_module = self.config.hook.pytest_xdist_getremotemodule() self.channel = self.gateway.remote_exec(remote_module) # change sys.path only for remote workers - change_sys_path = not self.gateway.spec.popen + # restore sys.path from a frozen copy for local workers + change_sys_path = _sys_path if self.gateway.spec.popen else None self.channel.send((self.workerinput, args, option_dict, change_sys_path)) if self.putevent: @@ -294,7 +295,7 @@ def shutdown(self): self._shutdown_sent = True def sendcommand(self, name, **kwargs): - """ send a named parametrized command to the other side. 
""" + """send a named parametrized command to the other side.""" self.log("sending command {}(**{})".format(name, kwargs)) self.channel.send((name, kwargs)) @@ -303,12 +304,12 @@ def notify_inproc(self, eventname, **kwargs): self.putevent((eventname, kwargs)) def process_from_remote(self, eventcall): # noqa too complex - """ this gets called for each object we receive from - the other side and if the channel closes. + """this gets called for each object we receive from + the other side and if the channel closes. - Note that channel callbacks run in the receiver - thread of execnet gateways - we need to - avoid raising exceptions or doing heavy work. + Note that channel callbacks run in the receiver + thread of execnet gateways - we need to + avoid raising exceptions or doing heavy work. """ try: if eventcall == self.ENDMARK: @@ -324,6 +325,8 @@ def process_from_remote(self, eventcall): # noqa too complex self.log("ignoring {}({})".format(eventname, kwargs)) elif eventname == "workerready": self.notify_inproc(eventname, node=self, **kwargs) + elif eventname == "internal_error": + self.notify_inproc(eventname, node=self, **kwargs) elif eventname == "workerfinished": self._down = True self.workeroutput = kwargs["workeroutput"] @@ -420,9 +423,9 @@ def unserialize_warning_message(data): kwargs = {"message": message, "category": category} # access private _WARNING_DETAILS because the attributes vary between Python versions - for attr_name in warnings.WarningMessage._WARNING_DETAILS: + for attr_name in warnings.WarningMessage._WARNING_DETAILS: # type: ignore[attr-defined] if attr_name in ("message", "category"): continue kwargs[attr_name] = data[attr_name] - return warnings.WarningMessage(**kwargs) + return warnings.WarningMessage(**kwargs) # type: ignore[arg-type] diff --git a/testing/acceptance_test.py b/testing/acceptance_test.py index dbffe244..05bd9bf6 100644 --- a/testing/acceptance_test.py +++ b/testing/acceptance_test.py @@ -1,90 +1,93 @@ import os import re 
+import shutil +from typing import Dict +from typing import List +from typing import Tuple -import py import pytest import xdist class TestDistribution: - def test_n1_pass(self, testdir): - p1 = testdir.makepyfile( + def test_n1_pass(self, pytester: pytest.Pytester) -> None: + p1 = pytester.makepyfile( """ def test_ok(): pass """ ) - result = testdir.runpytest(p1, "-n1") + result = pytester.runpytest(p1, "-n1") assert result.ret == 0 result.stdout.fnmatch_lines(["*1 passed*"]) - def test_n1_fail(self, testdir): - p1 = testdir.makepyfile( + def test_n1_fail(self, pytester: pytest.Pytester) -> None: + p1 = pytester.makepyfile( """ def test_fail(): assert 0 """ ) - result = testdir.runpytest(p1, "-n1") + result = pytester.runpytest(p1, "-n1") assert result.ret == 1 result.stdout.fnmatch_lines(["*1 failed*"]) - def test_n1_import_error(self, testdir): - p1 = testdir.makepyfile( + def test_n1_import_error(self, pytester: pytest.Pytester) -> None: + p1 = pytester.makepyfile( """ import __import_of_missing_module def test_import(): pass """ ) - result = testdir.runpytest(p1, "-n1") + result = pytester.runpytest(p1, "-n1") assert result.ret == 1 result.stdout.fnmatch_lines( ["E *Error: No module named *__import_of_missing_module*"] ) - def test_n2_import_error(self, testdir): + def test_n2_import_error(self, pytester: pytest.Pytester) -> None: """Check that we don't report the same import error multiple times in distributed mode.""" - p1 = testdir.makepyfile( + p1 = pytester.makepyfile( """ import __import_of_missing_module def test_import(): pass """ ) - result1 = testdir.runpytest(p1, "-n2") - result2 = testdir.runpytest(p1, "-n1") + result1 = pytester.runpytest(p1, "-n2") + result2 = pytester.runpytest(p1, "-n1") assert len(result1.stdout.lines) == len(result2.stdout.lines) - def test_n1_skip(self, testdir): - p1 = testdir.makepyfile( + def test_n1_skip(self, pytester: pytest.Pytester) -> None: + p1 = pytester.makepyfile( """ def test_skip(): import pytest 
pytest.skip("myreason") """ ) - result = testdir.runpytest(p1, "-n1") + result = pytester.runpytest(p1, "-n1") assert result.ret == 0 result.stdout.fnmatch_lines(["*1 skipped*"]) - def test_manytests_to_one_import_error(self, testdir): - p1 = testdir.makepyfile( + def test_manytests_to_one_import_error(self, pytester: pytest.Pytester) -> None: + p1 = pytester.makepyfile( """ import __import_of_missing_module def test_import(): pass """ ) - result = testdir.runpytest(p1, "--tx=popen", "--tx=popen") + result = pytester.runpytest(p1, "--tx=popen", "--tx=popen") assert result.ret in (1, 2) result.stdout.fnmatch_lines( ["E *Error: No module named *__import_of_missing_module*"] ) - def test_manytests_to_one_popen(self, testdir): - p1 = testdir.makepyfile( + def test_manytests_to_one_popen(self, pytester: pytest.Pytester) -> None: + p1 = pytester.makepyfile( """ import pytest def test_fail0(): @@ -97,12 +100,12 @@ def test_skip(): pytest.skip("hello") """ ) - result = testdir.runpytest(p1, "-v", "-d", "--tx=popen", "--tx=popen") + result = pytester.runpytest(p1, "-v", "-d", "--tx=popen", "--tx=popen") result.stdout.fnmatch_lines(["*1*Python*", "*2 failed, 1 passed, 1 skipped*"]) assert result.ret == 1 - def test_n1_fail_minus_x(self, testdir): - p1 = testdir.makepyfile( + def test_n1_fail_minus_x(self, pytester: pytest.Pytester) -> None: + p1 = pytester.makepyfile( """ def test_fail1(): assert 0 @@ -110,25 +113,25 @@ def test_fail2(): assert 0 """ ) - result = testdir.runpytest(p1, "-x", "-v", "-n1") + result = pytester.runpytest(p1, "-x", "-v", "-n1") assert result.ret == 2 result.stdout.fnmatch_lines(["*Interrupted: stopping*1*", "*1 failed*"]) - def test_basetemp_in_subprocesses(self, testdir): - p1 = testdir.makepyfile( + def test_basetemp_in_subprocesses(self, pytester: pytest.Pytester) -> None: + p1 = pytester.makepyfile( """ - def test_send(tmpdir): - import py - assert tmpdir.relto(py.path.local(%r)), tmpdir + def test_send(tmp_path): + from pathlib import Path + 
assert tmp_path.relative_to(Path(%r)), tmp_path """ - % str(testdir.tmpdir) + % str(pytester.path) ) - result = testdir.runpytest_subprocess(p1, "-n1") + result = pytester.runpytest_subprocess(p1, "-n1") assert result.ret == 0 result.stdout.fnmatch_lines(["*1 passed*"]) - def test_dist_ini_specified(self, testdir): - p1 = testdir.makepyfile( + def test_dist_ini_specified(self, pytester: pytest.Pytester) -> None: + p1 = pytester.makepyfile( """ import pytest def test_fail0(): @@ -141,22 +144,21 @@ def test_skip(): pytest.skip("hello") """ ) - testdir.makeini( + pytester.makeini( """ [pytest] addopts = --tx=3*popen """ ) - result = testdir.runpytest(p1, "-d", "-v") + result = pytester.runpytest(p1, "-d", "-v") result.stdout.fnmatch_lines(["*2*Python*", "*2 failed, 1 passed, 1 skipped*"]) assert result.ret == 1 - @pytest.mark.xfail("sys.platform.startswith('java')", run=False) - def test_dist_tests_with_crash(self, testdir): + def test_dist_tests_with_crash(self, pytester: pytest.Pytester) -> None: if not hasattr(os, "kill"): pytest.skip("no os.kill") - p1 = testdir.makepyfile( + p1 = pytester.makepyfile( """ import pytest def test_fail0(): @@ -174,7 +176,7 @@ def test_crash(): os.kill(os.getpid(), 15) """ ) - result = testdir.runpytest(p1, "-v", "-d", "-n1") + result = pytester.runpytest(p1, "-v", "-d", "-n1") result.stdout.fnmatch_lines( [ "*Python*", @@ -185,10 +187,12 @@ def test_crash(): ) assert result.ret == 1 - def test_distribution_rsyncdirs_example(self, testdir, monkeypatch): + def test_distribution_rsyncdirs_example( + self, pytester: pytest.Pytester, monkeypatch + ) -> None: # use a custom plugin that has a custom command-line option to ensure # this is propagated to workers (see #491) - testdir.makepyfile( + pytester.makepyfile( **{ "myplugin/src/foobarplugin.py": """ from __future__ import print_function @@ -200,25 +204,26 @@ def test_distribution_rsyncdirs_example(self, testdir, monkeypatch): def pytest_addoption(parser): parser.addoption("--foobar", 
action="store", dest="foobar_opt") - @pytest.mark.tryfirst + @pytest.hookimpl(tryfirst=True) def pytest_load_initial_conftests(early_config): opt = early_config.known_args_namespace.foobar_opt print("--foobar=%s active! [%s]" % (opt, os.getpid()), file=sys.stderr) """ } ) - assert (testdir.tmpdir / "myplugin/src/foobarplugin.py").check(file=1) + assert (pytester.path / "myplugin/src/foobarplugin.py").is_file() monkeypatch.setenv( - "PYTHONPATH", str(testdir.tmpdir / "myplugin/src"), prepend=os.pathsep + "PYTHONPATH", str(pytester.path / "myplugin/src"), prepend=os.pathsep ) - source = testdir.mkdir("source") - dest = testdir.mkdir("dest") - subdir = source.mkdir("example_pkg") - subdir.ensure("__init__.py") - p = subdir.join("test_one.py") - p.write("def test_5():\n assert not __file__.startswith(%r)" % str(p)) - result = testdir.runpytest_subprocess( + source = pytester.mkdir("source") + dest = pytester.mkdir("dest") + subdir = source / "example_pkg" + subdir.mkdir() + subdir.joinpath("__init__.py").touch() + p = subdir / "test_one.py" + p.write_text("def test_5():\n assert not __file__.startswith(%r)" % str(p)) + result = pytester.runpytest_subprocess( "-v", "-d", "-s", @@ -239,12 +244,12 @@ def pytest_load_initial_conftests(early_config): ] ) result.stderr.fnmatch_lines(["--foobar=123 active! *"]) - assert dest.join(subdir.basename).check(dir=1) + assert dest.joinpath(subdir.name).is_dir() - def test_data_exchange(self, testdir): - testdir.makeconftest( + def test_data_exchange(self, pytester: pytest.Pytester) -> None: + pytester.makeconftest( """ - # This hook only called on master. + # This hook only called on the controlling process. def pytest_configure_node(node): node.workerinput['a'] = 42 node.workerinput['b'] = 7 @@ -257,7 +262,7 @@ def pytest_configure(config): r = a + b config.workeroutput['r'] = r - # This hook only called on master. + # This hook only called on the controlling process. 
def pytest_testnodedown(node, error): node.config.calc_result = node.workeroutput['r'] @@ -268,50 +273,50 @@ def pytest_terminal_summary(terminalreporter): 'calculated result is %s' % calc_result) """ ) - p1 = testdir.makepyfile("def test_func(): pass") - result = testdir.runpytest("-v", p1, "-d", "--tx=popen") + p1 = pytester.makepyfile("def test_func(): pass") + result = pytester.runpytest("-v", p1, "-d", "--tx=popen") result.stdout.fnmatch_lines( ["*0*Python*", "*calculated result is 49*", "*1 passed*"] ) assert result.ret == 0 - def test_keyboardinterrupt_hooks_issue79(self, testdir): - testdir.makepyfile( + def test_keyboardinterrupt_hooks_issue79(self, pytester: pytest.Pytester) -> None: + pytester.makepyfile( __init__="", test_one=""" def test_hello(): raise KeyboardInterrupt() """, ) - testdir.makeconftest( + pytester.makeconftest( """ def pytest_sessionfinish(session): # on the worker if hasattr(session.config, 'workeroutput'): session.config.workeroutput['s2'] = 42 - # on the master + # on the controller def pytest_testnodedown(node, error): assert node.workeroutput['s2'] == 42 print ("s2call-finished") """ ) args = ["-n1", "--debug"] - result = testdir.runpytest_subprocess(*args) + result = pytester.runpytest_subprocess(*args) s = result.stdout.str() assert result.ret == 2 assert "s2call" in s assert "Interrupted" in s - def test_keyboard_interrupt_dist(self, testdir): + def test_keyboard_interrupt_dist(self, pytester: pytest.Pytester) -> None: # xxx could be refined to check for return code - testdir.makepyfile( + pytester.makepyfile( """ def test_sleep(): import time time.sleep(10) """ ) - child = testdir.spawn_pytest("-n1 -v", expect_timeout=30.0) + child = pytester.spawn_pytest("-n1 -v", expect_timeout=30.0) child.expect(".*test_sleep.*") child.kill(2) # keyboard interrupt child.expect(".*KeyboardInterrupt.*") @@ -319,42 +324,42 @@ def test_sleep(): child.close() # assert ret == 2 - def test_dist_with_collectonly(self, testdir): - p1 = 
testdir.makepyfile( + def test_dist_with_collectonly(self, pytester: pytest.Pytester) -> None: + p1 = pytester.makepyfile( """ def test_ok(): pass """ ) - result = testdir.runpytest(p1, "-n1", "--collect-only") + result = pytester.runpytest(p1, "-n1", "--collect-only") assert result.ret == 0 result.stdout.fnmatch_lines(["*collected 1 item*"]) class TestDistEach: - def test_simple(self, testdir): - testdir.makepyfile( + def test_simple(self, pytester: pytest.Pytester) -> None: + pytester.makepyfile( """ def test_hello(): pass """ ) - result = testdir.runpytest_subprocess("--debug", "--dist=each", "--tx=2*popen") + result = pytester.runpytest_subprocess("--debug", "--dist=each", "--tx=2*popen") assert not result.ret result.stdout.fnmatch_lines(["*2 pass*"]) @pytest.mark.xfail( - run=False, reason="other python versions might not have py.test installed" + run=False, reason="other python versions might not have pytest installed" ) - def test_simple_diffoutput(self, testdir): + def test_simple_diffoutput(self, pytester: pytest.Pytester) -> None: interpreters = [] for name in ("python2.5", "python2.6"): - interp = py.path.local.sysfind(name) + interp = shutil.which(name) if interp is None: pytest.skip("%s not found" % name) interpreters.append(interp) - testdir.makepyfile( + pytester.makepyfile( __init__="", test_one=""" import sys @@ -366,7 +371,7 @@ def test_hello(): args = ["--dist=each", "-v"] args += ["--tx", "popen//python=%s" % interpreters[0]] args += ["--tx", "popen//python=%s" % interpreters[1]] - result = testdir.runpytest(*args) + result = pytester.runpytest(*args) s = result.stdout.str() assert "2...5" in s assert "2...6" in s @@ -374,8 +379,8 @@ def test_hello(): class TestTerminalReporting: @pytest.mark.parametrize("verbosity", ["", "-q", "-v"]) - def test_output_verbosity(self, testdir, verbosity): - testdir.makepyfile( + def test_output_verbosity(self, pytester, verbosity: str) -> None: + pytester.makepyfile( """ def test_ok(): pass @@ -384,7 +389,7 @@ 
def test_ok(): args = ["-n1"] if verbosity: args.append(verbosity) - result = testdir.runpytest(*args) + result = pytester.runpytest(*args) out = result.stdout.str() if verbosity == "-v": assert "scheduling tests" in out @@ -397,8 +402,8 @@ def test_ok(): assert "scheduling tests" not in out assert "gw" in out - def test_pass_skip_fail(self, testdir): - testdir.makepyfile( + def test_pass_skip_fail(self, pytester: pytest.Pytester) -> None: + pytester.makepyfile( """ import pytest def test_ok(): @@ -409,7 +414,7 @@ def test_func(): assert 0 """ ) - result = testdir.runpytest("-n1", "-v") + result = pytester.runpytest("-n1", "-v") result.stdout.fnmatch_lines_random( [ "*PASS*test_pass_skip_fail.py*test_ok*", @@ -421,14 +426,14 @@ def test_func(): ["*def test_func():", "> assert 0", "E assert 0"] ) - def test_fail_platinfo(self, testdir): - testdir.makepyfile( + def test_fail_platinfo(self, pytester: pytest.Pytester) -> None: + pytester.makepyfile( """ def test_func(): assert 0 """ ) - result = testdir.runpytest("-n1", "-v") + result = pytester.runpytest("-n1", "-v") result.stdout.fnmatch_lines( [ "*FAIL*test_fail_platinfo.py*test_func*", @@ -439,31 +444,26 @@ def test_func(): ] ) - def test_logfinish_hook(self, testdir): + def test_logfinish_hook(self, pytester: pytest.Pytester) -> None: """Ensure the pytest_runtest_logfinish hook is being properly handled""" - from _pytest import hookspec - - if not hasattr(hookspec, "pytest_runtest_logfinish"): - pytest.skip("test requires pytest_runtest_logfinish hook in pytest (3.4+)") - - testdir.makeconftest( + pytester.makeconftest( """ def pytest_runtest_logfinish(): print('pytest_runtest_logfinish hook called') """ ) - testdir.makepyfile( + pytester.makepyfile( """ def test_func(): pass """ ) - result = testdir.runpytest("-n1", "-s") + result = pytester.runpytest("-n1", "-s") result.stdout.fnmatch_lines(["*pytest_runtest_logfinish hook called*"]) -def test_teardownfails_one_function(testdir): - p = testdir.makepyfile( +def 
test_teardownfails_one_function(pytester: pytest.Pytester) -> None: + p = pytester.makepyfile( """ def test_func(): pass @@ -471,15 +471,15 @@ def teardown_function(function): assert 0 """ ) - result = testdir.runpytest(p, "-n1", "--tx=popen") + result = pytester.runpytest(p, "-n1", "--tx=popen") result.stdout.fnmatch_lines( ["*def teardown_function(function):*", "*1 passed*1 error*"] ) @pytest.mark.xfail -def test_terminate_on_hangingnode(testdir): - p = testdir.makeconftest( +def test_terminate_on_hangingnode(pytester: pytest.Pytester) -> None: + p = pytester.makeconftest( """ def pytest_sessionfinish(session): if session.nodeid == "my": # running on worker @@ -487,14 +487,14 @@ def pytest_sessionfinish(session): time.sleep(3) """ ) - result = testdir.runpytest(p, "--dist=each", "--tx=popen//id=my") + result = pytester.runpytest(p, "--dist=each", "--tx=popen//id=my") assert result.duration < 2.0 result.stdout.fnmatch_lines(["*killed*my*"]) @pytest.mark.xfail(reason="works if run outside test suite", run=False) -def test_session_hooks(testdir): - testdir.makeconftest( +def test_session_hooks(pytester: pytest.Pytester) -> None: + pytester.makeconftest( """ import sys def pytest_sessionstart(session): @@ -503,7 +503,7 @@ def pytest_sessionfinish(session): if hasattr(session.config, 'workerinput'): name = "worker" else: - name = "master" + name = "controller" with open(name, "w") as f: f.write("xy") # let's fail on the worker @@ -511,28 +511,28 @@ def pytest_sessionfinish(session): raise ValueError(42) """ ) - p = testdir.makepyfile( + p = pytester.makepyfile( """ import sys def test_hello(): assert hasattr(sys, 'pytestsessionhooks') """ ) - result = testdir.runpytest(p, "--dist=each", "--tx=popen") + result = pytester.runpytest(p, "--dist=each", "--tx=popen") result.stdout.fnmatch_lines(["*ValueError*", "*1 passed*"]) assert not result.ret d = result.parseoutcomes() assert d["passed"] == 1 - assert testdir.tmpdir.join("worker").check() - assert 
testdir.tmpdir.join("master").check() + assert pytester.path.joinpath("worker").exists() + assert pytester.path.joinpath("controller").exists() -def test_session_testscollected(testdir): +def test_session_testscollected(pytester: pytest.Pytester) -> None: """ - Make sure master node is updating the session object with the number + Make sure controller node is updating the session object with the number of tests collected from the workers. """ - testdir.makepyfile( + pytester.makepyfile( test_foo=""" import pytest @pytest.mark.parametrize('i', range(3)) @@ -540,7 +540,7 @@ def test_ok(i): pass """ ) - testdir.makeconftest( + pytester.makeconftest( """ def pytest_sessionfinish(session): collected = getattr(session, 'testscollected', None) @@ -548,15 +548,15 @@ def pytest_sessionfinish(session): f.write('collected = %s' % collected) """ ) - result = testdir.inline_run("-n1") + result = pytester.inline_run("-n1") result.assertoutcome(passed=3) - collected_file = testdir.tmpdir.join("testscollected") - assert collected_file.isfile() - assert collected_file.read() == "collected = 3" + collected_file = pytester.path / "testscollected" + assert collected_file.is_file() + assert collected_file.read_text() == "collected = 3" -def test_fixture_teardown_failure(testdir): - p = testdir.makepyfile( +def test_fixture_teardown_failure(pytester: pytest.Pytester) -> None: + p = pytester.makepyfile( """ import pytest @pytest.fixture(scope="module") @@ -568,14 +568,16 @@ def test_hello(myarg): pass """ ) - result = testdir.runpytest_subprocess("--debug", p) # , "-n1") + result = pytester.runpytest_subprocess(p, "-n1") result.stdout.fnmatch_lines(["*ValueError*42*", "*1 passed*1 error*"]) assert result.ret -def test_config_initialization(testdir, monkeypatch, pytestconfig): - """Ensure workers and master are initialized consistently. 
Integration test for #445""" - testdir.makepyfile( +def test_config_initialization( + pytester: pytest.Pytester, monkeypatch: pytest.MonkeyPatch, pytestconfig +) -> None: + """Ensure workers and controller are initialized consistently. Integration test for #445""" + pytester.makepyfile( **{ "dir_a/test_foo.py": """ def test_1(request): @@ -583,7 +585,7 @@ def test_1(request): """ } ) - testdir.makefile( + pytester.makefile( ".ini", myconfig=""" [pytest] @@ -591,17 +593,17 @@ def test_1(request): """, ) monkeypatch.setenv("PYTEST_ADDOPTS", "-v") - result = testdir.runpytest("-n2", "-c", "myconfig.ini", "-v") + result = pytester.runpytest("-n2", "-c", "myconfig.ini", "-v") result.stdout.fnmatch_lines(["dir_a/test_foo.py::test_1*", "*= 1 passed in *"]) assert result.ret == 0 @pytest.mark.parametrize("when", ["setup", "call", "teardown"]) -def test_crashing_item(testdir, when): +def test_crashing_item(pytester, when) -> None: """Ensure crashing item is correctly reported during all testing stages""" code = dict(setup="", call="", teardown="") code[when] = "py.process.kill(os.getpid())" - p = testdir.makepyfile( + p = pytester.makepyfile( """ import os import py @@ -624,19 +626,19 @@ def test_ok(): ) ) passes = 2 if when == "teardown" else 1 - result = testdir.runpytest("-n2", p) + result = pytester.runpytest("-n2", p) result.stdout.fnmatch_lines( ["*crashed*test_crash*", "*1 failed*%d passed*" % passes] ) -def test_multiple_log_reports(testdir): +def test_multiple_log_reports(pytester: pytest.Pytester) -> None: """ Ensure that pytest-xdist supports plugins that emit multiple logreports (#206). Inspired by pytest-rerunfailures. 
""" - testdir.makeconftest( + pytester.makeconftest( """ from _pytest.runner import runtestprotocol def pytest_runtest_protocol(item, nextitem): @@ -648,31 +650,31 @@ def pytest_runtest_protocol(item, nextitem): return True """ ) - testdir.makepyfile( + pytester.makepyfile( """ def test(): pass """ ) - result = testdir.runpytest("-n1") + result = pytester.runpytest("-n1") result.stdout.fnmatch_lines(["*2 passed*"]) -def test_skipping(testdir): - p = testdir.makepyfile( +def test_skipping(pytester: pytest.Pytester) -> None: + p = pytester.makepyfile( """ import pytest def test_crash(): pytest.skip("hello") """ ) - result = testdir.runpytest("-n1", "-rs", p) + result = pytester.runpytest("-n1", "-rs", p) assert result.ret == 0 result.stdout.fnmatch_lines(["*hello*", "*1 skipped*"]) -def test_fixture_scope_caching_issue503(testdir): - p1 = testdir.makepyfile( +def test_fixture_scope_caching_issue503(pytester: pytest.Pytester) -> None: + p1 = pytester.makepyfile( """ import pytest @@ -690,17 +692,17 @@ def test_b(fix): pass """ ) - result = testdir.runpytest(p1, "-v", "-n1") + result = pytester.runpytest(p1, "-v", "-n1") assert result.ret == 0 result.stdout.fnmatch_lines(["*2 passed*"]) -def test_issue_594_random_parametrize(testdir): +def test_issue_594_random_parametrize(pytester: pytest.Pytester) -> None: """ Make sure that tests that are randomly parametrized display an appropriate error message, instead of silently skipping the entire test run. """ - p1 = testdir.makepyfile( + p1 = pytester.makepyfile( """ import pytest import random @@ -712,44 +714,42 @@ def test_foo(x): assert 1 """ ) - result = testdir.runpytest(p1, "-v", "-n4") + result = pytester.runpytest(p1, "-v", "-n4") assert result.ret == 1 result.stdout.fnmatch_lines(["Different tests were collected between gw* and gw*"]) -def test_tmpdir_disabled(testdir): - """Test xdist doesn't break if internal tmpdir plugin is disabled (#22). 
- """ - p1 = testdir.makepyfile( +def test_tmpdir_disabled(pytester: pytest.Pytester) -> None: + """Test xdist doesn't break if internal tmpdir plugin is disabled (#22).""" + p1 = pytester.makepyfile( """ def test_ok(): pass """ ) - result = testdir.runpytest(p1, "-n1", "-p", "no:tmpdir") + result = pytester.runpytest(p1, "-n1", "-p", "no:tmpdir") assert result.ret == 0 result.stdout.fnmatch_lines("*1 passed*") -@pytest.mark.parametrize("plugin", ["xdist.looponfail", "xdist.boxed"]) -def test_sub_plugins_disabled(testdir, plugin): - """Test that xdist doesn't break if we disable any of its sub-plugins. (#32) - """ - p1 = testdir.makepyfile( +@pytest.mark.parametrize("plugin", ["xdist.looponfail"]) +def test_sub_plugins_disabled(pytester, plugin) -> None: + """Test that xdist doesn't break if we disable any of its sub-plugins. (#32)""" + p1 = pytester.makepyfile( """ def test_ok(): pass """ ) - result = testdir.runpytest(p1, "-n1", "-p", "no:%s" % plugin) + result = pytester.runpytest(p1, "-n1", "-p", "no:%s" % plugin) assert result.ret == 0 result.stdout.fnmatch_lines("*1 passed*") class TestWarnings: @pytest.mark.parametrize("n", ["-n0", "-n1"]) - def test_warnings(self, testdir, n): - testdir.makepyfile( + def test_warnings(self, pytester, n) -> None: + pytester.makepyfile( """ import warnings, py, pytest @@ -758,41 +758,49 @@ def test_func(request): warnings.warn(UserWarning('this is a warning')) """ ) - result = testdir.runpytest(n) + result = pytester.runpytest(n) result.stdout.fnmatch_lines(["*this is a warning*", "*1 passed, 1 warning*"]) - def test_warning_captured_deprecated_in_pytest_6(self, testdir): + def test_warning_captured_deprecated_in_pytest_6( + self, pytester: pytest.Pytester + ) -> None: """ Do not trigger the deprecated pytest_warning_captured hook in pytest 6+ (#562) """ - import _pytest.hookspec + from _pytest import hookspec - if not hasattr(_pytest.hookspec, "pytest_warning_recorded"): - pytest.skip("test requires pytest 6.0+") + if not 
hasattr(hookspec, "pytest_warning_captured"): + pytest.skip( + f"pytest {pytest.__version__} does not have the pytest_warning_captured hook." + ) - testdir.makeconftest( + pytester.makeconftest( """ - def pytest_warning_captured(): - assert False, "this hook should not be called in this version" + def pytest_warning_captured(warning_message): + if warning_message == "my custom worker warning": + assert False, ( + "this hook should not be called from workers " + "in this version: {}" + ).format(warning_message) """ ) - testdir.makepyfile( + pytester.makepyfile( """ import warnings def test(): - warnings.warn("custom warning") + warnings.warn("my custom worker warning") """ ) - result = testdir.runpytest("-n1") - result.stdout.fnmatch_lines(["* 1 passed in *"]) + result = pytester.runpytest("-n1", "-Wignore") + result.stdout.fnmatch_lines(["*1 passed*"]) result.stdout.no_fnmatch_line("*this hook should not be called in this version") @pytest.mark.parametrize("n", ["-n0", "-n1"]) - def test_custom_subclass(self, testdir, n): + def test_custom_subclass(self, pytester, n) -> None: """Check that warning subclasses that don't honor the args attribute don't break pytest-xdist (#344) """ - testdir.makepyfile( + pytester.makepyfile( """ import warnings, py, pytest @@ -807,33 +815,34 @@ def test_func(request): warnings.warn(MyWarning("foo", 1)) """ ) - testdir.syspathinsert() - result = testdir.runpytest(n) + pytester.syspathinsert() + result = pytester.runpytest(n) result.stdout.fnmatch_lines(["*MyWarning*", "*1 passed, 1 warning*"]) @pytest.mark.parametrize("n", ["-n0", "-n1"]) - def test_unserializable_arguments(self, testdir, n): + def test_unserializable_arguments(self, pytester, n) -> None: """Check that warnings with unserializable arguments are handled correctly (#349).""" - testdir.makepyfile( + pytester.makepyfile( """ import warnings, pytest - def test_func(tmpdir): - fn = (tmpdir / 'foo.txt').ensure(file=1) + def test_func(tmp_path): + fn = tmp_path / 'foo.txt' + 
fn.touch() with fn.open('r') as f: warnings.warn(UserWarning("foo", f)) """ ) - testdir.syspathinsert() - result = testdir.runpytest(n) + pytester.syspathinsert() + result = pytester.runpytest(n) result.stdout.fnmatch_lines(["*UserWarning*foo.txt*", "*1 passed, 1 warning*"]) @pytest.mark.parametrize("n", ["-n0", "-n1"]) - def test_unserializable_warning_details(self, testdir, n): + def test_unserializable_warning_details(self, pytester, n) -> None: """Check that warnings with unserializable _WARNING_DETAILS are handled correctly (#379). """ - testdir.makepyfile( + pytester.makepyfile( """ import warnings, pytest import socket @@ -847,28 +856,28 @@ def abuse_socket(): # _WARNING_DETAIL. We need to test that it is not serialized # (it can't be, so the test will fail if we try to). @pytest.mark.filterwarnings('always') - def test_func(tmpdir): + def test_func(tmp_path): abuse_socket() gc.collect() """ ) - testdir.syspathinsert() - result = testdir.runpytest(n) + pytester.syspathinsert() + result = pytester.runpytest(n) result.stdout.fnmatch_lines( ["*ResourceWarning*unclosed*", "*1 passed, 1 warning*"] ) class TestNodeFailure: - def test_load_single(self, testdir): - f = testdir.makepyfile( + def test_load_single(self, pytester: pytest.Pytester) -> None: + f = pytester.makepyfile( """ import os def test_a(): os._exit(1) def test_b(): pass """ ) - res = testdir.runpytest(f, "-n1") + res = pytester.runpytest(f, "-n1") res.stdout.fnmatch_lines( [ "replacing crashed worker gw*", @@ -877,8 +886,8 @@ def test_b(): pass ] ) - def test_load_multiple(self, testdir): - f = testdir.makepyfile( + def test_load_multiple(self, pytester: pytest.Pytester) -> None: + f = pytester.makepyfile( """ import os def test_a(): pass @@ -887,7 +896,7 @@ def test_c(): pass def test_d(): pass """ ) - res = testdir.runpytest(f, "-n2") + res = pytester.runpytest(f, "-n2") res.stdout.fnmatch_lines( [ "replacing crashed worker gw*", @@ -896,15 +905,15 @@ def test_d(): pass ] ) - def 
test_each_single(self, testdir): - f = testdir.makepyfile( + def test_each_single(self, pytester: pytest.Pytester) -> None: + f = pytester.makepyfile( """ import os def test_a(): os._exit(1) def test_b(): pass """ ) - res = testdir.runpytest(f, "--dist=each", "--tx=popen") + res = pytester.runpytest(f, "--dist=each", "--tx=popen") res.stdout.fnmatch_lines( [ "replacing crashed worker gw*", @@ -914,15 +923,15 @@ def test_b(): pass ) @pytest.mark.xfail(reason="#20: xdist race condition on node restart") - def test_each_multiple(self, testdir): - f = testdir.makepyfile( + def test_each_multiple(self, pytester: pytest.Pytester) -> None: + f = pytester.makepyfile( """ import os def test_a(): os._exit(1) def test_b(): pass """ ) - res = testdir.runpytest(f, "--dist=each", "--tx=2*popen") + res = pytester.runpytest(f, "--dist=each", "--tx=2*popen") res.stdout.fnmatch_lines( [ "*Replacing crashed worker*", @@ -931,8 +940,8 @@ def test_b(): pass ] ) - def test_max_worker_restart(self, testdir): - f = testdir.makepyfile( + def test_max_worker_restart(self, pytester: pytest.Pytester) -> None: + f = pytester.makepyfile( """ import os def test_a(): pass @@ -941,7 +950,7 @@ def test_c(): os._exit(1) def test_d(): pass """ ) - res = testdir.runpytest(f, "-n4", "--max-worker-restart=1") + res = pytester.runpytest(f, "-n4", "--max-worker-restart=1") res.stdout.fnmatch_lines( [ "replacing crashed worker*", @@ -952,15 +961,15 @@ def test_d(): pass ] ) - def test_max_worker_restart_tests_queued(self, testdir): - f = testdir.makepyfile( + def test_max_worker_restart_tests_queued(self, pytester: pytest.Pytester) -> None: + f = pytester.makepyfile( """ import os, pytest @pytest.mark.parametrize('i', range(10)) def test(i): os._exit(1) """ ) - res = testdir.runpytest(f, "-n2", "--max-worker-restart=3") + res = pytester.runpytest(f, "-n2", "--max-worker-restart=3") res.stdout.fnmatch_lines( [ "replacing crashed worker*", @@ -973,14 +982,14 @@ def test(i): os._exit(1) ) assert 
"INTERNALERROR" not in res.stdout.str() - def test_max_worker_restart_die(self, testdir): - f = testdir.makepyfile( + def test_max_worker_restart_die(self, pytester: pytest.Pytester) -> None: + f = pytester.makepyfile( """ import os os._exit(1) """ ) - res = testdir.runpytest(f, "-n4", "--max-worker-restart=0") + res = pytester.runpytest(f, "-n4", "--max-worker-restart=0") res.stdout.fnmatch_lines( [ "* xdist: worker gw* crashed and worker restarting disabled *", @@ -988,8 +997,8 @@ def test_max_worker_restart_die(self, testdir): ] ) - def test_disable_restart(self, testdir): - f = testdir.makepyfile( + def test_disable_restart(self, pytester: pytest.Pytester) -> None: + f = pytester.makepyfile( """ import os def test_a(): pass @@ -997,7 +1006,7 @@ def test_b(): os._exit(1) def test_c(): pass """ ) - res = testdir.runpytest(f, "-n4", "--max-worker-restart=0") + res = pytester.runpytest(f, "-n4", "--max-worker-restart=0") res.stdout.fnmatch_lines( [ "worker gw* crashed and worker restarting disabled", @@ -1009,10 +1018,10 @@ def test_c(): pass @pytest.mark.parametrize("n", [0, 2]) -def test_worker_id_fixture(testdir, n): +def test_worker_id_fixture(pytester, n) -> None: import glob - f = testdir.makepyfile( + f = pytester.makepyfile( """ import pytest @pytest.mark.parametrize("run_num", range(2)) @@ -1021,10 +1030,10 @@ def test_worker_id1(worker_id, run_num): f.write(worker_id) """ ) - result = testdir.runpytest(f, "-n%d" % n) + result = pytester.runpytest(f, "-n%d" % n) result.stdout.fnmatch_lines("* 2 passed in *") worker_ids = set() - for fname in glob.glob(str(testdir.tmpdir.join("*.txt"))): + for fname in glob.glob(str(pytester.path / "*.txt")): with open(fname) as f: worker_ids.add(f.read().strip()) if n == 0: @@ -1034,10 +1043,10 @@ def test_worker_id1(worker_id, run_num): @pytest.mark.parametrize("n", [0, 2]) -def test_testrun_uid_fixture(testdir, n): +def test_testrun_uid_fixture(pytester, n) -> None: import glob - f = testdir.makepyfile( + f = 
pytester.makepyfile( """ import pytest @pytest.mark.parametrize("run_num", range(2)) @@ -1046,10 +1055,10 @@ def test_testrun_uid1(testrun_uid, run_num): f.write(testrun_uid) """ ) - result = testdir.runpytest(f, "-n%d" % n) + result = pytester.runpytest(f, "-n%d" % n) result.stdout.fnmatch_lines("* 2 passed in *") testrun_uids = set() - for fname in glob.glob(str(testdir.tmpdir.join("*.txt"))): + for fname in glob.glob(str(pytester.path / "*.txt")): with open(fname) as f: testrun_uids.add(f.read().strip()) assert len(testrun_uids) == 1 @@ -1057,21 +1066,21 @@ def test_testrun_uid1(testrun_uid, run_num): @pytest.mark.parametrize("tb", ["auto", "long", "short", "no", "line", "native"]) -def test_error_report_styles(testdir, tb): - testdir.makepyfile( +def test_error_report_styles(pytester, tb) -> None: + pytester.makepyfile( """ import pytest def test_error_report_styles(): raise RuntimeError('some failure happened') """ ) - result = testdir.runpytest("-n1", "--tb=%s" % tb) + result = pytester.runpytest("-n1", "--tb=%s" % tb) if tb != "no": result.stdout.fnmatch_lines("*some failure happened*") result.assert_outcomes(failed=1) -def test_color_yes_collection_on_non_atty(testdir, request): +def test_color_yes_collection_on_non_atty(pytester, request) -> None: """skip collect progress report when working on non-terminals. 
Similar to pytest-dev/pytest#1397 @@ -1079,7 +1088,7 @@ def test_color_yes_collection_on_non_atty(testdir, request): tr = request.config.pluginmanager.getplugin("terminalreporter") if not hasattr(tr, "isatty"): pytest.skip("only valid for newer pytest versions") - testdir.makepyfile( + pytester.makepyfile( """ import pytest @pytest.mark.parametrize('i', range(10)) @@ -1088,34 +1097,34 @@ def test_this(i): """ ) args = ["--color=yes", "-n2"] - result = testdir.runpytest(*args) + result = pytester.runpytest(*args) assert "test session starts" in result.stdout.str() assert "\x1b[1m" in result.stdout.str() assert "gw0 [10] / gw1 [10]" in result.stdout.str() assert "gw0 C / gw1 C" not in result.stdout.str() -def test_without_terminal_plugin(testdir, request): +def test_without_terminal_plugin(pytester, request) -> None: """ No output when terminal plugin is disabled """ - testdir.makepyfile( + pytester.makepyfile( """ def test_1(): pass """ ) - result = testdir.runpytest("-p", "no:terminal", "-n2") + result = pytester.runpytest("-p", "no:terminal", "-n2") assert result.stdout.str() == "" assert result.stderr.str() == "" assert result.ret == 0 -def test_internal_error_with_maxfail(testdir): +def test_internal_error_with_maxfail(pytester: pytest.Pytester) -> None: """ Internal error when using --maxfail option (#62, #65). 
""" - testdir.makepyfile( + pytester.makepyfile( """ import pytest @@ -1129,21 +1138,33 @@ def test_aaa1(crasher): pass """ ) - result = testdir.runpytest_subprocess("--maxfail=1", "-n1") + result = pytester.runpytest_subprocess("--maxfail=1", "-n1") result.stdout.fnmatch_lines(["* 1 error in *"]) assert "INTERNALERROR" not in result.stderr.str() +def test_internal_errors_propagate_to_controller(pytester: pytest.Pytester) -> None: + pytester.makeconftest( + """ + def pytest_collection_modifyitems(): + raise RuntimeError("Some runtime error") + """ + ) + pytester.makepyfile("def test(): pass") + result = pytester.runpytest("-n1") + result.stdout.fnmatch_lines(["*RuntimeError: Some runtime error*"]) + + class TestLoadScope: - def test_by_module(self, testdir): + def test_by_module(self, pytester: pytest.Pytester) -> None: test_file = """ import pytest @pytest.mark.parametrize('i', range(10)) def test(i): pass """ - testdir.makepyfile(test_a=test_file, test_b=test_file) - result = testdir.runpytest("-n2", "--dist=loadscope", "-v") + pytester.makepyfile(test_a=test_file, test_b=test_file) + result = pytester.runpytest("-n2", "--dist=loadscope", "-v") assert get_workers_and_test_count_by_prefix( "test_a.py::test", result.outlines ) in ({"gw0": 10}, {"gw1": 10}) @@ -1151,8 +1172,8 @@ def test(i): "test_b.py::test", result.outlines ) in ({"gw0": 10}, {"gw1": 10}) - def test_by_class(self, testdir): - testdir.makepyfile( + def test_by_class(self, pytester: pytest.Pytester) -> None: + pytester.makepyfile( test_a=""" import pytest class TestA: @@ -1166,7 +1187,7 @@ def test(self, i): pass """ ) - result = testdir.runpytest("-n2", "--dist=loadscope", "-v") + result = pytester.runpytest("-n2", "--dist=loadscope", "-v") assert get_workers_and_test_count_by_prefix( "test_a.py::TestA", result.outlines ) in ({"gw0": 10}, {"gw1": 10}) @@ -1174,7 +1195,7 @@ def test(self, i): "test_a.py::TestB", result.outlines ) in ({"gw0": 10}, {"gw1": 10}) - def test_module_single_start(self, 
testdir): + def test_module_single_start(self, pytester: pytest.Pytester) -> None: """Fix test suite never finishing in case all workers start with a single test (#277).""" test_file1 = """ import pytest @@ -1188,8 +1209,8 @@ def test_1(): def test_2(): pass """ - testdir.makepyfile(test_a=test_file1, test_b=test_file1, test_c=test_file2) - result = testdir.runpytest("-n2", "--dist=loadscope", "-v") + pytester.makepyfile(test_a=test_file1, test_b=test_file1, test_c=test_file2) + result = pytester.runpytest("-n2", "--dist=loadscope", "-v") a = get_workers_and_test_count_by_prefix("test_a.py::test", result.outlines) b = get_workers_and_test_count_by_prefix("test_b.py::test", result.outlines) c1 = get_workers_and_test_count_by_prefix("test_c.py::test_1", result.outlines) @@ -1201,7 +1222,7 @@ def test_2(): class TestFileScope: - def test_by_module(self, testdir): + def test_by_module(self, pytester: pytest.Pytester) -> None: test_file = """ import pytest class TestA: @@ -1214,8 +1235,8 @@ class TestB: def test(self, i): pass """ - testdir.makepyfile(test_a=test_file, test_b=test_file) - result = testdir.runpytest("-n2", "--dist=loadfile", "-v") + pytester.makepyfile(test_a=test_file, test_b=test_file) + result = pytester.runpytest("-n2", "--dist=loadfile", "-v") test_a_workers_and_test_count = get_workers_and_test_count_by_prefix( "test_a.py::TestA", result.outlines ) @@ -1232,8 +1253,8 @@ def test(self, i): {"gw1": 0}, ) or test_b_workers_and_test_count in ({"gw0": 0}, {"gw1": 10}) - def test_by_class(self, testdir): - testdir.makepyfile( + def test_by_class(self, pytester: pytest.Pytester) -> None: + pytester.makepyfile( test_a=""" import pytest class TestA: @@ -1247,7 +1268,7 @@ def test(self, i): pass """ ) - result = testdir.runpytest("-n2", "--dist=loadfile", "-v") + result = pytester.runpytest("-n2", "--dist=loadfile", "-v") test_a_workers_and_test_count = get_workers_and_test_count_by_prefix( "test_a.py::TestA", result.outlines ) @@ -1264,7 +1285,7 @@ def 
test(self, i): {"gw1": 0}, ) or test_b_workers_and_test_count in ({"gw0": 0}, {"gw1": 10}) - def test_module_single_start(self, testdir): + def test_module_single_start(self, pytester: pytest.Pytester) -> None: """Fix test suite never finishing in case all workers start with a single test (#277).""" test_file1 = """ import pytest @@ -1278,8 +1299,8 @@ def test_1(): def test_2(): pass """ - testdir.makepyfile(test_a=test_file1, test_b=test_file1, test_c=test_file2) - result = testdir.runpytest("-n2", "--dist=loadfile", "-v") + pytester.makepyfile(test_a=test_file1, test_b=test_file1, test_c=test_file2) + result = pytester.runpytest("-n2", "--dist=loadfile", "-v") a = get_workers_and_test_count_by_prefix("test_a.py::test", result.outlines) b = get_workers_and_test_count_by_prefix("test_b.py::test", result.outlines) c1 = get_workers_and_test_count_by_prefix("test_c.py::test_1", result.outlines) @@ -1290,6 +1311,118 @@ def test_2(): assert c1 == c2 +class TestGroupScope: + def test_by_module(self, testdir): + test_file = """ + import pytest + class TestA: + @pytest.mark.xdist_group(name="xdist_group") + @pytest.mark.parametrize('i', range(5)) + def test(self, i): + pass + """ + testdir.makepyfile(test_a=test_file, test_b=test_file) + result = testdir.runpytest("-n2", "--dist=loadgroup", "-v") + test_a_workers_and_test_count = get_workers_and_test_count_by_prefix( + "test_a.py::TestA", result.outlines + ) + test_b_workers_and_test_count = get_workers_and_test_count_by_prefix( + "test_b.py::TestA", result.outlines + ) + + assert test_a_workers_and_test_count in ( + {"gw0": 5}, + {"gw1": 0}, + ) or test_a_workers_and_test_count in ({"gw0": 0}, {"gw1": 5}) + assert test_b_workers_and_test_count in ( + {"gw0": 5}, + {"gw1": 0}, + ) or test_b_workers_and_test_count in ({"gw0": 0}, {"gw1": 5}) + assert ( + test_a_workers_and_test_count.items() + == test_b_workers_and_test_count.items() + ) + + def test_by_class(self, testdir): + testdir.makepyfile( + test_a=""" + import 
pytest + class TestA: + @pytest.mark.xdist_group(name="xdist_group") + @pytest.mark.parametrize('i', range(10)) + def test(self, i): + pass + class TestB: + @pytest.mark.xdist_group(name="xdist_group") + @pytest.mark.parametrize('i', range(10)) + def test(self, i): + pass + """ + ) + result = testdir.runpytest("-n2", "--dist=loadgroup", "-v") + test_a_workers_and_test_count = get_workers_and_test_count_by_prefix( + "test_a.py::TestA", result.outlines + ) + test_b_workers_and_test_count = get_workers_and_test_count_by_prefix( + "test_a.py::TestB", result.outlines + ) + + assert test_a_workers_and_test_count in ( + {"gw0": 10}, + {"gw1": 0}, + ) or test_a_workers_and_test_count in ({"gw0": 0}, {"gw1": 10}) + assert test_b_workers_and_test_count in ( + {"gw0": 10}, + {"gw1": 0}, + ) or test_b_workers_and_test_count in ({"gw0": 0}, {"gw1": 10}) + assert ( + test_a_workers_and_test_count.items() + == test_b_workers_and_test_count.items() + ) + + def test_module_single_start(self, testdir): + test_file1 = """ + import pytest + @pytest.mark.xdist_group(name="xdist_group") + def test(): + pass + """ + test_file2 = """ + import pytest + def test_1(): + pass + @pytest.mark.xdist_group(name="xdist_group") + def test_2(): + pass + """ + testdir.makepyfile(test_a=test_file1, test_b=test_file1, test_c=test_file2) + result = testdir.runpytest("-n2", "--dist=loadgroup", "-v") + a = get_workers_and_test_count_by_prefix("test_a.py::test", result.outlines) + b = get_workers_and_test_count_by_prefix("test_b.py::test", result.outlines) + c = get_workers_and_test_count_by_prefix("test_c.py::test_2", result.outlines) + + assert a.keys() == b.keys() and b.keys() == c.keys() + + def test_with_two_group_names(self, testdir): + test_file = """ + import pytest + @pytest.mark.xdist_group(name="group1") + def test_1(): + pass + @pytest.mark.xdist_group("group2") + def test_2(): + pass + """ + testdir.makepyfile(test_a=test_file, test_b=test_file) + result = testdir.runpytest("-n2", 
"--dist=loadgroup", "-v") + a_1 = get_workers_and_test_count_by_prefix("test_a.py::test_1", result.outlines) + a_2 = get_workers_and_test_count_by_prefix("test_a.py::test_2", result.outlines) + b_1 = get_workers_and_test_count_by_prefix("test_b.py::test_1", result.outlines) + b_2 = get_workers_and_test_count_by_prefix("test_b.py::test_2", result.outlines) + + assert a_1.keys() == b_1.keys() and a_2.keys() == b_2.keys() + + class TestLocking: _test_content = """ class TestClassName%s(object): @@ -1323,24 +1456,24 @@ def test_c(self): ) @pytest.mark.parametrize("scope", ["each", "load", "loadscope", "loadfile", "no"]) - def test_single_file(self, testdir, scope): - testdir.makepyfile(test_a=self.test_file1) - result = testdir.runpytest("-n2", "--dist=%s" % scope, "-v") + def test_single_file(self, pytester, scope) -> None: + pytester.makepyfile(test_a=self.test_file1) + result = pytester.runpytest("-n2", "--dist=%s" % scope, "-v") result.assert_outcomes(passed=(12 if scope != "each" else 12 * 2)) @pytest.mark.parametrize("scope", ["each", "load", "loadscope", "loadfile", "no"]) - def test_multi_file(self, testdir, scope): - testdir.makepyfile( + def test_multi_file(self, pytester, scope) -> None: + pytester.makepyfile( test_a=self.test_file1, test_b=self.test_file1, test_c=self.test_file1, test_d=self.test_file1, ) - result = testdir.runpytest("-n2", "--dist=%s" % scope, "-v") + result = pytester.runpytest("-n2", "--dist=%s" % scope, "-v") result.assert_outcomes(passed=(48 if scope != "each" else 48 * 2)) -def parse_tests_and_workers_from_output(lines): +def parse_tests_and_workers_from_output(lines: List[str]) -> List[Tuple[str, str, str]]: result = [] for line in lines: # example match: "[gw0] PASSED test_a.py::test[7]" @@ -1361,8 +1494,10 @@ def parse_tests_and_workers_from_output(lines): return result -def get_workers_and_test_count_by_prefix(prefix, lines, expected_status="PASSED"): - result = {} +def get_workers_and_test_count_by_prefix( + prefix: str, lines: 
List[str], expected_status: str = "PASSED" +) -> Dict[str, int]: + result: Dict[str, int] = {} for worker, status, nodeid in parse_tests_and_workers_from_output(lines): if expected_status == status and nodeid.startswith(prefix): result[worker] = result.get(worker, 0) + 1 @@ -1387,19 +1522,24 @@ def __init__(self): return FakeRequest() - def test_is_xdist_worker(self, fake_request): + def test_is_xdist_worker(self, fake_request) -> None: assert xdist.is_xdist_worker(fake_request) del fake_request.config.workerinput assert not xdist.is_xdist_worker(fake_request) - def test_is_xdist_master(self, fake_request): + def test_is_xdist_controller(self, fake_request) -> None: assert not xdist.is_xdist_master(fake_request) + assert not xdist.is_xdist_controller(fake_request) + del fake_request.config.workerinput assert xdist.is_xdist_master(fake_request) + assert xdist.is_xdist_controller(fake_request) + fake_request.config.option.dist = "no" assert not xdist.is_xdist_master(fake_request) + assert not xdist.is_xdist_controller(fake_request) - def test_get_xdist_worker_id(self, fake_request): + def test_get_xdist_worker_id(self, fake_request) -> None: assert xdist.get_xdist_worker_id(fake_request) == "gw5" del fake_request.config.workerinput assert xdist.get_xdist_worker_id(fake_request) == "master" diff --git a/testing/conftest.py b/testing/conftest.py index 52f03082..dd7293d2 100644 --- a/testing/conftest.py +++ b/testing/conftest.py @@ -1,12 +1,13 @@ -import py -import pytest import execnet +import pytest +import shutil +from typing import List pytest_plugins = "pytester" @pytest.fixture(autouse=True) -def _divert_atexit(request, monkeypatch): +def _divert_atexit(request, monkeypatch: pytest.MonkeyPatch): import atexit finalizers = [] @@ -23,7 +24,7 @@ def fake_register(func, *args, **kwargs): func(*args, **kwargs) -def pytest_addoption(parser): +def pytest_addoption(parser) -> None: parser.addoption( "--gx", action="append", @@ -33,28 +34,28 @@ def 
pytest_addoption(parser): @pytest.fixture -def specssh(request): +def specssh(request) -> str: return getspecssh(request.config) # configuration information for tests -def getgspecs(config): +def getgspecs(config) -> List[execnet.XSpec]: return [execnet.XSpec(spec) for spec in config.getvalueorskip("gspecs")] -def getspecssh(config): +def getspecssh(config) -> str: # type: ignore[return] xspecs = getgspecs(config) for spec in xspecs: if spec.ssh: - if not py.path.local.sysfind("ssh"): - py.test.skip("command not found: ssh") + if not shutil.which("ssh"): + pytest.skip("command not found: ssh") return str(spec) - py.test.skip("need '--gx ssh=...'") + pytest.skip("need '--gx ssh=...'") -def getsocketspec(config): +def getsocketspec(config) -> execnet.XSpec: xspecs = getgspecs(config) for spec in xspecs: if spec.socket: return spec - py.test.skip("need '--gx socket=...'") + pytest.skip("need '--gx socket=...'") diff --git a/testing/test_dsession.py b/testing/test_dsession.py index b015c75e..d3a57152 100644 --- a/testing/test_dsession.py +++ b/testing/test_dsession.py @@ -1,59 +1,44 @@ from xdist.dsession import DSession, get_default_max_worker_restart from xdist.report import report_collection_diff from xdist.scheduler import EachScheduling, LoadScheduling +from typing import Optional -import py import pytest import execnet -XSpec = execnet.XSpec - - -def run(item, node, excinfo=None): - runner = item.config.pluginmanager.getplugin("runner") - rep = runner.ItemTestReport(item=item, excinfo=excinfo, when="call") - rep.node = node - return rep - class MockGateway: - _count = 0 - - def __init__(self): + def __init__(self) -> None: + self._count = 0 self.id = str(self._count) self._count += 1 class MockNode: - def __init__(self): - self.sent = [] + def __init__(self) -> None: + self.sent = [] # type: ignore[var-annotated] self.gateway = MockGateway() self._shutdown = False - def send_runtest_some(self, indices): + def send_runtest_some(self, indices) -> None: 
self.sent.extend(indices) - def send_runtest_all(self): + def send_runtest_all(self) -> None: self.sent.append("ALL") - def shutdown(self): + def shutdown(self) -> None: self._shutdown = True @property - def shutting_down(self): + def shutting_down(self) -> bool: return self._shutdown -def dumpqueue(queue): - while queue.qsize(): - print(queue.get()) - - class TestEachScheduling: - def test_schedule_load_simple(self, testdir): + def test_schedule_load_simple(self, pytester: pytest.Pytester) -> None: node1 = MockNode() node2 = MockNode() - config = testdir.parseconfig("--tx=2*popen") + config = pytester.parseconfig("--tx=2*popen") sched = EachScheduling(config) sched.add_node(node1) sched.add_node(node2) @@ -74,9 +59,9 @@ def test_schedule_load_simple(self, testdir): sched.mark_test_complete(node2, 0) assert sched.tests_finished - def test_schedule_remove_node(self, testdir): + def test_schedule_remove_node(self, pytester: pytest.Pytester) -> None: node1 = MockNode() - config = testdir.parseconfig("--tx=popen") + config = pytester.parseconfig("--tx=popen") sched = EachScheduling(config) sched.add_node(node1) collection = ["a.py::test_1"] @@ -93,8 +78,8 @@ def test_schedule_remove_node(self, testdir): class TestLoadScheduling: - def test_schedule_load_simple(self, testdir): - config = testdir.parseconfig("--tx=2*popen") + def test_schedule_load_simple(self, pytester: pytest.Pytester) -> None: + config = pytester.parseconfig("--tx=2*popen") sched = LoadScheduling(config) sched.add_node(MockNode()) sched.add_node(MockNode()) @@ -117,8 +102,8 @@ def test_schedule_load_simple(self, testdir): sched.mark_test_complete(node1, node1.sent[0]) assert sched.tests_finished - def test_schedule_batch_size(self, testdir): - config = testdir.parseconfig("--tx=2*popen") + def test_schedule_batch_size(self, pytester: pytest.Pytester) -> None: + config = pytester.parseconfig("--tx=2*popen") sched = LoadScheduling(config) sched.add_node(MockNode()) sched.add_node(MockNode()) @@ -144,8 
+129,8 @@ def test_schedule_batch_size(self, testdir): assert node1.sent == [0, 2, 4, 5] assert not sched.pending - def test_schedule_fewer_tests_than_nodes(self, testdir): - config = testdir.parseconfig("--tx=2*popen") + def test_schedule_fewer_tests_than_nodes(self, pytester: pytest.Pytester) -> None: + config = pytester.parseconfig("--tx=2*popen") sched = LoadScheduling(config) sched.add_node(MockNode()) sched.add_node(MockNode()) @@ -164,8 +149,10 @@ def test_schedule_fewer_tests_than_nodes(self, testdir): assert sent3 == [] assert not sched.pending - def test_schedule_fewer_than_two_tests_per_node(self, testdir): - config = testdir.parseconfig("--tx=2*popen") + def test_schedule_fewer_than_two_tests_per_node( + self, pytester: pytest.Pytester + ) -> None: + config = pytester.parseconfig("--tx=2*popen") sched = LoadScheduling(config) sched.add_node(MockNode()) sched.add_node(MockNode()) @@ -184,9 +171,9 @@ def test_schedule_fewer_than_two_tests_per_node(self, testdir): assert sent3 == [2] assert not sched.pending - def test_add_remove_node(self, testdir): + def test_add_remove_node(self, pytester: pytest.Pytester) -> None: node = MockNode() - config = testdir.parseconfig("--tx=popen") + config = pytester.parseconfig("--tx=popen") sched = LoadScheduling(config) sched.add_node(node) collection = ["test_file.py::test_func"] @@ -197,7 +184,7 @@ def test_add_remove_node(self, testdir): crashitem = sched.remove_node(node) assert crashitem == collection[0] - def test_different_tests_collected(self, testdir): + def test_different_tests_collected(self, pytester: pytest.Pytester) -> None: """ Test that LoadScheduling is reporting collection errors when different test ids are collected by workers. 
@@ -215,7 +202,7 @@ def pytest_collectreport(self, report): self.reports.append(report) collect_hook = CollectHook() - config = testdir.parseconfig("--tx=2*popen") + config = pytester.parseconfig("--tx=2*popen") config.pluginmanager.register(collect_hook, "collect_hook") node1 = MockNode() node2 = MockNode() @@ -231,9 +218,9 @@ def pytest_collectreport(self, report): class TestDistReporter: - @py.test.mark.xfail - def test_rsync_printing(self, testdir, linecomp): - config = testdir.parseconfig() + @pytest.mark.xfail + def test_rsync_printing(self, pytester: pytest.Pytester, linecomp) -> None: + config = pytester.parseconfig() from _pytest.pytest_terminal import TerminalReporter rep = TerminalReporter(config, file=linecomp.stringio) @@ -258,21 +245,21 @@ class gw2: # linecomp.assert_contains_lines([ # "*X1*popen*xyz*2.5*" # ]) - dsession.pytest_xdist_rsyncstart(source="hello", gateways=[gw1, gw2]) + dsession.pytest_xdist_rsyncstart(source="hello", gateways=[gw1, gw2]) # type: ignore[attr-defined] linecomp.assert_contains_lines(["[X1,X2] rsyncing: hello"]) -def test_report_collection_diff_equal(): +def test_report_collection_diff_equal() -> None: """Test reporting of equal collections.""" from_collection = to_collection = ["aaa", "bbb", "ccc"] assert report_collection_diff(from_collection, to_collection, 1, 2) is None -def test_default_max_worker_restart(): +def test_default_max_worker_restart() -> None: class config: class option: - maxworkerrestart = None - numprocesses = 0 + maxworkerrestart: Optional[str] = None + numprocesses: int = 0 assert get_default_max_worker_restart(config) is None @@ -286,7 +273,7 @@ class option: assert get_default_max_worker_restart(config) == 0 -def test_report_collection_diff_different(): +def test_report_collection_diff_different() -> None: """Test reporting of different collections.""" from_collection = ["aaa", "bbb", "ccc", "YYY"] to_collection = ["aZa", "bbb", "XXX", "ccc"] @@ -303,7 +290,8 @@ def 
test_report_collection_diff_different(): " bbb\n" "+XXX\n" " ccc\n" - "-YYY" + "-YYY\n" + "To see why this happens see Known limitations in documentation" ) msg = report_collection_diff(from_collection, to_collection, "1", "2") @@ -311,8 +299,8 @@ def test_report_collection_diff_different(): @pytest.mark.xfail(reason="duplicate test ids not supported yet") -def test_pytest_issue419(testdir): - testdir.makepyfile( +def test_pytest_issue419(pytester: pytest.Pytester) -> None: + pytester.makepyfile( """ import pytest @@ -321,6 +309,6 @@ def test_2011_table(birth_year): pass """ ) - reprec = testdir.inline_run("-n1") + reprec = pytester.inline_run("-n1") reprec.assertoutcome(passed=2) assert 0 diff --git a/testing/test_looponfail.py b/testing/test_looponfail.py index 4b69a85f..c70b00f5 100644 --- a/testing/test_looponfail.py +++ b/testing/test_looponfail.py @@ -1,90 +1,106 @@ import py import pytest -from pkg_resources import parse_version +import shutil +import textwrap +from pathlib import Path from xdist.looponfail import RemoteControl from xdist.looponfail import StatRecorder +PYTEST_GTE_7 = hasattr(pytest, "version_tuple") and pytest.version_tuple >= (7, 0) # type: ignore[attr-defined] + + class TestStatRecorder: - def test_filechange(self, tmpdir): - tmp = tmpdir - hello = tmp.ensure("hello.py") - sd = StatRecorder([tmp]) + def test_filechange(self, tmp_path: Path) -> None: + tmp = tmp_path + hello = tmp / "hello.py" + hello.touch() + sd = StatRecorder([py.path.local(tmp)]) changed = sd.check() assert not changed - hello.write("world") + hello.write_text("world") changed = sd.check() assert changed - (hello + "c").write("hello") + hello.with_suffix(".pyc").write_text("hello") changed = sd.check() assert not changed - p = tmp.ensure("new.py") + p = tmp / "new.py" + p.touch() changed = sd.check() assert changed - p.remove() + p.unlink() changed = sd.check() assert changed - tmp.join("a", "b", "c.py").ensure() + tmp.joinpath("a", "b").mkdir(parents=True) + 
tmp.joinpath("a", "b", "c.py").touch() changed = sd.check() assert changed - tmp.join("a", "c.txt").ensure() + tmp.joinpath("a", "c.txt").touch() changed = sd.check() assert changed changed = sd.check() assert not changed - tmp.join("a").remove() + shutil.rmtree(str(tmp.joinpath("a"))) changed = sd.check() assert changed - def test_dirchange(self, tmpdir): - tmp = tmpdir - tmp.ensure("dir", "hello.py") - sd = StatRecorder([tmp]) - assert not sd.fil(tmp.join("dir")) - - def test_filechange_deletion_race(self, tmpdir, monkeypatch): - tmp = tmpdir - sd = StatRecorder([tmp]) + def test_dirchange(self, tmp_path: Path) -> None: + tmp = tmp_path + tmp.joinpath("dir").mkdir() + tmp.joinpath("dir", "hello.py").touch() + sd = StatRecorder([py.path.local(tmp)]) + assert not sd.fil(py.path.local(tmp / "dir")) + + def test_filechange_deletion_race( + self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch + ) -> None: + tmp = tmp_path + pytmp = py.path.local(tmp) + sd = StatRecorder([pytmp]) changed = sd.check() assert not changed - p = tmp.ensure("new.py") + p = tmp.joinpath("new.py") + p.touch() changed = sd.check() assert changed - p.remove() + p.unlink() # make check()'s visit() call return our just removed # path as if we were in a race condition - monkeypatch.setattr(tmp, "visit", lambda *args: [p]) + monkeypatch.setattr(pytmp, "visit", lambda *args: [py.path.local(p)]) changed = sd.check() assert changed - def test_pycremoval(self, tmpdir): - tmp = tmpdir - hello = tmp.ensure("hello.py") - sd = StatRecorder([tmp]) + def test_pycremoval(self, tmp_path: Path) -> None: + tmp = tmp_path + hello = tmp / "hello.py" + hello.touch() + sd = StatRecorder([py.path.local(tmp)]) changed = sd.check() assert not changed - pycfile = hello + "c" - pycfile.ensure() - hello.write("world") + pycfile = hello.with_suffix(".pyc") + pycfile.touch() + hello.write_text("world") changed = sd.check() assert changed - assert not pycfile.check() + assert not pycfile.exists() - def 
test_waitonchange(self, tmpdir, monkeypatch): - tmp = tmpdir - sd = StatRecorder([tmp]) + def test_waitonchange( + self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch + ) -> None: + tmp = tmp_path + sd = StatRecorder([py.path.local(tmp)]) ret_values = [True, False] monkeypatch.setattr(StatRecorder, "check", lambda self: ret_values.pop()) @@ -93,63 +109,78 @@ def test_waitonchange(self, tmpdir, monkeypatch): class TestRemoteControl: - def test_nofailures(self, testdir): - item = testdir.getitem("def test_func(): pass\n") + def test_nofailures(self, pytester: pytest.Pytester) -> None: + item = pytester.getitem("def test_func(): pass\n") control = RemoteControl(item.config) control.setup() topdir, failures = control.runsession()[:2] assert not failures - def test_failures_somewhere(self, testdir): - item = testdir.getitem("def test_func():\n assert 0\n") + def test_failures_somewhere(self, pytester: pytest.Pytester) -> None: + item = pytester.getitem("def test_func():\n assert 0\n") control = RemoteControl(item.config) control.setup() failures = control.runsession() assert failures control.setup() - item.fspath.write("def test_func():\n assert 1\n") - removepyc(item.fspath) + item_path = item.path if PYTEST_GTE_7 else Path(str(item.fspath)) # type: ignore[attr-defined] + item_path.write_text("def test_func():\n assert 1\n") + removepyc(item_path) topdir, failures = control.runsession()[:2] assert not failures - def test_failure_change(self, testdir): - modcol = testdir.getitem( - """ - def test_func(): - assert 0 - """ + def test_failure_change(self, pytester: pytest.Pytester) -> None: + modcol = pytester.getitem( + textwrap.dedent( + """ + def test_func(): + assert 0 + """ + ) ) control = RemoteControl(modcol.config) control.loop_once() assert control.failures - modcol.fspath.write( - py.code.Source( + if PYTEST_GTE_7: + modcol_path = modcol.path # type:ignore[attr-defined] + else: + modcol_path = Path(str(modcol.fspath)) + + modcol_path.write_text( + 
textwrap.dedent( + """ + def test_func(): + assert 1 + def test_new(): + assert 0 """ - def test_func(): - assert 1 - def test_new(): - assert 0 - """ ) ) - removepyc(modcol.fspath) + removepyc(modcol_path) control.loop_once() assert not control.failures control.loop_once() assert control.failures assert str(control.failures).find("test_new") != -1 - def test_failure_subdir_no_init(self, testdir): - modcol = testdir.getitem( - """ - def test_func(): - assert 0 - """ + def test_failure_subdir_no_init( + self, pytester: pytest.Pytester, monkeypatch: pytest.MonkeyPatch + ) -> None: + modcol = pytester.getitem( + textwrap.dedent( + """ + def test_func(): + assert 0 + """ + ) ) - parent = modcol.fspath.dirpath().dirpath() - parent.chdir() + if PYTEST_GTE_7: + parent = modcol.path.parent.parent # type: ignore[attr-defined] + else: + parent = Path(modcol.fspath.dirpath().dirpath()) + monkeypatch.chdir(parent) modcol.config.args = [ - py.path.local(x).relto(parent) for x in modcol.config.args + str(Path(x).relative_to(parent)) for x in modcol.config.args ] control = RemoteControl(modcol.config) control.loop_once() @@ -159,57 +190,63 @@ def test_func(): class TestLooponFailing: - def test_looponfail_from_fail_to_ok(self, testdir): - modcol = testdir.getmodulecol( - """ - def test_one(): - x = 0 - assert x == 1 - def test_two(): - assert 1 - """ + def test_looponfail_from_fail_to_ok(self, pytester: pytest.Pytester) -> None: + modcol = pytester.getmodulecol( + textwrap.dedent( + """ + def test_one(): + x = 0 + assert x == 1 + def test_two(): + assert 1 + """ + ) ) remotecontrol = RemoteControl(modcol.config) remotecontrol.loop_once() assert len(remotecontrol.failures) == 1 - modcol.fspath.write( - py.code.Source( + modcol_path = modcol.path if PYTEST_GTE_7 else Path(modcol.fspath) + modcol_path.write_text( + textwrap.dedent( + """ + def test_one(): + assert 1 + def test_two(): + assert 1 """ - def test_one(): - assert 1 - def test_two(): - assert 1 - """ ) ) - 
removepyc(modcol.fspath) + removepyc(modcol_path) remotecontrol.loop_once() assert not remotecontrol.failures - def test_looponfail_from_one_to_two_tests(self, testdir): - modcol = testdir.getmodulecol( - """ - def test_one(): - assert 0 - """ + def test_looponfail_from_one_to_two_tests(self, pytester: pytest.Pytester) -> None: + modcol = pytester.getmodulecol( + textwrap.dedent( + """ + def test_one(): + assert 0 + """ + ) ) remotecontrol = RemoteControl(modcol.config) remotecontrol.loop_once() assert len(remotecontrol.failures) == 1 assert "test_one" in remotecontrol.failures[0] - modcol.fspath.write( - py.code.Source( + modcol_path = modcol.path if PYTEST_GTE_7 else Path(modcol.fspath) + modcol_path.write_text( + textwrap.dedent( + """ + def test_one(): + assert 1 # passes now + def test_two(): + assert 0 # new and fails """ - def test_one(): - assert 1 # passes now - def test_two(): - assert 0 # new and fails - """ ) ) - removepyc(modcol.fspath) + removepyc(modcol_path) remotecontrol.loop_once() assert len(remotecontrol.failures) == 0 remotecontrol.loop_once() @@ -217,47 +254,49 @@ def test_two(): assert "test_one" not in remotecontrol.failures[0] assert "test_two" in remotecontrol.failures[0] - @pytest.mark.xfail( - parse_version(pytest.__version__) >= parse_version("3.1"), - reason="broken by pytest 3.1+", - strict=True, - ) - def test_looponfail_removed_test(self, testdir): - modcol = testdir.getmodulecol( - """ - def test_one(): - assert 0 - def test_two(): - assert 0 - """ + @pytest.mark.xfail(reason="broken by pytest 3.1+", strict=True) + def test_looponfail_removed_test(self, pytester: pytest.Pytester) -> None: + modcol = pytester.getmodulecol( + textwrap.dedent( + """ + def test_one(): + assert 0 + def test_two(): + assert 0 + """ + ) ) remotecontrol = RemoteControl(modcol.config) remotecontrol.loop_once() assert len(remotecontrol.failures) == 2 - modcol.fspath.write( - py.code.Source( + modcol.path.write_text( + textwrap.dedent( + """ + def test_xxx(): 
# renamed test + assert 0 + def test_two(): + assert 1 # pass now """ - def test_xxx(): # renamed test - assert 0 - def test_two(): - assert 1 # pass now - """ ) ) - removepyc(modcol.fspath) + removepyc(modcol.path) remotecontrol.loop_once() assert len(remotecontrol.failures) == 0 remotecontrol.loop_once() assert len(remotecontrol.failures) == 1 - def test_looponfail_multiple_errors(self, testdir, monkeypatch): - modcol = testdir.getmodulecol( - """ - def test_one(): - assert 0 - """ + def test_looponfail_multiple_errors( + self, pytester: pytest.Pytester, monkeypatch: pytest.MonkeyPatch + ) -> None: + modcol = pytester.getmodulecol( + textwrap.dedent( + """ + def test_one(): + assert 0 + """ + ) ) remotecontrol = RemoteControl(modcol.config) orig_runsession = remotecontrol.runsession @@ -274,55 +313,59 @@ def runsession_dups(): class TestFunctional: - def test_fail_to_ok(self, testdir): - p = testdir.makepyfile( - """ - def test_one(): - x = 0 - assert x == 1 - """ + def test_fail_to_ok(self, pytester: pytest.Pytester) -> None: + p = pytester.makepyfile( + textwrap.dedent( + """ + def test_one(): + x = 0 + assert x == 1 + """ + ) ) - # p = testdir.mkdir("sub").join(p1.basename) + # p = pytester.mkdir("sub").join(p1.basename) # p1.move(p) - child = testdir.spawn_pytest("-f %s --traceconfig" % p, expect_timeout=30.0) + child = pytester.spawn_pytest("-f %s --traceconfig" % p, expect_timeout=30.0) child.expect("def test_one") child.expect("x == 1") child.expect("1 failed") child.expect("### LOOPONFAILING ####") child.expect("waiting for changes") - p.write( - py.code.Source( + p.write_text( + textwrap.dedent( """ - def test_one(): - x = 1 - assert x == 1 - """ - ) + def test_one(): + x = 1 + assert x == 1 + """ + ), ) child.expect(".*1 passed.*") child.kill(15) - def test_xfail_passes(self, testdir): - p = testdir.makepyfile( - """ - import py - @py.test.mark.xfail - def test_one(): - pass - """ + def test_xfail_passes(self, pytester: pytest.Pytester) -> None: + p = 
pytester.makepyfile( + textwrap.dedent( + """ + import pytest + @pytest.mark.xfail + def test_one(): + pass + """ + ) ) - child = testdir.spawn_pytest("-f %s" % p, expect_timeout=30.0) + child = pytester.spawn_pytest("-f %s" % p, expect_timeout=30.0) child.expect("1 xpass") # child.expect("### LOOPONFAILING ####") child.expect("waiting for changes") child.kill(15) -def removepyc(path): +def removepyc(path: Path) -> None: # XXX damn those pyc files - pyc = path + "c" - if pyc.check(): - pyc.remove() - c = path.dirpath("__pycache__") - if c.check(): - c.remove() + pyc = path.with_suffix(".pyc") + if pyc.exists(): + pyc.unlink() + c = path.parent / "__pycache__" + if c.exists(): + shutil.rmtree(c) diff --git a/testing/test_newhooks.py b/testing/test_newhooks.py index 741e64fd..012f1ea7 100644 --- a/testing/test_newhooks.py +++ b/testing/test_newhooks.py @@ -3,8 +3,8 @@ class TestHooks: @pytest.fixture(autouse=True) - def create_test_file(self, testdir): - testdir.makepyfile( + def create_test_file(self, pytester: pytest.Pytester) -> None: + pytester.makepyfile( """ import os def test_a(): pass @@ -13,11 +13,11 @@ def test_c(): pass """ ) - def test_runtest_logreport(self, testdir): + def test_runtest_logreport(self, pytester: pytest.Pytester) -> None: """Test that log reports from pytest_runtest_logreport when running with xdist contain "node", "nodeid", "worker_id", and "testrun_uid" attributes. (#8) """ - testdir.makeconftest( + pytester.makeconftest( """ def pytest_runtest_logreport(report): if hasattr(report, 'node'): @@ -35,7 +35,7 @@ def pytest_runtest_logreport(report): % (report.nodeid, report.worker_id, report.testrun_uid)) """ ) - res = testdir.runpytest("-n1", "-s") + res = pytester.runpytest("-n1", "-s") res.stdout.fnmatch_lines( [ "*HOOK: test_runtest_logreport.py::test_a gw0 *", @@ -45,10 +45,9 @@ def pytest_runtest_logreport(report): ] ) - def test_node_collection_finished(self, testdir): - """Test pytest_xdist_node_collection_finished hook (#8). 
- """ - testdir.makeconftest( + def test_node_collection_finished(self, pytester: pytest.Pytester) -> None: + """Test pytest_xdist_node_collection_finished hook (#8).""" + pytester.makeconftest( """ def pytest_xdist_node_collection_finished(node, ids): workerid = node.workerinput['workerid'] @@ -56,8 +55,42 @@ def pytest_xdist_node_collection_finished(node, ids): print("HOOK: %s %s" % (workerid, ', '.join(stripped_ids))) """ ) - res = testdir.runpytest("-n2", "-s") + res = pytester.runpytest("-n2", "-s") res.stdout.fnmatch_lines_random( ["*HOOK: gw0 test_a, test_b, test_c", "*HOOK: gw1 test_a, test_b, test_c"] ) res.stdout.fnmatch_lines(["*3 passed*"]) + + +class TestCrashItem: + @pytest.fixture(autouse=True) + def create_test_file(self, pytester: pytest.Pytester) -> None: + pytester.makepyfile( + """ + import os + def test_a(): pass + def test_b(): os._exit(1) + def test_c(): pass + def test_d(): pass + """ + ) + + def test_handlecrashitem(self, pytester: pytest.Pytester) -> None: + """Test pytest_handlecrashitem hook.""" + pytester.makeconftest( + """ + test_runs = 0 + + def pytest_handlecrashitem(crashitem, report, sched): + global test_runs + + if test_runs == 0: + sched.mark_test_pending(crashitem) + test_runs = 1 + else: + print("HOOK: pytest_handlecrashitem") + """ + ) + res = pytester.runpytest("-n2", "-s") + res.stdout.fnmatch_lines_random(["*HOOK: pytest_handlecrashitem"]) + res.stdout.fnmatch_lines(["*3 passed*"]) diff --git a/testing/test_plugin.py b/testing/test_plugin.py index c1aac652..e50c0cd9 100644 --- a/testing/test_plugin.py +++ b/testing/test_plugin.py @@ -1,44 +1,46 @@ from contextlib import suppress +from pathlib import Path -import py import execnet from xdist.workermanage import NodeManager import pytest -def test_dist_incompatibility_messages(testdir): - result = testdir.runpytest("--pdb", "--looponfail") +def test_dist_incompatibility_messages(pytester: pytest.Pytester) -> None: + result = pytester.runpytest("--pdb", "--looponfail") 
assert result.ret != 0 - result = testdir.runpytest("--pdb", "-n", "3") + result = pytester.runpytest("--pdb", "-n", "3") assert result.ret != 0 assert "incompatible" in result.stderr.str() - result = testdir.runpytest("--pdb", "-d", "--tx", "popen") + result = pytester.runpytest("--pdb", "-d", "--tx", "popen") assert result.ret != 0 assert "incompatible" in result.stderr.str() -def test_dist_options(testdir): +def test_dist_options(pytester: pytest.Pytester) -> None: from xdist.plugin import pytest_cmdline_main as check_options - config = testdir.parseconfigure("-n 2") + config = pytester.parseconfigure("-n 2") check_options(config) assert config.option.dist == "load" assert config.option.tx == ["popen"] * 2 - config = testdir.parseconfigure("--numprocesses", "2") + config = pytester.parseconfigure("--numprocesses", "2") check_options(config) assert config.option.dist == "load" assert config.option.tx == ["popen"] * 2 - config = testdir.parseconfigure("--numprocesses", "3", "--maxprocesses", "2") + config = pytester.parseconfigure("--numprocesses", "3", "--maxprocesses", "2") check_options(config) assert config.option.dist == "load" assert config.option.tx == ["popen"] * 2 - config = testdir.parseconfigure("-d") + config = pytester.parseconfigure("-d") check_options(config) assert config.option.dist == "load" -def test_auto_detect_cpus(testdir, monkeypatch): +def test_auto_detect_cpus( + pytester: pytest.Pytester, monkeypatch: pytest.MonkeyPatch +) -> None: import os from xdist.plugin import pytest_cmdline_main as check_options @@ -56,14 +58,20 @@ def test_auto_detect_cpus(testdir, monkeypatch): monkeypatch.setattr(multiprocessing, "cpu_count", lambda: 99) - config = testdir.parseconfigure("-n2") + config = pytester.parseconfigure("-n2") assert config.getoption("numprocesses") == 2 - config = testdir.parseconfigure("-nauto") + config = pytester.parseconfigure("-nauto") check_options(config) assert config.getoption("numprocesses") == 99 - config = 
testdir.parseconfigure("-nauto", "--pdb") + config = pytester.parseconfigure("-nauto", "--pdb") + check_options(config) + assert config.getoption("usepdb") + assert config.getoption("numprocesses") == 0 + assert config.getoption("dist") == "no" + + config = pytester.parseconfigure("-nlogical", "--pdb") check_options(config) assert config.getoption("usepdb") assert config.getoption("numprocesses") == 0 @@ -71,83 +79,95 @@ def test_auto_detect_cpus(testdir, monkeypatch): monkeypatch.delattr(os, "sched_getaffinity", raising=False) monkeypatch.setenv("TRAVIS", "true") - config = testdir.parseconfigure("-nauto") + config = pytester.parseconfigure("-nauto") check_options(config) assert config.getoption("numprocesses") == 2 -def test_auto_detect_cpus_psutil(testdir, monkeypatch): +def test_auto_detect_cpus_psutil( + pytester: pytest.Pytester, monkeypatch: pytest.MonkeyPatch +) -> None: from xdist.plugin import pytest_cmdline_main as check_options psutil = pytest.importorskip("psutil") - monkeypatch.setattr(psutil, "cpu_count", lambda logical=True: 42) + monkeypatch.setattr(psutil, "cpu_count", lambda logical=True: 84 if logical else 42) - config = testdir.parseconfigure("-nauto") + config = pytester.parseconfigure("-nauto") check_options(config) assert config.getoption("numprocesses") == 42 + config = pytester.parseconfigure("-nlogical") + check_options(config) + assert config.getoption("numprocesses") == 84 + -def test_hook_auto_num_workers(testdir, monkeypatch): +def test_hook_auto_num_workers( + pytester: pytest.Pytester, monkeypatch: pytest.MonkeyPatch +) -> None: from xdist.plugin import pytest_cmdline_main as check_options - testdir.makeconftest( + pytester.makeconftest( """ def pytest_xdist_auto_num_workers(): return 42 """ ) - config = testdir.parseconfigure("-nauto") + config = pytester.parseconfigure("-nauto") + check_options(config) + assert config.getoption("numprocesses") == 42 + + config = pytester.parseconfigure("-nlogical") check_options(config) assert 
config.getoption("numprocesses") == 42 -def test_boxed_with_collect_only(testdir): +def test_boxed_with_collect_only(pytester: pytest.Pytester) -> None: from xdist.plugin import pytest_cmdline_main as check_options - config = testdir.parseconfigure("-n1", "--boxed") + config = pytester.parseconfigure("-n1", "--boxed") check_options(config) assert config.option.forked - config = testdir.parseconfigure("-n1", "--collect-only") + config = pytester.parseconfigure("-n1", "--collect-only") check_options(config) assert not config.option.forked - config = testdir.parseconfigure("-n1", "--boxed", "--collect-only") + config = pytester.parseconfigure("-n1", "--boxed", "--collect-only") check_options(config) assert config.option.forked -def test_dsession_with_collect_only(testdir): +def test_dsession_with_collect_only(pytester: pytest.Pytester) -> None: from xdist.plugin import pytest_cmdline_main as check_options from xdist.plugin import pytest_configure as configure - config = testdir.parseconfigure("-n1") + config = pytester.parseconfigure("-n1") check_options(config) configure(config) assert config.pluginmanager.hasplugin("dsession") - config = testdir.parseconfigure("-n1", "--collect-only") + config = pytester.parseconfigure("-n1", "--collect-only") check_options(config) configure(config) assert not config.pluginmanager.hasplugin("dsession") -def test_testrunuid_provided(testdir): - config = testdir.parseconfigure("--testrunuid", "test123", "--tx=popen") +def test_testrunuid_provided(pytester: pytest.Pytester) -> None: + config = pytester.parseconfigure("--testrunuid", "test123", "--tx=popen") nm = NodeManager(config) assert nm.testrunuid == "test123" -def test_testrunuid_generated(testdir): - config = testdir.parseconfigure("--tx=popen") +def test_testrunuid_generated(pytester: pytest.Pytester) -> None: + config = pytester.parseconfigure("--tx=popen") nm = NodeManager(config) assert len(nm.testrunuid) == 32 class TestDistOptions: - def test_getxspecs(self, testdir): - 
config = testdir.parseconfigure("--tx=popen", "--tx", "ssh=xyz") + def test_getxspecs(self, pytester: pytest.Pytester) -> None: + config = pytester.parseconfigure("--tx=popen", "--tx", "ssh=xyz") nodemanager = NodeManager(config) xspecs = nodemanager._getxspecs() assert len(xspecs) == 2 @@ -155,39 +175,39 @@ def test_getxspecs(self, testdir): assert xspecs[0].popen assert xspecs[1].ssh == "xyz" - def test_xspecs_multiplied(self, testdir): - config = testdir.parseconfigure("--tx=3*popen") + def test_xspecs_multiplied(self, pytester: pytest.Pytester) -> None: + config = pytester.parseconfigure("--tx=3*popen") xspecs = NodeManager(config)._getxspecs() assert len(xspecs) == 3 assert xspecs[1].popen - def test_getrsyncdirs(self, testdir): - config = testdir.parseconfigure("--rsyncdir=" + str(testdir.tmpdir)) + def test_getrsyncdirs(self, pytester: pytest.Pytester) -> None: + config = pytester.parseconfigure("--rsyncdir=" + str(pytester.path)) nm = NodeManager(config, specs=[execnet.XSpec("popen")]) assert not nm._getrsyncdirs() nm = NodeManager(config, specs=[execnet.XSpec("popen//chdir=qwe")]) assert nm.roots - assert testdir.tmpdir in nm.roots + assert pytester.path in nm.roots - def test_getrsyncignore(self, testdir): - config = testdir.parseconfigure("--rsyncignore=fo*") + def test_getrsyncignore(self, pytester: pytest.Pytester) -> None: + config = pytester.parseconfigure("--rsyncignore=fo*") nm = NodeManager(config, specs=[execnet.XSpec("popen//chdir=qwe")]) assert "fo*" in nm.rsyncoptions["ignores"] - def test_getrsyncdirs_with_conftest(self, testdir): - p = py.path.local() - for bn in "x y z".split(): - p.mkdir(bn) - testdir.makeini( + def test_getrsyncdirs_with_conftest(self, pytester: pytest.Pytester) -> None: + p = Path.cwd() + for bn in ("x", "y", "z"): + p.joinpath(bn).mkdir() + pytester.makeini( """ [pytest] rsyncdirs= x """ ) - config = testdir.parseconfigure(testdir.tmpdir, "--rsyncdir=y", "--rsyncdir=z") + config = pytester.parseconfigure(pytester.path, 
"--rsyncdir=y", "--rsyncdir=z") nm = NodeManager(config, specs=[execnet.XSpec("popen//chdir=xyz")]) roots = nm._getrsyncdirs() # assert len(roots) == 3 + 1 # pylib - assert py.path.local("y") in roots - assert py.path.local("z") in roots - assert testdir.tmpdir.join("x") in roots + assert Path("y").resolve() in roots + assert Path("z").resolve() in roots + assert pytester.path.joinpath("x") in roots diff --git a/testing/test_remote.py b/testing/test_remote.py index da2f6a86..348febc5 100644 --- a/testing/test_remote.py +++ b/testing/test_remote.py @@ -1,5 +1,5 @@ -import py import pprint +import py import pytest import sys import uuid @@ -32,16 +32,16 @@ def __str__(self): class WorkerSetup: use_callback = False - def __init__(self, request, testdir): + def __init__(self, request, pytester: pytest.Pytester) -> None: self.request = request - self.testdir = testdir - self.events = Queue() + self.pytester = pytester + self.events = Queue() # type: ignore[var-annotated] - def setup(self,): - self.testdir.chdir() + def setup(self) -> None: + self.pytester.chdir() # import os ; os.environ['EXECNET_DEBUG'] = "2" self.gateway = execnet.makegateway() - self.config = config = self.testdir.parseconfigure() + self.config = config = self.pytester.parseconfigure() putevent = self.use_callback and self.events.put or None class DummyMananger: @@ -68,15 +68,15 @@ def sendcommand(self, name, **kwargs): @pytest.fixture -def worker(request, testdir): - return WorkerSetup(request, testdir) +def worker(request, pytester: pytest.Pytester) -> WorkerSetup: + return WorkerSetup(request, pytester) @pytest.mark.xfail(reason="#59") -def test_remoteinitconfig(testdir): +def test_remoteinitconfig(pytester: pytest.Pytester) -> None: from xdist.remote import remote_initconfig - config1 = testdir.parseconfig() + config1 = pytester.parseconfig() config2 = remote_initconfig(config1.option.__dict__, config1.args) assert config2.option.__dict__ == config1.option.__dict__ assert 
config2.pluginmanager.getplugin("terminal") in (-1, None) @@ -92,8 +92,10 @@ def unserialize(data): return unserialize - def test_basic_collect_and_runtests(self, worker, unserialize_report): - worker.testdir.makepyfile( + def test_basic_collect_and_runtests( + self, worker: WorkerSetup, unserialize_report + ) -> None: + worker.pytester.makepyfile( """ def test_func(): pass @@ -106,7 +108,7 @@ def test_func(): assert ev.name == "collectionstart" assert not ev.kwargs ev = worker.popevent("collectionfinish") - assert ev.kwargs["topdir"] == worker.testdir.tmpdir + assert ev.kwargs["topdir"] == py.path.local(worker.pytester.path) ids = ev.kwargs["ids"] assert len(ids) == 1 worker.sendcommand("runtests", indices=list(range(len(ids)))) @@ -124,8 +126,8 @@ def test_func(): ev = worker.popevent("workerfinished") assert "workeroutput" in ev.kwargs - def test_remote_collect_skip(self, worker, unserialize_report): - worker.testdir.makepyfile( + def test_remote_collect_skip(self, worker: WorkerSetup, unserialize_report) -> None: + worker.pytester.makepyfile( """ import pytest pytest.skip("hello", allow_module_level=True) @@ -142,8 +144,8 @@ def test_remote_collect_skip(self, worker, unserialize_report): ev = worker.popevent("collectionfinish") assert not ev.kwargs["ids"] - def test_remote_collect_fail(self, worker, unserialize_report): - worker.testdir.makepyfile("""aasd qwe""") + def test_remote_collect_fail(self, worker: WorkerSetup, unserialize_report) -> None: + worker.pytester.makepyfile("""aasd qwe""") worker.setup() ev = worker.popevent("collectionstart") assert not ev.kwargs @@ -154,8 +156,8 @@ def test_remote_collect_fail(self, worker, unserialize_report): ev = worker.popevent("collectionfinish") assert not ev.kwargs["ids"] - def test_runtests_all(self, worker, unserialize_report): - worker.testdir.makepyfile( + def test_runtests_all(self, worker: WorkerSetup, unserialize_report) -> None: + worker.pytester.makepyfile( """ def test_func(): pass def test_func2(): pass 
@@ -181,17 +183,19 @@ def test_func2(): pass ev = worker.popevent("workerfinished") assert "workeroutput" in ev.kwargs - def test_happy_run_events_converted(self, testdir, worker): - py.test.xfail("implement a simple test for event production") - assert not worker.use_callback - worker.testdir.makepyfile( + def test_happy_run_events_converted( + self, pytester: pytest.Pytester, worker: WorkerSetup + ) -> None: + pytest.xfail("implement a simple test for event production") + assert not worker.use_callback # type: ignore[unreachable] + worker.pytester.makepyfile( """ def test_func(): pass """ ) worker.setup() - hookrec = testdir.getreportrecorder(worker.config) + hookrec = pytester.getreportrecorder(worker.config) for data in worker.slp.channel: worker.slp.process_from_remote(data) worker.slp.process_from_remote(worker.slp.ENDMARK) @@ -207,7 +211,9 @@ def test_func(): ] ) - def test_process_from_remote_error_handling(self, worker, capsys): + def test_process_from_remote_error_handling( + self, worker: WorkerSetup, capsys: pytest.CaptureFixture[str] + ) -> None: worker.use_callback = True worker.setup() worker.slp.process_from_remote(("", ())) @@ -217,8 +223,8 @@ def test_process_from_remote_error_handling(self, worker, capsys): assert ev.name == "errordown" -def test_remote_env_vars(testdir): - testdir.makepyfile( +def test_remote_env_vars(pytester: pytest.Pytester) -> None: + pytester.makepyfile( """ import os def test(): @@ -227,13 +233,13 @@ def test(): assert os.environ['PYTEST_XDIST_WORKER_COUNT'] == '2' """ ) - result = testdir.runpytest("-n2", "--max-worker-restart=0") + result = pytester.runpytest("-n2", "--max-worker-restart=0") assert result.ret == 0 -def test_remote_inner_argv(testdir): +def test_remote_inner_argv(pytester: pytest.Pytester) -> None: """Test/document the behavior due to execnet using `python -c`.""" - testdir.makepyfile( + pytester.makepyfile( """ import sys @@ -241,14 +247,14 @@ def test_argv(): assert sys.argv == ["-c"] """ ) - result = 
testdir.runpytest("-n1") + result = pytester.runpytest("-n1") assert result.ret == 0 -def test_remote_mainargv(testdir): +def test_remote_mainargv(pytester: pytest.Pytester) -> None: outer_argv = sys.argv - testdir.makepyfile( + pytester.makepyfile( """ def test_mainargv(request): assert request.config.workerinput["mainargv"] == {!r} @@ -256,14 +262,14 @@ def test_mainargv(request): outer_argv ) ) - result = testdir.runpytest("-n1") + result = pytester.runpytest("-n1") assert result.ret == 0 -def test_remote_usage_prog(testdir, request): +def test_remote_usage_prog(pytester: pytest.Pytester, request) -> None: if not hasattr(request.config._parser, "prog"): pytest.skip("prog not available in config parser") - testdir.makeconftest( + pytester.makeconftest( """ import pytest @@ -278,7 +284,7 @@ def pytest_configure(config): config_parser = config._parser """ ) - testdir.makepyfile( + pytester.makepyfile( """ import sys @@ -287,6 +293,20 @@ def test(get_config_parser, request): """ ) - result = testdir.runpytest_subprocess("-n1") + result = pytester.runpytest_subprocess("-n1") assert result.ret == 1 result.stdout.fnmatch_lines(["*usage: *", "*error: my_usage_error"]) + + +def test_remote_sys_path(pytester: pytest.Pytester) -> None: + """Work around sys.path differences due to execnet using `python -c`.""" + pytester.makepyfile( + """ + import sys + + def test_sys_path(): + assert "" not in sys.path + """ + ) + result = pytester.runpytest("-n1") + assert result.ret == 0 diff --git a/testing/test_workermanage.py b/testing/test_workermanage.py index 3cf19a8f..b19c524a 100644 --- a/testing/test_workermanage.py +++ b/testing/test_workermanage.py @@ -1,39 +1,45 @@ +import execnet import py import pytest +import shutil import textwrap -import execnet -from _pytest.pytester import HookRecorder -from xdist import workermanage, newhooks -from xdist.workermanage import HostRSync, NodeManager +import warnings +from pathlib import Path +from util import generate_warning +from xdist 
import workermanage +from xdist.remote import serialize_warning_message +from xdist.workermanage import HostRSync, NodeManager, unserialize_warning_message pytest_plugins = "pytester" @pytest.fixture -def hookrecorder(request, config): - hookrecorder = HookRecorder(config.pluginmanager) - if hasattr(hookrecorder, "start_recording"): - hookrecorder.start_recording(newhooks) - request.addfinalizer(hookrecorder.finish_recording) +def hookrecorder(request, config, pytester: pytest.Pytester): + hookrecorder = pytester.make_hook_recorder(config.pluginmanager) return hookrecorder @pytest.fixture -def config(testdir): - return testdir.parseconfig() +def config(pytester: pytest.Pytester): + return pytester.parseconfig() @pytest.fixture -def mysetup(tmpdir): - class mysetup: - source = tmpdir.mkdir("source") - dest = tmpdir.mkdir("dest") +def source(tmp_path: Path) -> Path: + source = tmp_path / "source" + source.mkdir() + return source + - return mysetup() +@pytest.fixture +def dest(tmp_path: Path) -> Path: + dest = tmp_path / "dest" + dest.mkdir() + return dest @pytest.fixture -def workercontroller(monkeypatch): +def workercontroller(monkeypatch: pytest.MonkeyPatch): class MockController: def __init__(self, *args): pass @@ -46,18 +52,20 @@ def setup(self): class TestNodeManagerPopen: - def test_popen_no_default_chdir(self, config): + def test_popen_no_default_chdir(self, config) -> None: gm = NodeManager(config, ["popen"]) assert gm.specs[0].chdir is None - def test_default_chdir(self, config): + def test_default_chdir(self, config) -> None: specs = ["ssh=noco", "socket=xyz"] for spec in NodeManager(config, specs).specs: assert spec.chdir == "pyexecnetcache" for spec in NodeManager(config, specs, defaultchdir="abc").specs: assert spec.chdir == "abc" - def test_popen_makegateway_events(self, config, hookrecorder, workercontroller): + def test_popen_makegateway_events( + self, config, hookrecorder, workercontroller + ) -> None: hm = NodeManager(config, ["popen"] * 2) 
hm.setup_nodes(None) call = hookrecorder.popcall("pytest_xdist_setupnodes") @@ -72,15 +80,16 @@ def test_popen_makegateway_events(self, config, hookrecorder, workercontroller): hm.teardown_nodes() assert not len(hm.group) - def test_popens_rsync(self, config, mysetup, workercontroller): - source = mysetup.source + def test_popens_rsync( + self, config, source: Path, dest: Path, workercontroller + ) -> None: hm = NodeManager(config, ["popen"] * 2) hm.setup_nodes(None) assert len(hm.group) == 2 for gw in hm.group: class pseudoexec: - args = [] + args = [] # type: ignore[var-annotated] def __init__(self, *args): self.args.extend(args) @@ -97,30 +106,37 @@ def waitclose(self): assert not len(hm.group) assert "sys.path.insert" in gw.remote_exec.args[0] - def test_rsync_popen_with_path(self, config, mysetup, workercontroller): - source, dest = mysetup.source, mysetup.dest + def test_rsync_popen_with_path( + self, config, source: Path, dest: Path, workercontroller + ) -> None: hm = NodeManager(config, ["popen//chdir=%s" % dest] * 1) hm.setup_nodes(None) - source.ensure("dir1", "dir2", "hello") + source.joinpath("dir1", "dir2").mkdir(parents=True) + source.joinpath("dir1", "dir2", "hello").touch() notifications = [] for gw in hm.group: hm.rsync(gw, source, notify=lambda *args: notifications.append(args)) assert len(notifications) == 1 assert notifications[0] == ("rsyncrootready", hm.group["gw0"].spec, source) hm.teardown_nodes() - dest = dest.join(source.basename) - assert dest.join("dir1").check() - assert dest.join("dir1", "dir2").check() - assert dest.join("dir1", "dir2", "hello").check() + dest = dest.joinpath(source.name) + assert dest.joinpath("dir1").exists() + assert dest.joinpath("dir1", "dir2").exists() + assert dest.joinpath("dir1", "dir2", "hello").exists() def test_rsync_same_popen_twice( - self, config, mysetup, hookrecorder, workercontroller - ): - source, dest = mysetup.source, mysetup.dest + self, + config, + source: Path, + dest: Path, + hookrecorder, + 
workercontroller, + ) -> None: hm = NodeManager(config, ["popen//chdir=%s" % dest] * 2) hm.roots = [] hm.setup_nodes(None) - source.ensure("dir1", "dir2", "hello") + source.joinpath("dir1", "dir2").mkdir(parents=True) + source.joinpath("dir1", "dir2", "hello").touch() gw = hm.group[0] hm.rsync(gw, source) call = hookrecorder.popcall("pytest_xdist_rsyncstart") @@ -131,83 +147,98 @@ def test_rsync_same_popen_twice( class TestHRSync: - def test_hrsync_filter(self, mysetup): - source, _ = mysetup.source, mysetup.dest # noqa - source.ensure("dir", "file.txt") - source.ensure(".svn", "entries") - source.ensure(".somedotfile", "moreentries") - source.ensure("somedir", "editfile~") + def test_hrsync_filter(self, source: Path, dest: Path) -> None: + source.joinpath("dir").mkdir() + source.joinpath("dir", "file.txt").touch() + source.joinpath(".svn").mkdir() + source.joinpath(".svn", "entries").touch() + source.joinpath(".somedotfile").mkdir() + source.joinpath(".somedotfile", "moreentries").touch() + source.joinpath("somedir").mkdir() + source.joinpath("somedir", "editfile~").touch() syncer = HostRSync(source, ignores=NodeManager.DEFAULT_IGNORES) - files = list(source.visit(rec=syncer.filter, fil=syncer.filter)) + files = list(py.path.local(source).visit(rec=syncer.filter, fil=syncer.filter)) assert len(files) == 3 basenames = [x.basename for x in files] assert "dir" in basenames assert "file.txt" in basenames assert "somedir" in basenames - def test_hrsync_one_host(self, mysetup): - source, dest = mysetup.source, mysetup.dest + def test_hrsync_one_host(self, source: Path, dest: Path) -> None: gw = execnet.makegateway("popen//chdir=%s" % dest) finished = [] rsync = HostRSync(source) rsync.add_target_host(gw, finished=lambda: finished.append(1)) - source.join("hello.py").write("world") + source.joinpath("hello.py").write_text("world") rsync.send() gw.exit() - assert dest.join(source.basename, "hello.py").check() + assert dest.joinpath(source.name, "hello.py").exists() assert 
len(finished) == 1 class TestNodeManager: - @py.test.mark.xfail(run=False) - def test_rsync_roots_no_roots(self, testdir, mysetup): - mysetup.source.ensure("dir1", "file1").write("hello") - config = testdir.parseconfig(mysetup.source) - nodemanager = NodeManager(config, ["popen//chdir=%s" % mysetup.dest]) + @pytest.mark.xfail(run=False) + def test_rsync_roots_no_roots( + self, pytester: pytest.Pytester, source: Path, dest: Path + ) -> None: + source.joinpath("dir1").mkdir() + source.joinpath("dir1", "file1").write_text("hello") + config = pytester.parseconfig(source) + nodemanager = NodeManager(config, ["popen//chdir=%s" % dest]) # assert nodemanager.config.topdir == source == config.topdir - nodemanager.makegateways() - nodemanager.rsync_roots() - (p,) = nodemanager.gwmanager.multi_exec( + nodemanager.makegateways() # type: ignore[attr-defined] + nodemanager.rsync_roots() # type: ignore[call-arg] + (p,) = nodemanager.gwmanager.multi_exec( # type: ignore[attr-defined] "import os ; channel.send(os.getcwd())" ).receive_each() - p = py.path.local(p) + p = Path(p) print("remote curdir", p) - assert p == mysetup.dest.join(config.topdir.basename) - assert p.join("dir1").check() - assert p.join("dir1", "file1").check() - - def test_popen_rsync_subdir(self, testdir, mysetup, workercontroller): - source, dest = mysetup.source, mysetup.dest - dir1 = mysetup.source.mkdir("dir1") - dir2 = dir1.mkdir("dir2") - dir2.ensure("hello") + assert p == dest.joinpath(config.rootpath.name) + assert p.joinpath("dir1").check() + assert p.joinpath("dir1", "file1").check() + + def test_popen_rsync_subdir( + self, pytester: pytest.Pytester, source: Path, dest: Path, workercontroller + ) -> None: + dir1 = source / "dir1" + dir1.mkdir() + dir2 = dir1 / "dir2" + dir2.mkdir() + dir2.joinpath("hello").touch() for rsyncroot in (dir1, source): - dest.remove() + shutil.rmtree(str(dest), ignore_errors=True) nodemanager = NodeManager( - testdir.parseconfig( + pytester.parseconfig( "--tx", 
"popen//chdir=%s" % dest, "--rsyncdir", rsyncroot, source ) ) nodemanager.setup_nodes(None) # calls .rsync_roots() if rsyncroot == source: - dest = dest.join("source") - assert dest.join("dir1").check() - assert dest.join("dir1", "dir2").check() - assert dest.join("dir1", "dir2", "hello").check() + dest = dest.joinpath("source") + assert dest.joinpath("dir1").exists() + assert dest.joinpath("dir1", "dir2").exists() + assert dest.joinpath("dir1", "dir2", "hello").exists() nodemanager.teardown_nodes() @pytest.mark.parametrize( "flag, expects_report", [("-q", False), ("", False), ("-v", True)] ) def test_rsync_report( - self, testdir, mysetup, workercontroller, capsys, flag, expects_report - ): - source, dest = mysetup.source, mysetup.dest - dir1 = mysetup.source.mkdir("dir1") - args = "--tx", "popen//chdir=%s" % dest, "--rsyncdir", dir1, source + self, + pytester: pytest.Pytester, + source: Path, + dest: Path, + workercontroller, + capsys: pytest.CaptureFixture[str], + flag: str, + expects_report: bool, + ) -> None: + dir1 = source / "dir1" + dir1.mkdir() + args = ["--tx", "popen//chdir=%s" % dest, "--rsyncdir", str(dir1), str(source)] if flag: - args += (flag,) - nodemanager = NodeManager(testdir.parseconfig(*args)) + args.append(flag) + nodemanager = NodeManager(pytester.parseconfig(*args)) nodemanager.setup_nodes(None) # calls .rsync_roots() out, _ = capsys.readouterr() if expects_report: @@ -215,77 +246,163 @@ def test_rsync_report( else: assert "<= pytest/__init__.py" not in out - def test_init_rsync_roots(self, testdir, mysetup, workercontroller): - source, dest = mysetup.source, mysetup.dest - dir2 = source.ensure("dir1", "dir2", dir=1) - source.ensure("dir1", "somefile", dir=1) - dir2.ensure("hello") - source.ensure("bogusdir", "file") - source.join("tox.ini").write( + def test_init_rsync_roots( + self, pytester: pytest.Pytester, source: Path, dest: Path, workercontroller + ) -> None: + dir2 = source.joinpath("dir1", "dir2") + dir2.mkdir(parents=True) + 
source.joinpath("dir1", "somefile").mkdir() + dir2.joinpath("hello").touch() + source.joinpath("bogusdir").mkdir() + source.joinpath("bogusdir", "file").touch() + source.joinpath("tox.ini").write_text( textwrap.dedent( """ - [pytest] - rsyncdirs=dir1/dir2 - """ + [pytest] + rsyncdirs=dir1/dir2 + """ ) ) - config = testdir.parseconfig(source) + config = pytester.parseconfig(source) nodemanager = NodeManager(config, ["popen//chdir=%s" % dest]) nodemanager.setup_nodes(None) # calls .rsync_roots() - assert dest.join("dir2").check() - assert not dest.join("dir1").check() - assert not dest.join("bogus").check() - - def test_rsyncignore(self, testdir, mysetup, workercontroller): - source, dest = mysetup.source, mysetup.dest - dir2 = source.ensure("dir1", "dir2", dir=1) - source.ensure("dir5", "dir6", "bogus") - source.ensure("dir5", "file") - dir2.ensure("hello") - source.ensure("foo", "bar") - source.ensure("bar", "foo") - source.join("tox.ini").write( + assert dest.joinpath("dir2").exists() + assert not dest.joinpath("dir1").exists() + assert not dest.joinpath("bogus").exists() + + def test_rsyncignore( + self, pytester: pytest.Pytester, source: Path, dest: Path, workercontroller + ) -> None: + dir2 = source.joinpath("dir1", "dir2") + dir2.mkdir(parents=True) + source.joinpath("dir5", "dir6").mkdir(parents=True) + source.joinpath("dir5", "dir6", "bogus").touch() + source.joinpath("dir5", "file").touch() + dir2.joinpath("hello").touch() + source.joinpath("foo").mkdir() + source.joinpath("foo", "bar").touch() + source.joinpath("bar").mkdir() + source.joinpath("bar", "foo").touch() + source.joinpath("tox.ini").write_text( textwrap.dedent( """ - [pytest] - rsyncdirs = dir1 dir5 - rsyncignore = dir1/dir2 dir5/dir6 foo* - """ + [pytest] + rsyncdirs = dir1 dir5 + rsyncignore = dir1/dir2 dir5/dir6 foo* + """ ) ) - config = testdir.parseconfig(source) + config = pytester.parseconfig(source) config.option.rsyncignore = ["bar"] nodemanager = NodeManager(config, ["popen//chdir=%s" 
% dest]) nodemanager.setup_nodes(None) # calls .rsync_roots() - assert dest.join("dir1").check() - assert not dest.join("dir1", "dir2").check() - assert dest.join("dir5", "file").check() - assert not dest.join("dir6").check() - assert not dest.join("foo").check() - assert not dest.join("bar").check() - - def test_optimise_popen(self, testdir, mysetup, workercontroller): - source = mysetup.source + assert dest.joinpath("dir1").exists() + assert not dest.joinpath("dir1", "dir2").exists() + assert dest.joinpath("dir5", "file").exists() + assert not dest.joinpath("dir6").exists() + assert not dest.joinpath("foo").exists() + assert not dest.joinpath("bar").exists() + + def test_optimise_popen( + self, pytester: pytest.Pytester, source: Path, dest: Path, workercontroller + ) -> None: specs = ["popen"] * 3 - source.join("conftest.py").write("rsyncdirs = ['a']") - source.ensure("a", dir=1) - config = testdir.parseconfig(source) + source.joinpath("conftest.py").write_text("rsyncdirs = ['a']") + source.joinpath("a").mkdir() + config = pytester.parseconfig(source) nodemanager = NodeManager(config, specs) nodemanager.setup_nodes(None) # calls .rysnc_roots() for gwspec in nodemanager.specs: assert gwspec._samefilesystem() assert not gwspec.chdir - def test_ssh_setup_nodes(self, specssh, testdir): - testdir.makepyfile( + def test_ssh_setup_nodes(self, specssh: str, pytester: pytest.Pytester) -> None: + pytester.makepyfile( __init__="", test_x=""" def test_one(): pass """, ) - reprec = testdir.inline_run( - "-d", "--rsyncdir=%s" % testdir.tmpdir, "--tx", specssh, testdir.tmpdir + reprec = pytester.inline_run( + "-d", "--rsyncdir=%s" % pytester.path, "--tx", specssh, pytester.path ) (rep,) = reprec.getreports("pytest_runtest_logreport") assert rep.passed + + +class MyWarning(UserWarning): + pass + + +@pytest.mark.parametrize( + "w_cls", + [ + UserWarning, + MyWarning, + "Imported", + pytest.param( + "Nested", + marks=pytest.mark.xfail(reason="Nested warning classes are not 
supported."), + ), + ], +) +def test_unserialize_warning_msg(w_cls): + """Test that warning serialization process works well""" + + # Create a test warning message + with pytest.warns(UserWarning) as w: + if not isinstance(w_cls, str): + warnings.warn("hello", w_cls) + elif w_cls == "Imported": + generate_warning() + elif w_cls == "Nested": + # dynamic creation + class MyWarning2(UserWarning): + pass + + warnings.warn("hello", MyWarning2) + + # Unpack + assert len(w) == 1 + w_msg = w[0] + + # Serialize and deserialize + data = serialize_warning_message(w_msg) + w_msg2 = unserialize_warning_message(data) + + # Compare the two objects + all_keys = set(vars(w_msg).keys()).union(set(vars(w_msg2).keys())) + for k in all_keys: + v1 = getattr(w_msg, k) + v2 = getattr(w_msg2, k) + if k == "message": + assert type(v1) == type(v2) + assert v1.args == v2.args + else: + assert v1 == v2 + + +class MyWarningUnknown(UserWarning): + # Changing the __module__ attribute is only safe if class can be imported + # from there + __module__ = "unknown" + + +def test_warning_serialization_tweaked_module(): + """Test for GH#404""" + + # Create a test warning message + with pytest.warns(UserWarning) as w: + warnings.warn("hello", MyWarningUnknown) + + # Unpack + assert len(w) == 1 + w_msg = w[0] + + # Serialize and deserialize + data = serialize_warning_message(w_msg) + + # __module__ cannot be found! 
+ with pytest.raises(ModuleNotFoundError): + unserialize_warning_message(data) diff --git a/testing/util.py b/testing/util.py new file mode 100644 index 00000000..c7bcc552 --- /dev/null +++ b/testing/util.py @@ -0,0 +1,9 @@ +import warnings + + +class MyWarning2(UserWarning): + pass + + +def generate_warning(): + warnings.warn(MyWarning2("hello")) diff --git a/tox.ini b/tox.ini index 3774b08a..0c1655b5 100644 --- a/tox.ini +++ b/tox.ini @@ -1,15 +1,16 @@ [tox] envlist= linting - py{35,36,37,38,39}-pytestlatest - py38-pytestmaster + py{36,37,38,39,310}-pytestlatest + py38-pytestmain py38-psutil - + py38-setproctitle +isolated_build = true [testenv] extras = testing deps = pytestlatest: pytest - pytestmaster: git+https://github.com/pytest-dev/pytest.git@master + pytestmain: git+https://github.com/pytest-dev/pytest.git commands= pytest {posargs} @@ -17,20 +18,28 @@ commands= extras = testing psutil -deps = pytest commands = pytest {posargs:-k psutil} +[testenv:py38-setproctitle] +extras = + testing + setproctitle +deps = pytest +commands = + pytest {posargs} + [testenv:linting] skip_install = True usedevelop = True +passenv = PRE_COMMIT_HOME deps = pre-commit commands = pre-commit run --all-files --show-diff-on-failure [testenv:release] changedir= -decription = do a release, required posarg of the version number +description = do a release, required posarg of the version number basepython = python3.7 skipsdist = True usedevelop = True @@ -40,8 +49,19 @@ deps = commands = towncrier --version {posargs} --yes +[testenv:docs] +basepython = python3 +usedevelop = True +deps = + sphinx + sphinx_rtd_theme +commands = + sphinx-build -W --keep-going -b html docs docs/_build/html {posargs:} + [pytest] -addopts = -ra +# pytest-services also defines a worker_id fixture, disable +# it so they don't conflict with each other (#611). +addopts = -ra -p no:pytest-services testpaths = testing [flake8]